diff --git a/.catkin_workspace b/.catkin_workspace
index 52fd97e7..5d0de8a6 100644
--- a/.catkin_workspace
+++ b/.catkin_workspace
@@ -1 +1,2 @@
 # This file currently only serves to mark the location of a catkin workspace for tool integration
+
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index b0f311c5..e22a4ebc 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -41,7 +41,7 @@
         "ms-python.vscode-pylance",
         "ms-iot.vscode-ros",
         "twxs.cmake",
-        "ms-vscode.cmake-tools",
+        "ms-vscode.cmake-tools", 
         "josetr.cmake-language-support-vscode",
         "ms-vscode-remote.remote-containers",
         "ms-vscode-remote.remote-ssh-edit",
diff --git a/.gitignore b/.gitignore
index 5cb11333..edfe0929 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,7 +3,7 @@ build/
 devel/
 
 # python env
-/venv/
+venv/
 
 # ide
 /.vscode/*
@@ -45,4 +45,4 @@ docs/generated-images
 
 # non-generated documentation images
 !src/wr_logic_teleop_ds/control_scheme_power_levels.png
-!src/wr_logic_teleop_ds/control_scheme_tank_drive.png
\ No newline at end of file
+!src/wr_logic_teleop_ds/control_scheme_tank_drive.png
diff --git a/.gitmodules b/.gitmodules
index 5cdb94ba..5eaa1c38 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,18 +1,18 @@
 [submodule "src/wroboclaw"]
 	path = src/wroboclaw
-	url = https://github.com/WisconsinRobotics/wroboclaw
+	url = git@github.com:WisconsinRobotics/wroboclaw.git
 [submodule "src/wreadinput"]
 	path = src/wreadinput
-	url = https://github.com/WisconsinRobotics/wreadinput
+	url = git@github.com:WisconsinRobotics/wreadinput.git
 [submodule "src/wready"]
 	path = src/wready
-	url = https://github.com/WisconsinRobotics/wready
+	url = git@github.com:WisconsinRobotics/wready.git
 [submodule "src/wresponsecontrol"]
 	path = src/wresponsecontrol
-	url = https://github.com/WisconsinRobotics/wresponsecontrol
+	url = git@github.com:WisconsinRobotics/wresponsecontrol.git
 [submodule "src/wrplidar_ros"]
 	path = src/wrplidar_ros
-	url = https://github.com/WisconsinRobotics/wrplidar_ros.git
+	url = git@github.com:WisconsinRobotics/wrplidar_ros.git
 [submodule "src/wrevolution"]
 	path = src/wrevolution
-	url = https://github.com/WisconsinRobotics/wrevolution
+	url = git@github.com:WisconsinRobotics/wrevolution.git
diff --git a/.vscode/settings.json b/.vscode/settings.json
index a2fe6ef7..e0e6fdc1 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -5,7 +5,9 @@
     ],
     "python.analysis.extraPaths": [
         "./src/wr_hsi_sensing/src",
-        "./src/wr_logic_ai/src"
+        "./src/wr_logic_ai/src",
+        "./src/wr_logic_search/src",
+        "./src/wr_logic_shortrange/src"
     ],
     "C_Cpp.intelliSenseEngine": "disabled",
     "cmake.sourceDirectory": "${workspaceFolder}/src",
@@ -13,4 +15,4 @@
         "editor.defaultFormatter": "ms-python.black-formatter"
     },
     "python.formatting.provider": "none"
-}
\ No newline at end of file
+}
diff --git a/env_remote.sh b/env_remote.sh
index 415096f4..680da6fd 100755
--- a/env_remote.sh
+++ b/env_remote.sh
@@ -9,8 +9,10 @@ CATKIN_SHELL=sh
 _CATKIN_SETUP_DIR=$(cd "`dirname "$0"`" > /dev/null && pwd)
 . 
"$_CATKIN_SETUP_DIR/setup.sh" -export ROS_MASTER/_URI='http://wrover.local:11311' +# export ROS_MASTER_URI='http://wrover.local:11311' +export ROS_MASTER_URI='http://192.168.2.100:11311' export ROS_IP=`ip addr show eth0 | grep -Po 'inet [\d.]+' | awk '{print $2}'` -export ROS_HOSTNAME='wrover.local' +# export ROS_HOSTNAME='wrover.local' +export ROS_HOSTNAME='192.168.2.100' exec "$@" diff --git a/mapproxy.yaml b/mapproxy.yaml new file mode 100644 index 00000000..e29a8395 --- /dev/null +++ b/mapproxy.yaml @@ -0,0 +1,63 @@ +# ------------------------------- +# MapProxy example configuration. +# ------------------------------- +# +# This is a minimal MapProxy configuration. +# See full_example.yaml and the documentation for more options. +# + +# Starts the following services: +# Demo: +# http://localhost:8080/demo +# WMS: +# capabilities: http://localhost:8080/service?REQUEST=GetCapabilities +# WMTS: +# capabilities: http://localhost:8080/wmts/1.0.0/WMTSCapabilities.xml +# first tile: http://localhost:8080/wmts/osm/webmercator/0/0/0.png +# Tile service (compatible with OSM/etc.) +# first tile: http://localhost:8080/tiles/osm/webmercator/0/0/0.png +# TMS: +# note: TMS is not compatible with OSM/Google Maps/etc. +# first tile: http://localhost:8080/tms/1.0.0/osm/webmercator/0/0/0.png +# KML: +# initial doc: http://localhost:8080/kml/osm/webmercator + +services: + demo: + tms: + use_grid_names: true + # origin for /tiles service + origin: 'nw' + kml: + use_grid_names: true + wmts: + wms: + md: + title: MapProxy WMS Proxy + abstract: This is a minimal MapProxy example. + +layers: + - name: gm_layer + title: Google Maps tiles + sources: [gm_cache] + +caches: + gm_cache: + grids: [gm_grid] + sources: [gm] + cache: + type: file + directory_layout: tms + +sources: + gm: + type: tile + url: https://mt0.google.com/vt/lyrs=s&hl=en&x=%(x)s&y=%(y)s&z=%(z)s + grid: gm_grid + +grids: + gm_grid: + base: GLOBAL_MERCATOR + origin: ul + +globals: diff --git a/project.json b/project.json index 9a744437..7c0a2b48 100644 --- a/project.json +++ b/project.json @@ -58,6 +58,13 @@ "package": "ros-noetic-mapviz", "provider": "apt" } + }, + { + "name": "smach", + "build": { + "package": "ros-noetic-smach-ros", + "provider": "apt" + } } ] -} +} \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index f3e34d51..8512b19b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,5 +5,5 @@ python-statemachine==2.0.0 scipy rpi.gpio smbus -numpy==1.24.1 -opencv-contrib-python==4.7.0.68 +numpy==1.23.5 +opencv-contrib-python==4.9.0.80 diff --git a/src/wr_control_drive_ds/CMakeLists.txt b/src/wr_control_drive_ds/CMakeLists.txt index e9561b96..d4119c4e 100644 --- a/src/wr_control_drive_ds/CMakeLists.txt +++ b/src/wr_control_drive_ds/CMakeLists.txt @@ -11,7 +11,7 @@ find_package(catkin REQUIRED COMPONENTS roscpp rospy wr_drive_msgs - wroboclaw + #wroboclaw ) ## System dependencies are found with CMake's conventions diff --git a/src/wr_control_drive_ds/launch/std.launch b/src/wr_control_drive_ds/launch/std.launch index 756e559f..5958390a 100644 --- a/src/wr_control_drive_ds/launch/std.launch +++ b/src/wr_control_drive_ds/launch/std.launch @@ -4,8 +4,8 @@ - + diff --git a/src/wr_entry_point/launch/comp/comp_init.launch b/src/wr_entry_point/launch/comp/comp_init.launch index 9534f54b..f200c1ce 100644 --- a/src/wr_entry_point/launch/comp/comp_init.launch +++ b/src/wr_entry_point/launch/comp/comp_init.launch @@ -7,7 +7,8 @@ - + diff --git a/src/wr_entry_point/launch/erdm.launch 
b/src/wr_entry_point/launch/erdm.launch index a7a5c602..3dd9d00d 100644 --- a/src/wr_entry_point/launch/erdm.launch +++ b/src/wr_entry_point/launch/erdm.launch @@ -17,4 +17,6 @@ + + \ No newline at end of file diff --git a/src/wr_hsi_roboclaw/config/roboclaw_enc.yaml b/src/wr_hsi_roboclaw/config/roboclaw_enc.yaml index 34960c85..14ef0de4 100644 --- a/src/wr_hsi_roboclaw/config/roboclaw_enc.yaml +++ b/src/wr_hsi_roboclaw/config/roboclaw_enc.yaml @@ -1,5 +1,6 @@ -com_port: '/dev/ttyAMA0' -baud: 115200 +com_port: '/dev/roboclaw' +# baud: 115200 +baud: 9600 claws: aux0: # arm turntable/shoulder, science ? topic: '/hsi/roboclaw/aux0' diff --git a/src/wr_hsi_roboclaw/config/roboclaw_noenc.yaml b/src/wr_hsi_roboclaw/config/roboclaw_noenc.yaml index d02f9aa7..ff534051 100644 --- a/src/wr_hsi_roboclaw/config/roboclaw_noenc.yaml +++ b/src/wr_hsi_roboclaw/config/roboclaw_noenc.yaml @@ -1,4 +1,4 @@ -com_port: '/dev/ttyTHS1' +com_port: '/dev/ttyTHS0' baud: 38400 claws: aux0: # arm turntable/shoulder, science ? diff --git a/src/wr_hsi_sensing/CMakeLists.txt b/src/wr_hsi_sensing/CMakeLists.txt index 2cce2494..02373287 100644 --- a/src/wr_hsi_sensing/CMakeLists.txt +++ b/src/wr_hsi_sensing/CMakeLists.txt @@ -10,6 +10,10 @@ project(wr_hsi_sensing) find_package(catkin REQUIRED COMPONENTS message_generation std_msgs + sensor_msgs + geometry_msgs + visualization_msgs + roscpp rospy ublox_gps ) @@ -52,7 +56,7 @@ catkin_python_setup() ## Generate messages in the 'msg' folder add_message_files( - FILES + FILES # Message1.msg # Message2.msg CoordinateMsg.msg @@ -121,15 +125,16 @@ catkin_package( ## Specify additional locations of header files ## Your package locations should be listed before other locations -include_directories( +#include_directories( # include -${catkin_INCLUDE_DIRS} -) +#${catkin_INCLUDE_DIRS} +#) ## Declare a C++ library -# add_library(${PROJECT_NAME} -# src/${PROJECT_NAME}/wr_hsi_sensing.cpp +# add_library(sh2 +# # src/${PROJECT_NAME}/wr_hsi_sensing.cpp # ) +# target_link_libraries(${PROJECT_NAME} sh2) ## Add cmake target dependencies of the library ## as an example, code may need to be generated before libraries @@ -139,7 +144,8 @@ ${catkin_INCLUDE_DIRS} ## Declare a C++ executable ## With catkin_make all packages are built within a single CMake context ## The recommended prefix ensures that target names across packages don't collide -# add_executable(${PROJECT_NAME}_node src/wr_hsi_sensing_node.cpp) +#add_executable() +#target_link_libraries(${catkin_LIBRARIES} ${PROJECT_NAME}) ## Rename C++ executable without prefix ## The above recommended prefix causes long target names, the following renames the diff --git a/src/wr_hsi_sensing/config/ardusimple_config.yaml b/src/wr_hsi_sensing/config/ardusimple_config.yaml index 78798d71..c64b2526 100644 --- a/src/wr_hsi_sensing/config/ardusimple_config.yaml +++ b/src/wr_hsi_sensing/config/ardusimple_config.yaml @@ -1,6 +1,6 @@ debug: 1 # Range 0-4 (0 means no debug statements will print) -device: /dev/ttyACM0 +device: /dev/gps frame_id: gps # TODO: change it in the future? 
config_on_startup: false
diff --git a/src/wr_hsi_sensing/launch/mapviz.launch b/src/wr_hsi_sensing/launch/mapviz.launch
index 4f40db93..9ff49637 100644
--- a/src/wr_hsi_sensing/launch/mapviz.launch
+++ b/src/wr_hsi_sensing/launch/mapviz.launch
@@ -2,6 +2,7 @@ + @@ -11,10 +12,11 @@ - + + - \ No newline at end of file +
diff --git a/src/wr_hsi_sensing/nodes/coordinates.json b/src/wr_hsi_sensing/nodes/coordinates.json
new file mode 100755
index 00000000..01f95cb0
--- /dev/null
+++ b/src/wr_hsi_sensing/nodes/coordinates.json
@@ -0,0 +1,42 @@
+[
+    {
+        "lat": 38.3717950,
+        "long": -110.7045599,
+        "note": "Sample Cache"
+    },
+    {
+        "lat": 38.3717950,
+        "long": -110.7045599,
+        "note": "Rock locations?"
+    },
+    {
+        "lat": 38.3717950,
+        "long": -110.7045599,
+        "note": "Rock Box"
+    },
+    {
+        "lat": 38.3717950,
+        "long": -110.7045599,
+        "note": "Astronaut"
+    },
+    {
+        "lat": 38.3717950,
+        "long": -110.7045599,
+        "note": "Work Area"
+    },
+    {
+        "lat": 38.3717950,
+        "long": -110.7045599,
+        "note": "Wreckage Site"
+    },
+    {
+        "lat": 38.3717950,
+        "long": -110.7045599,
+        "note": "6A: Far Wreckage Site"
+    },
+    {
+        "lat": 38.3717950,
+        "long": -110.7045599,
+        "note": "Drone"
+    }
+]
diff --git a/src/wr_hsi_sensing/nodes/publish_points.py b/src/wr_hsi_sensing/nodes/publish_points.py
new file mode 100755
index 00000000..9ca99e27
--- /dev/null
+++ b/src/wr_hsi_sensing/nodes/publish_points.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python3
+
+## @file
+# @brief Node that publishes test GPS fixes read from coordinates.json
+# @ingroup wr_hsi_sensing
+# @defgroup wr_hsi_sensing_gps GPS
+# @details Loads the waypoints in coordinates.json and publishes them as NavSatFix messages on the /navsat/fix topic
+# @{
+
+import rospy
+from sensor_msgs.msg import NavSatFix
+from visualization_msgs.msg import Marker
+from visualization_msgs.msg import MarkerArray  # used by publish_markers()
+from geometry_msgs.msg import Point
+
+import json
+from pathlib import Path
+
+
+def main():
+    #rospy.init_node("gps_data_subscriber_node", anonymous = False)
+
+    coord_data_publisher = rospy.Publisher("/navsat/fix", NavSatFix, queue_size=1)
+
+    dirname = Path(__file__).parents[0]
+    file_name = Path.joinpath(dirname, "coordinates.json")
+    file = open(file_name, "r").read()
+    coordinates = json.loads(file)
+
+    rospy.loginfo(coordinates)
+
+    rate = rospy.Rate(10)
+    while not rospy.is_shutdown():
+        for coord in coordinates:
+            rospy.loginfo(coord)
+            coord_data_publisher.publish(NavSatFix(latitude=coord["lat"],longitude=coord["long"]))
+            rate.sleep()
+
+# Alternative publisher (not called by default): sends two hard-coded Marker points for RViz
+def publish_markers():
+    # locations = MarkerArray()
+
+    coord_data_publisher = rospy.Publisher("/marker_data", MarkerArray, queue_size=1)
+
+    markers = []
+    lat1 = 38.371768
+    long1 = -110.704255
+    markers.append(Marker(points=[Point(y = lat1, x = long1)]))
+
+    lat2 = 38.371240
+    long2 = -110.704195
+    markers.append(Marker(points=[Point(y = lat2, x = long2)]))
+
+    rate = rospy.Rate(10)
+    while not rospy.is_shutdown():
+        coord_data_publisher.publish(MarkerArray(markers))
+        rate.sleep()
+
+
+
+
+if(__name__ == "__main__"):
+    rospy.init_node("gps_data_points_pub", anonymous = False)
+    #publish_markers()
+    main()
+    #rospy.spin()
diff --git a/src/wr_logic_ai/CMakeLists.txt b/src/wr_logic_ai/CMakeLists.txt
index c0a83207..bd6f9007 100644
--- a/src/wr_logic_ai/CMakeLists.txt
+++ b/src/wr_logic_ai/CMakeLists.txt
@@ -54,8 +54,6 @@ add_message_files(
   FILES
   NavigationState.msg
   ObstaclesMsg.msg
-  VisionTarget.msg
-  Waypoint.msg
 )
 
 # # Generate services in the 'srv' folder
@@ -68,9 +66,9 @@ add_message_files(
 # # Generate actions in the 'action' folder
add_action_files( FILES - LongRange.action - ShortRange.action - SearchState.action + WaitForInput.action +# ShortRange.action +# SearchState.action ) # # Generate added messages and services with any dependencies listed here diff --git a/src/wr_logic_ai/action/SearchState.action b/src/wr_logic_ai/action/SearchState.action deleted file mode 100644 index de3dd219..00000000 --- a/src/wr_logic_ai/action/SearchState.action +++ /dev/null @@ -1,8 +0,0 @@ -#goal definition -float64 target_lat # Info of current and target latitudes and longitudes -float64 target_long -uint8 dist ---- -#result definition ---- -#feedback definition \ No newline at end of file diff --git a/src/wr_logic_ai/action/ShortRange.action b/src/wr_logic_ai/action/ShortRange.action deleted file mode 100644 index 5eb763b9..00000000 --- a/src/wr_logic_ai/action/ShortRange.action +++ /dev/null @@ -1,5 +0,0 @@ -uint8 target_type -uint8 TARGET_TYPE_GPS_ONLY = 0 # Might delete later on. -uint8 TARGET_TYPE_VISION = 1 ---- ---- diff --git a/src/wr_logic_ai/action/WaitForInput.action b/src/wr_logic_ai/action/WaitForInput.action new file mode 100644 index 00000000..2e4ee61c --- /dev/null +++ b/src/wr_logic_ai/action/WaitForInput.action @@ -0,0 +1,3 @@ +# Empty action to wait for input +--- +--- diff --git a/src/wr_logic_ai/config/vision_params.yaml b/src/wr_logic_ai/config/vision_params.yaml deleted file mode 100644 index d951a623..00000000 --- a/src/wr_logic_ai/config/vision_params.yaml +++ /dev/null @@ -1,3 +0,0 @@ -video_stream: "" -vision_topic: "/wr_logic_ai/shortrange_ai/vision_target_data" -debug: true \ No newline at end of file diff --git a/src/wr_logic_ai/launch/freefall.launch b/src/wr_logic_ai/launch/freefall.launch old mode 100644 new mode 100755 index f84ebaa0..415b4d3d --- a/src/wr_logic_ai/launch/freefall.launch +++ b/src/wr_logic_ai/launch/freefall.launch @@ -1,7 +1,6 @@ - - + diff --git a/src/wr_logic_ai/launch/long_range.launch b/src/wr_logic_ai/launch/long_range.launch deleted file mode 100644 index 3c1f708b..00000000 --- a/src/wr_logic_ai/launch/long_range.launch +++ /dev/null @@ -1,17 +0,0 @@ - - - - - - - - - - - - - \ No newline at end of file diff --git a/src/wr_logic_ai/launch/mux_select.launch b/src/wr_logic_ai/launch/mux_select.launch deleted file mode 100644 index 2197c1a1..00000000 --- a/src/wr_logic_ai/launch/mux_select.launch +++ /dev/null @@ -1,16 +0,0 @@ - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/src/wr_logic_ai/launch/short_range.launch b/src/wr_logic_ai/launch/short_range.launch old mode 100644 new mode 100755 index fc0f880a..9c87e830 --- a/src/wr_logic_ai/launch/short_range.launch +++ b/src/wr_logic_ai/launch/short_range.launch @@ -1,11 +1,9 @@ - - diff --git a/src/wr_logic_ai/launch/state_machine.launch b/src/wr_logic_ai/launch/state_machine.launch old mode 100644 new mode 100755 index 2640e89d..5a3d09ff --- a/src/wr_logic_ai/launch/state_machine.launch +++ b/src/wr_logic_ai/launch/state_machine.launch @@ -4,39 +4,82 @@ - - - + + + - + + + + + + + + + - + + + + - - + - - - + + + + + + + + + + + + + - - - - - - + + + + + + + + + + + + + + + + + + - - - + + + + + + + - + + + + + - \ No newline at end of file + + + diff --git a/src/wr_logic_ai/launch/test_marker.rviz b/src/wr_logic_ai/launch/test_marker.rviz new file mode 100755 index 00000000..8d1ea8ab --- /dev/null +++ b/src/wr_logic_ai/launch/test_marker.rviz @@ -0,0 +1,129 @@ +Panels: + - Class: rviz/Displays + Help Height: 78 + Name: Displays + Property Tree Widget: + Expanded: + - /Global Options1 + - /Status1 + - 
/Marker1 + - /Marker1/Status1 + Splitter Ratio: 0.5 + Tree Height: 555 + - Class: rviz/Selection + Name: Selection + - Class: rviz/Tool Properties + Expanded: + - /2D Pose Estimate1 + - /2D Nav Goal1 + - /Publish Point1 + Name: Tool Properties + Splitter Ratio: 0.5886790156364441 + - Class: rviz/Views + Expanded: + - /Current View1 + Name: Views + Splitter Ratio: 0.5 + - Class: rviz/Time + Name: Time + SyncMode: 0 + SyncSource: "" +Preferences: + PromptSaveOnExit: true +Toolbars: + toolButtonStyle: 2 +Visualization Manager: + Class: "" + Displays: + - Alpha: 0.5 + Cell Size: 1 + Class: rviz/Grid + Color: 160; 160; 164 + Enabled: true + Line Style: + Line Width: 0.029999999329447746 + Value: Lines + Name: Grid + Normal Cell Count: 0 + Offset: + X: 0 + Y: 0 + Z: 0 + Plane: XY + Plane Cell Count: 10 + Reference Frame: + Value: true + - Class: rviz/Marker + Enabled: true + Marker Topic: /wRover_marker + Name: Marker + Namespaces: + "": true + Queue Size: 100 + Value: true + Enabled: true + Global Options: + Background Color: 48; 48; 48 + Default Light: true + Fixed Frame: laser + Frame Rate: 30 + Name: root + Tools: + - Class: rviz/Interact + Hide Inactive Objects: true + - Class: rviz/MoveCamera + - Class: rviz/Select + - Class: rviz/FocusCamera + - Class: rviz/Measure + - Class: rviz/SetInitialPose + Theta std deviation: 0.2617993950843811 + Topic: /initialpose + X std deviation: 0.5 + Y std deviation: 0.5 + - Class: rviz/SetGoal + Topic: /move_base_simple/goal + - Class: rviz/PublishPoint + Single click: true + Topic: /clicked_point + Value: true + Views: + Current: + Class: rviz/Orbit + Distance: 148.3261260986328 + Enable Stereo Rendering: + Stereo Eye Separation: 0.05999999865889549 + Stereo Focal Distance: 1 + Swap Stereo Eyes: false + Value: false + Field of View: 0.7853981852531433 + Focal Point: + X: 0 + Y: 0 + Z: 0 + Focal Shape Fixed Size: true + Focal Shape Size: 0.05000000074505806 + Invert Z Axis: false + Name: Current View + Near Clip Distance: 0.009999999776482582 + Pitch: 0.5603981018066406 + Target Frame: + Yaw: 0.6353980302810669 + Saved: ~ +Window Geometry: + Displays: + collapsed: false + Height: 846 + Hide Left Dock: false + Hide Right Dock: false + QMainWindow State: 
000000ff00000000fd000000040000000000000343000002b4fc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003b000002b4000000c700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000010f000002b4fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000003b000002b4000000a000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000006240000003efc0100000002fb0000000800540069006d00650100000000000006240000030700fffffffb0000000800540069006d00650100000000000004500000000000000000000001c6000002b400000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + Selection: + collapsed: false + Time: + collapsed: false + Tool Properties: + collapsed: false + Views: + collapsed: false + Width: 1572 + X: -32 + Y: -32 diff --git a/src/wr_logic_ai/launch/test_smach.launch b/src/wr_logic_ai/launch/test_smach.launch new file mode 100644 index 00000000..b0ba3b5e --- /dev/null +++ b/src/wr_logic_ai/launch/test_smach.launch @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/wr_logic_ai/launch/vision.launch b/src/wr_logic_ai/launch/vision.launch deleted file mode 100644 index ebdde00e..00000000 --- a/src/wr_logic_ai/launch/vision.launch +++ /dev/null @@ -1,5 +0,0 @@ - - - - - \ No newline at end of file diff --git a/src/wr_logic_ai/launch/vision_test.launch b/src/wr_logic_ai/launch/vision_test.launch deleted file mode 100644 index e294b702..00000000 --- a/src/wr_logic_ai/launch/vision_test.launch +++ /dev/null @@ -1,5 +0,0 @@ - - - - - \ No newline at end of file diff --git a/src/wr_logic_ai/launch/wrplidar.launch b/src/wr_logic_ai/launch/wrplidar.launch new file mode 100644 index 00000000..573fb54b --- /dev/null +++ b/src/wr_logic_ai/launch/wrplidar.launch @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/src/wr_logic_ai/meshes/Eclipse_Base_Simple.stl b/src/wr_logic_ai/meshes/Eclipse_Base_Simple.stl new file mode 100644 index 00000000..53cb8f60 Binary files /dev/null and b/src/wr_logic_ai/meshes/Eclipse_Base_Simple.stl differ diff --git a/src/wr_logic_ai/meshes/flag.stl b/src/wr_logic_ai/meshes/flag.stl new file mode 100644 index 00000000..946d30cf Binary files /dev/null and b/src/wr_logic_ai/meshes/flag.stl differ diff --git a/src/wr_logic_ai/msg/VisionTarget.msg b/src/wr_logic_ai/msg/VisionTarget.msg deleted file mode 100644 index 834fc974..00000000 --- a/src/wr_logic_ai/msg/VisionTarget.msg +++ /dev/null @@ -1,4 +0,0 @@ -uint32 id -float32 x_offset -float32 distance_estimate -bool valid diff --git a/src/wr_logic_ai/msg/Waypoint.msg b/src/wr_logic_ai/msg/Waypoint.msg deleted file mode 100644 index f1a6cea1..00000000 --- a/src/wr_logic_ai/msg/Waypoint.msg +++ 
/dev/null @@ -1,5 +0,0 @@ -float64 target_lat # Info of current and target latitudes and longitudes -float64 target_long -uint8 target_type -uint8 TARGET_TYPE_GPS_ONLY = 0 -uint8 TARGET_TYPE_VISION = 1 diff --git a/src/wr_logic_ai/nodes/drive_mux_select.py b/src/wr_logic_ai/nodes/drive_mux_select.py deleted file mode 100755 index 5874bbd3..00000000 --- a/src/wr_logic_ai/nodes/drive_mux_select.py +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/env python3 - -"""@file -@defgroup wr_logic_ai_mux_selector Mux_Selector -@ingroup wr_logic_ai -@brief Node for switching between long-range navigation output and short-range navigation output to -rover motor power inputs -@details In the world of computer engineering, MUX is short for multiplexer. A multiplexer is a logic -circuit that takes in multiple input signals, chooses one among them, and outputs that signal. - -Here, the mux select node performs a similar function by choosing a motor power output between -long-range logics and short-range logics and forwards it to the actual motor power topic. The -switching behavior is controlled by which state the state machine is currently in. The purpose of -doing this extra step is to add a layer of abstraction between higher level logics and low level -controls. Both long range and short range believes that they have complete control over the rover, -thereby simplifying their implementations. -@{ -""" - -import rospy -from wr_logic_ai.msg import NavigationState -from std_msgs.msg import String -from typing import Dict -from topic_tools.srv import MuxSelect - - -class MUX: - """ - This class implements the MUX selector - """ - - def __init__(self) -> None: - ## Saves the current mux output source - self.last_message_state = None - - def state_call_back( - self, message: NavigationState, conversion_table: Dict[int, str], mux_name: str - ): - """ - Switches the output source of the mux according to the given NavigationState message - - @param message (NavigationState): Output source for the mux (either long-range or short-range) - @param conversion_table (Dict[int, str]): Converts the message content to either long-range or - @param short-range's output topic name - @param mux_name (str): This mux node's output topic name - """ - if self.last_message_state != message.state: - self.last_message_state = message.state - rospy.wait_for_service(f"/{mux_name}/select") - try: - proxy = rospy.ServiceProxy(f"/{mux_name}/select", MuxSelect) - proxy(conversion_table[message.state]) - except rospy.ServiceException as e: - rospy.logerr(e) - - -if __name__ == "__main__": - """ - Initializes the MUX object and sets up the subscriber callback function - """ - - rospy.init_node("mux_select_node") - conversion = { - NavigationState.NAVIGATION_STATE_LONG_RANGE: rospy.get_param( - "~long_range_topic_name" - ), - NavigationState.NAVIGATION_STATE_SHORT_RANGE: rospy.get_param( - "~short_range_topic_name" - ), - } - mux_instance = MUX() - sub = rospy.Subscriber( - "/navigation_state", - NavigationState, - lambda msg: mux_instance.state_call_back( - msg, conversion, rospy.get_param("~mux_name") - ), - ) - rospy.spin() - -## }@ diff --git a/src/wr_logic_ai/nodes/inputFakeData.py b/src/wr_logic_ai/nodes/inputFakeData.py deleted file mode 100755 index e2023f67..00000000 --- a/src/wr_logic_ai/nodes/inputFakeData.py +++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env python3 -import rospy -from std_msgs.msg import Float64 -from sensor_msgs.msg import LaserScan -from wr_hsi_sensing.msg import CoordinateMsg - -from wr_drive_msgs.msg import 
DriveTrainCmd -from geometry_msgs.msg import PoseStamped -from finder import offset_lidar_data -from copy import deepcopy - -import math - - -def get_laser_ranges(t=0): - inputData = [] - for x in range(360): - distance = 10 - if t <= x and x <= t + 40: - distance = 0.5 - inputData.append(distance) - return inputData - - -def run_mock_data() -> None: - global mock_heading - - distanceData = rospy.Publisher("/scan", LaserScan, queue_size=10) - mock_heading_pub = rospy.Publisher("/heading_data", Float64, queue_size=10) - mock_gps_pub = rospy.Publisher("/gps_coord_data", CoordinateMsg, queue_size=10) - zero_pub = rospy.Publisher("/debug_zero", PoseStamped, queue_size=10) - zero_msg = PoseStamped() - zero_msg.pose.position.x = 0 - zero_msg.pose.position.y = 0 - zero_msg.pose.position.z = 0 - zero_msg.pose.orientation.x = 0 - zero_msg.pose.orientation.y = 0 - zero_msg.pose.orientation.z = 0 - zero_msg.pose.orientation.w = 1 - zero_msg.header.frame_id = "laser" - frameCount = 0 - - # Testing publishers and subscribers - # rospy.Subscriber('/control/drive_system/cmd', DriveTrainCmd, updateHeading) - - laser = LaserScan() - # vara.intensities - - # laser.angle_min = 0. - laser.angle_max = 2 * math.pi - laser.angle_increment = math.pi / 180 - laser.time_increment = 0 - laser.scan_time = 1 - laser.range_min = 0 - laser.range_max = 150 - laser.ranges = get_laser_ranges(0) - laser.header.frame_id = "laser" - laser.intensities = [] - - mock_heading = 0 - mock_gps = CoordinateMsg() - mock_gps.latitude = 10 - mock_gps.longitude = 10 - - print("sent fake nav data") - - sleeper = rospy.Rate(10) - t = 0 - while not rospy.is_shutdown(): - laser.ranges = get_laser_ranges(t) - # distanceData.publish(laser) - - zero_msg.header.seq = frameCount - zero_msg.header.stamp = rospy.get_rostime() - frameCount += 1 - - mock_heading_pub.publish(mock_heading) - mock_gps_pub.publish(mock_gps) - zero_pub.publish(zero_msg) - sleeper.sleep() - t += 2 - t %= 360 - - -def updateHeading(data) -> None: - global mock_heading - mock_heading = (mock_heading + (data.right_value - data.left_value) * 10) % 360 - - -def display_data(data) -> None: - rviz_data = deepcopy(data) - rviz_data.ranges = offset_lidar_data( - rviz_data.ranges, math.degrees(rviz_data.angle_increment), True - ) - scan_rviz_pub = rospy.Publisher("/scan_rviz", LaserScan, queue_size=10) - scan_rviz_pub.publish(rviz_data) - - -def run_real_data() -> None: - rospy.Subscriber("/scan", LaserScan, display_data) - - -if __name__ == "__main__": - rospy.init_node("publish_fake_data", anonymous=False) - - if rospy.get_param("~run_in_mock", True): - # Run fake data - run_mock_data() - else: - # Run lidar data - sub = rospy.Subscriber("/scan", LaserScan, display_data) - # run_real_data() - rospy.spin() diff --git a/src/wr_logic_ai/nodes/search_action_server.py b/src/wr_logic_ai/nodes/search_action_server.py deleted file mode 100644 index 4416c9c1..00000000 --- a/src/wr_logic_ai/nodes/search_action_server.py +++ /dev/null @@ -1,54 +0,0 @@ -import rospy -import actionlib -from wr_logic_ai.msg import SearchStateAction, SearchStateGoal -import obstacle_avoidance -import travel_timer - -''' -@ingroup wr_logic_ai -@defgroup wr_logic_ai Search Action Server -@brief Tells the rover travel to the target coordinates and calls obstacle avoidance to make sure the rover can actually reach these coordinates. -@details Enters a while loop that continually calls obstacle avoidance to update the target to the next coordinate in the list. 
-For each coordinate, if the time between the starting time and the current time go above the estimated travel time for that -specific coordinate, the coordinate is assumed to be unreachable, and the rover will skip it and move on to the next coordinate. -''' - -class SearchActionServer(object): - def __init__(self, name) -> None: - """ - Initializes the action server - - @param name (String): Name of the action server - """ - - self._action_name = name - obstacle_avoidance.initialize() - self._as = actionlib.SimpleActionServer( - self._action_name, - SearchStateAction, - execute_cb=self.execute_callback, - auto_start=False, - ) - self._as.start() - - def execute_callback(self, goal: SearchStateGoal): - """ - Executes the long range obstacle avoidance code, and triggers the corresponding state machine event - depending on the result of the navigation - - @param goal (SearchGoal): Goal for the navigation segment, which contains the GPS coordinates - of the target - """ - start_time = rospy.get_rostime() - while ( - rospy.get_rostime() - start_time < travel_timer.calc_point_time(goal.dist) - and not rospy.is_shutdown() - ): - if obstacle_avoidance.update_target(goal.target_lat, goal.target_long): - return self._as.set_succeeded() - return self._as.set_aborted() - -if __name__ == "__main__": - rospy.init_node("search_action_server") - server = SearchActionServer("SearchActionServer") - rospy.spin() \ No newline at end of file diff --git a/src/wr_logic_ai/nodes/search_pattern_client.py b/src/wr_logic_ai/nodes/search_pattern_client.py deleted file mode 100644 index 96d86c62..00000000 --- a/src/wr_logic_ai/nodes/search_pattern_client.py +++ /dev/null @@ -1,29 +0,0 @@ -import rospy -from wr_logic_ai.srv import SearchPatternService, SearchPatternServiceRequest, SearchPatternServiceResponse - -''' -@ingroup wr_logic_ai -@defgroup wr_logic_ai Search Pattern Client -@brief The client for the camera that scans the rover's surroundings for the target object. -@details This client is currently waiting for the response from the server and sending the -result (boolean), which seems a little redundant. This might be changed or completely -removed in the future. -''' - -def main(): - rospy.init_node("search_pattern_client") - search_pattern_service = rospy.ServiceProxy('search_pattern_service', SearchPatternService) - rate = rospy.Rate(10) - rospy.wait_for_service('object_detection') - - while not rospy.is_shutdown(): - try: - response:SearchPatternServiceResponse = search_pattern_service() - return response.output - except rospy.ServiceException as e: - print(f"Service call failed: {e}") - - rate.sleep() - -if __name__ == '__main__': - main() \ No newline at end of file diff --git a/src/wr_logic_ai/nodes/search_pattern_server.py b/src/wr_logic_ai/nodes/search_pattern_server.py deleted file mode 100644 index cce3b7db..00000000 --- a/src/wr_logic_ai/nodes/search_pattern_server.py +++ /dev/null @@ -1,34 +0,0 @@ -#!/usr/bin/env python - -import rospy -from wr_logic_ai.srv import SearchPatternService, SearchPatternServiceRequest, SearchPatternServiceResponse -import keyboard # I guess we don't need this anymore since we're not using FreeFall - -''' -@ingroup wr_logic_ai -@defgroup wr_logic_ai Search Pattern Server -@brief The server for the camera that scans the rover's surroundings for the target object. -@details Currently, this is a simulated server where pressing 'w' is a success and -pressing 'l' is a fail. 
This was originally for testing the search pattern with FreeFall -and will probably be changed in the future as we learn how to integrate the camera into -the state machine software. -''' - -def search_pattern_handler(request: SearchPatternServiceRequest): - target_found = False - - # simulate the camera detecting the target - if keyboard.is_pressed('w'): - target_found = True - return target_found # return what?? - elif keyboard.is_pressed('l'): - return target_found # return what?? - -# initialize the server node -def search_pattern_server(): - rospy.init_node('search_pattern_server') - s = rospy.Service('search_pattern_service', SearchPatternService, search_pattern_handler) - rospy.spin() - -if __name__ == '__main__': - search_pattern_server() \ No newline at end of file diff --git a/src/wr_logic_ai/nodes/shortrange_state_machine.py b/src/wr_logic_ai/nodes/shortrange_state_machine.py deleted file mode 100755 index 5f13e6d3..00000000 --- a/src/wr_logic_ai/nodes/shortrange_state_machine.py +++ /dev/null @@ -1,91 +0,0 @@ -#!/usr/bin/env python - -##@defgroup wr_shortrange_ai -# @{ -# @defgroup wr_shortrange_ai_state_machine Shortrange State Machine -# @brief Node for the shortrange state machine -# @details The shortrange state machine implements the state machine depicted below. -# -# The state machine runs when the ActionServer callback, ShortrangeStateMachine.shortrange_callback(), is executed. -# -# Currently, the only state is [VisionNavigation](@ref wr_logic_ai.shortrange.vision_navigation.VisionNavigation). -# This state drives the rover to an ArUco tag. -# -# ![](ShortrangeStateMachine.png) -# @{ - -from typing import Union - -import rospy -import actionlib -from actionlib_msgs.msg import GoalStatus - -from wr_logic_ai.shortrange.shortrange_util import ShortrangeStateEnum, ShortrangeState -from wr_logic_ai.shortrange.encoder_drive import EncoderDrive -from wr_logic_ai.shortrange.vision_navigation import VisionNavigation -from wr_logic_ai.msg import ShortrangeAction, ShortrangeGoal - - -class ShortrangeStateMachine: - """ - @brief Shortrange state machine class - - The shortrange state machine is implemented as an ActionServer. 
- """ - - def __init__(self, name: str) -> None: - """ - Initialize the shortrange state machine - - @param name (str): The name of the action - """ - ## The name of the action - self._action_name = name - ## SimpleActionServer using shortrange_callback to execute ShortrangeGoals - self._as = actionlib.SimpleActionServer( - self._action_name, - ShortrangeAction, - execute_cb=self.shortrange_callback, - auto_start=False, - ) - self._as.start() - ## The current state of the state machine - self.state: ShortrangeStateEnum = None - - def shortrange_callback(self, goal: ShortrangeGoal): - """ - Sets the shortrange state based on the ShortrangeGoal message - - @param goal (ShortrangeGoal): ShortrangeGoal message defined in action/ShortRange.action - """ - - # Set initial state based on goal - if goal.target_type == goal.TARGET_TYPE_GPS_ONLY: - self.state = ShortrangeStateEnum.SUCCESS - elif goal.target_type == goal.TARGET_TYPE_SINGLE_MARKER: - self.state = ShortrangeStateEnum.VISION_DRIVE - else: - self.state = None - - distance = 0 - # Run states while the current state is not a terminating state - while self.state and self.state not in ShortrangeStateEnum.TERMINATING: - if self.state is ShortrangeStateEnum.VISION_DRIVE: - self.state = VisionNavigation().run() - elif self.state is ShortrangeStateEnum.ENCODER_DRIVE: - self.state = EncoderDrive(distance).run() - - # Set result of - if self.state is ShortrangeStateEnum.SUCCESS: - self._as.set_succeeded() - else: - self._as.set_aborted() - - -if __name__ == "__main__": - rospy.init_node("shortrange_state_machine") - ShortrangeStateMachine("ShortRangeActionServer") - rospy.spin() - -## @} -# @} diff --git a/src/wr_logic_ai/nodes/state_machine/state_machine.py b/src/wr_logic_ai/nodes/state_machine/state_machine.py index c165f19d..107402af 100755 --- a/src/wr_logic_ai/nodes/state_machine/state_machine.py +++ b/src/wr_logic_ai/nodes/state_machine/state_machine.py @@ -1,422 +1,257 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 -""" -@defgroup wr_logic_ai_state_machine_ai -@{ -@defgroup wr_logic_ai_state_machine_py state_machine.py -@brief Node for state machine that drives the autonomous nagivation code -@details The autonomous navigation code is driven by the state machine below. This node uses the statemachine python library to -handle the setup of the state machine, including initializing states and events, handling state transitions, executing the state -loop, and more. Here, we need to define our states and events as well as behavior upon entering or leaving states. - -State: This will determine what the robot is currently doing - -Every state will have a function for when running, entering, and exiting - -Event: Most events will be either a success or error. This will indicate which state to go to next depending on the event and state it is at. 
- - -![](NavigationStateMachine.png) -@{ -""" +import rospy +from smach import StateMachine, State +from smach_ros import SimpleActionState +from std_srvs.srv import Empty -from __future__ import annotations -from statemachine import StateMachine, State -from wr_logic_ai.coordinate_manager import CoordinateManager -from wr_logic_ai.msg import ( - NavigationState, - LongRangeAction, - LongRangeGoal, - LongRangeActionResult, +from wr_logic_longrange.msg import LongRangeAction, LongRangeGoal +from wr_logic_longrange.msg import InitCompassAction, InitCompassGoal +from wr_logic_search.msg import SearchStateAction, SearchStateGoal +from wr_logic_shortrange.msg import ShortRangeAction, ShortRangeGoal +from wr_logic_ai.color_matrix import ( + COLOR_NONE, + COLOR_AUTONOMOUS, + COLOR_COMPLETE, + COLOR_ERROR, + set_matrix_color, ) -import coord_calculations, travel_timer -from wr_logic_ai.msg import ShortRangeAction, ShortRangeGoal, ShortRangeActionResult -from wr_logic_ai.msg import SearchStateAction, SearchStateGoal, SearchStateActionResult -from wr_logic_ai.srv import SearchPatternService -from wr_led_matrix.srv import ( - led_matrix as LEDMatrix, - led_matrixRequest as LEDMatrixRequest, -) -from std_srvs.srv import Empty -import rospy -import actionlib -from actionlib_msgs.msg import GoalStatus -from wr_drive_msgs.msg import DriveTrainCmd -import threading -import time -import pdb - -## LED matrix color for when the rover is navigating towards the target using autonomous navigation -COLOR_AUTONOMOUS = LEDMatrixRequest(RED=0, GREEN=0, BLUE=255) -## LED matrix color for when the rover has reached its target -COLOR_COMPLETE = LEDMatrixRequest(RED=0, GREEN=255, BLUE=0) -## LED matrix color for when the rover has encountered an error while executing autonomous navigation -COLOR_ERROR = LEDMatrixRequest(RED=255, GREEN=0, BLUE=0) -## Initial LED matrix color -COLOR_NONE = LEDMatrixRequest(RED=0, GREEN=0, BLUE=0) - - -def set_matrix_color(color: LEDMatrixRequest) -> None: - """Helper function for setting the LED matrix color - - @param color The color to set the LED matrix to - """ - matrix_srv = rospy.ServiceProxy("/led_matrix", LEDMatrix) - matrix_srv.wait_for_service() - matrix_srv.call(COLOR_NONE) - time.sleep(0.5) - matrix_srv.call(color) - - -class NavStateMachine(StateMachine): - """ - This class implements the state machine used in autonomous navigation - - @param StateMachine (StateMachine): The StateMachine class imported from the statemachine python library, required to declare - this class as a state machine - """ - - # Defining states - ## Starter state of the state machine - stInit = State(initial=True) - ## State representing that the robot is running in long range mode - stLongRange = State() - ## State representing that the robot is recovering from a error state - stLongRangeRecovery = State() - ## State representing that the robot is searching for the target - stSearch = State() - ## State representing that the robot is running in short range mode - stShortRange = State() - ## State representing that the robot has completed a task at a waypoint - stWaypointSuccess = State() - ## State representing that the robot has completed all autonomous navigation tasks - stComplete = State() - - # Defining events and transitions - ## Event representing a successful task execution - evSuccess = ( - stLongRange.to(stShortRange) - | stLongRangeRecovery.to(stLongRange) - | stSearch.to(stShortRange) - | stShortRange.to(stWaypointSuccess) - ) - ## Event representing an error condition being raised - 
evError = ( - stLongRange.to(stLongRangeRecovery) - | stLongRangeRecovery.to(stLongRangeRecovery) - | stSearch.to(stLongRange) - | stShortRange.to(stLongRange) - ) - - ## Event representing a shortcircuit state transition from WaypointSuccess to LongRange - evNotWaiting = stWaypointSuccess.to(stLongRange) - ## Event representing an unconditional state transition from Init to LongRange - evUnconditional = stInit.to(stLongRange) - ## Event representing all tasks are complete - evComplete = stWaypointSuccess.to(stComplete) - - def __init__(self, mgr: CoordinateManager) -> None: - """ - Initializes the state machine - - @param mgr (CoordinateManager): Helper class for retrieving target waypoint GPS coordinates - """ - # Get coordinates into self._mgr - self._mgr = mgr - # Start in current event -1, as state machine runs it will go into event 0 (init) - self.currentEvent = -1 - # State machine will publish into the mux to switch b/w long range and short range - self.mux_pub = rospy.Publisher( - "/navigation_state", NavigationState, queue_size=1 - ) +from wr_logic_ai.coordinate_manager import CoordinateManager - # Initialization of messages to switch the mux - self.mux_long_range = NavigationState() - self.mux_long_range.state = NavigationState.NAVIGATION_STATE_LONG_RANGE - self.mux_search = NavigationState() - self.mux_search.state = NavigationState.NAVIGATION_STATE_SEARCH - self.mux_short_range = NavigationState() - self.mux_short_range.state = NavigationState.NAVIGATION_STATE_SHORT_RANGE - super(NavStateMachine, self).__init__() - - def init_calibrate(self, pub: rospy.Publisher, stop_time: float) -> None: - """ - Function to spin the robot for a certain time. The IMU (N,E,S,W) needs to be spinned to be correct. - - @param stop_time Time when the robot should stop - @param pub (rospy.Publisher): Publishes drive values to motors - """ - if rospy.get_time() < stop_time: - pub.publish(DriveTrainCmd(left_value=0.3, right_value=-0.3)) +TARGET_CONSTANTS = { + "aruco 0": ShortRangeGoal.ARUCO_ID_0, + "aruco 1": ShortRangeGoal.ARUCO_ID_1, + "aruco 2": ShortRangeGoal.ARUCO_ID_2, + "aruco 3": ShortRangeGoal.ARUCO_ID_3, + "aruco 4": ShortRangeGoal.ARUCO_ID_4, + "mallet": ShortRangeGoal.OBJ_MALLET, + "bottle": ShortRangeGoal.OBJ_BOTTLE, +} + + +class St_Longrange_Complete(State): + def __init__(self): + State.__init__(self, outcomes=["longrange_only", "shortrange"], input_keys=["target_lat", "target_long"], output_keys=["search_lat", "search_long", "shortrange_target_type"]) + + def execute(self, userdata): + # Checks if the waypoint requires target search and approach + if CoordinateManager.get_coordinate()["target"] == "none": + return "longrange_only" else: - pub.publish(DriveTrainCmd(left_value=0, right_value=0)) - self.evUnconditional() + userdata.search_lat = userdata.target_lat + userdata.search_long = userdata.target_long - def init_w_ros(self): - """Init function to start ROS publisher and time""" - # Set the LED to autonomous - set_matrix_color(COLOR_AUTONOMOUS) + target_str = CoordinateManager.get_coordinate()["target"] + userdata.shortrange_target_type = TARGET_CONSTANTS.get( + target_str, ShortRangeGoal.ANY + ) + return "shortrange" - # Publisher for sending init calibrate - pub = rospy.Publisher("/control/drive_system/cmd", DriveTrainCmd, queue_size=1) - # Set amount of time to calibrate - stop_time = rospy.get_time() + 7 - self._init_tmr = rospy.Timer( - rospy.Duration.from_sec(0.1), lambda _: self.init_calibrate(pub, stop_time) +class St_Shortrange_Setup(State): + def __init__(self): + 
State.__init__( + self, outcomes=["shortrange"], output_keys=["shortrange_target_type"] ) - def on_enter_stInit(self) -> None: - print("\non enter stInit") - rospy.loginfo("\non enter stInit") - # Get the coordinates that we will have to go to - self._mgr.read_coordinates_file() - - # Run calibrate for seven seconds - threading.Timer(1, lambda: self.init_w_ros()).start() - - def on_exit_stInit(self) -> None: - # Stop calibration code - self._init_tmr.shutdown() - # Check if there is a new coordinate. Will go to event complete if ended. - if self._mgr.next_coordinate(): - self.evComplete() - - def _longRangeActionComplete( - self, state: GoalStatus, _: LongRangeActionResult - ) -> None: - if state == GoalStatus.SUCCEEDED: - self.evSuccess() - else: - self.evError() - - def on_enter_stLongRange(self) -> None: - print("\non enter stLongRange") - rospy.loginfo("\non enter stLongRange") + def execute(self, userdata): + return "shortrange" - # Publish to mux, runs every .2 seconds. (The mux will tell the robot to get drive values from Long Range and not short range) - self.timer = rospy.Timer( - rospy.Duration(0.2), lambda _: self.mux_pub.publish(self.mux_long_range) - ) - # sets autonomous color for the LED - set_matrix_color(COLOR_AUTONOMOUS) +class St_Return(State): + def __init__(self): + State.__init__(self, outcomes=["longrange"]) - # Initialize the action client that will run the long range - self._client = actionlib.SimpleActionClient( - "LongRangeActionServer", LongRangeAction - ) - self._client.wait_for_server() + def execute(self, userdata): + set_matrix_color(COLOR_ERROR) + previous_coord = False - # Get the coordinates as a LongRangeGoal. Two coordinates (lat, long) - goal = LongRangeGoal( - target_lat=self._mgr.get_coordinate()["lat"], - target_long=self._mgr.get_coordinate()["long"], - ) + # The code will stay here until the user inputs a value ("p" or "c"). 
+ while True: + rospy.loginfo("Operator instruction required.") + key = input("Enter p to go to previous coordinate or c to continue") + if key == "p": + previous_coord = True + break + elif key == "c": + break - # Send coordinates to action service - self._client.send_goal( - goal, - done_cb=lambda status, result: self._longRangeActionComplete( - status, result - ), - ) + if previous_coord: + CoordinateManager.previous_coordinate() - def on_exit_stLongRange(self) -> None: - print("Exting Long Range") - rospy.loginfo("Exting Long Range") - self.timer.shutdown() # Stop timer that was being used for MUX + userdata.target_lat = CoordinateManager.get_coordinate()["lat"] + userdata.target_long = CoordinateManager.get_coordinate()["long"] - def _longRangeRecoveryActionComplete( - self, state: GoalStatus, _: LongRangeActionResult - ) -> None: - if state == GoalStatus.SUCCEEDED: - self.evSuccess() - else: - self.evError() - - def on_enter_stLongRangeRecovery(self) -> None: - print("\non enter stLongRangeRecovery") - rospy.loginfo("\non enter stLongRangeRecovery") + set_matrix_color(COLOR_AUTONOMOUS) + return "longrange" - set_matrix_color(COLOR_ERROR) - # There was a problem in the state machine if mgr is empty (it should have completed) - if self._mgr is None: - raise ValueError - else: - # Get previous coordinate and go to it as same as long range as a live-action debugging strategy - self._mgr.previous_coordinate() - print(self._mgr.get_coordinate()) - self.timer = rospy.Timer( - rospy.Duration(0.2), lambda _: self.mux_pub.publish(self.mux_long_range) - ) - self._client = actionlib.SimpleActionClient( - "LongRangeActionServer", LongRangeAction - ) - self._client.wait_for_server() - goal = LongRangeGoal( - target_lat=self._mgr.get_coordinate()["lat"], - target_long=self._mgr.get_coordinate()["long"], - ) - self._client.send_goal( - goal, - done_cb=lambda status, result: self._longRangeRecoveryActionComplete( - status, result - ), - ) +class St_Waypoint_Complete(State): + def __init__(self): + State.__init__( + self, + outcomes=["next_waypoint", "complete"], + output_keys=["target_lat", "target_long"], + ) - def on_exit_stLongRangeRecovery(self) -> None: - self.timer.shutdown() # Shutdown mux timer + # Defined for lambda function, not part of state machine architecture + def _blink_complete(self) -> None: + # Blinks the color off for 0.5 sec, the other 0.5 sec we sleep + set_matrix_color(COLOR_COMPLETE) - def _searchActionComplete(self, state: GoalStatus, _: SearchStateActionResult) -> None: # SearchActionResult - if state == GoalStatus.SUCCEEDED: - self.evSuccess() - else: - self.evError() + def execute(self, userdata): + # Runs every one second, this blinks the LED to say we finished short range + self._complete_blinker = rospy.Timer( + rospy.Duration.from_sec(1), lambda _: self._blink_complete() + ) - def on_enter_stSearch(self) -> None: - distance = 4 - num_vertices = 22 + # The code will stay here until the user inputs a value ("c"). + while True: + rospy.loginfo("Enter c to continue") + if input("Enter c to continue: ") == "c": + break - print("\non enter stSearch") - rospy.loginfo("\non enter stSearch") - self.timer = rospy.Timer(rospy.Duration(0.2), lambda _: self.mux_pub.publish(self.mux_search)) + self._complete_blinker.shutdown() - # enter autonomous mode - set_matrix_color(COLOR_AUTONOMOUS) + # If there is a next coordinate, go back to long range. 
If not, it is complete + if CoordinateManager.has_next_coordinate(): + userdata.target_lat = CoordinateManager.get_coordinate()["lat"] + userdata.target_long = CoordinateManager.get_coordinate()["long"] - self._client = actionlib.SimpleActionClient("SearchActionServer", SearchStateAction) # "SearchActionServer", SearchAction - self._client.wait_for_server() - - coords = coord_calculations.get_coords(self._mgr.get_coordinate()["lat"], self._mgr.get_coordinate()["long"], distance, num_vertices) - SEARCH_TIMEOUT_TIME = travel_timer.calc_state_time() # default = 20 meters - - i = 0 - start_time = rospy.get_rostime() - while ( - rospy.get_rostime() - start_time < SEARCH_TIMEOUT_TIME - and not rospy.is_shutdown() - and i < num_vertices - ): - if (i != 0): # skip starting point because rover is already there - goal = SearchStateGoal(target_lat=coords[i]["lat"], target_long=coords[i]["long"], dist=coords[i]['distance']) # SearchGoal - self._client.send_goal(goal, done_cb=lambda status, result: self._searchActionComplete(status, result)) - - # Camera Service - still not sure about integrating the camera with the state machine - camera_service = rospy.ServiceProxy('search_pattern_service', SearchPatternService) - camera_service.wait_for_service('search_pattern_service') - - if camera_service: - break # what should be done when the target object is found? how should we enter ShortRange? - - i += 1 - - def on_exit_stSearch(self) -> None: - print("Exiting Search") - rospy.loginfo("Exiting Search") - self.timer.shutdown() - - def _shortRangeActionComplete( - self, state: GoalStatus, _: ShortRangeActionResult - ) -> None: - if state == GoalStatus.SUCCEEDED: - self.evSuccess() + set_matrix_color(COLOR_AUTONOMOUS) + return "next_waypoint" else: - self.evError() + return "complete" - def on_enter_stShortRange(self) -> None: - print("\non enter stShortRange") - rospy.loginfo("\non enter stShortRange") - set_matrix_color(COLOR_AUTONOMOUS) - - # Set mux to switch to short range - self.timer = rospy.Timer( - rospy.Duration(0.2), lambda _: self.mux_pub.publish(self.mux_short_range) +def main(): + sm = StateMachine(["succeeded", "aborted", "preempted"]) + CoordinateManager.read_coordinates_file() + rospy.init_node("nav_state_machine", anonymous=False) + rospy.loginfo("running state machine") + + set_matrix_color(COLOR_AUTONOMOUS) + + sm.userdata.target_lat = CoordinateManager.get_coordinate()["lat"] + sm.userdata.target_long = CoordinateManager.get_coordinate()["long"] + sm.userdata.shortrange_target_type = ShortRangeGoal.ANY + sm.userdata.search_lat = 0 + sm.userdata.search_long = 0 + + with sm: + # st_init_compass + StateMachine.add( + "st_init_compass", + SimpleActionState( + "InitCompassActionServer", InitCompassAction, goal=InitCompassGoal() + ), + transitions={"succeeded": "st_longrange", "aborted": "st_init_compass"}, ) - # Initialized short range action server that will do short range - self._client = actionlib.SimpleActionClient( - "ShortRangeActionServer", ShortRangeAction - ) - self._client.wait_for_server() - - # Define different types of short range that will be used - TARGET_TYPE_MAPPING = [ - ShortRangeGoal.TARGET_TYPE_GPS_ONLY, # 0 Vision markers - ShortRangeGoal.TARGET_TYPE_VISION, # 1 Vision marker - ] - # Get what type of short range action it is from the coordinates - goal = ShortRangeGoal( - target_type=TARGET_TYPE_MAPPING[ - self._mgr.get_coordinate()["num_vision_posts"] - ] - ) - # Run the short range action and send what type of short range it is. 
- self._client.send_goal( - goal, - done_cb=lambda status, result: self._shortRangeActionComplete( - status, result + # st_longrange + StateMachine.add( + "st_longrange", + SimpleActionState( + "LongRangeActionServer", + LongRangeAction, + goal_slots=["target_lat", "target_long"], ), + transitions={ + "succeeded": "st_longrange_complete", + "aborted": "st_longrange_fallback", + "preempted": "st_longrange_fallback", + }, + remapping={"target_lat": "target_lat", "target_long": "target_long"}, ) - def on_exit_stShortRange(self) -> None: - # self.timer.shutdown() - pass - - # Defined for lambda function, not part of state machine architecture - def _blink_complete(self) -> None: - # Blinks the color off for 0.5 sec, the other 0.5 sec we sleep - set_matrix_color(COLOR_COMPLETE) + # st_longrange_fallback + StateMachine.add( + "st_longrange_fallback", + SimpleActionState( + "LongRangeActionServer", + LongRangeAction, + goal_slots=["target_lat", "target_long"], + ), + transitions={ + "succeeded": "st_longrange_complete", + "aborted": "st_longrange_abort", + "preempted": "st_longrange_abort", + }, + remapping={"target_lat": "target_lat", "target_long": "target_long"}, + ) - def _wait_for_user_input(self) -> None: - # Make sure service is running - rospy.wait_for_service("wait_for_user_input_service") - try: - # Define proxy of service (this would be the client calling the service) - wait_for_user_input = rospy.ServiceProxy( - "wait_for_user_input_service", Empty - ) - # This will run the service. The code will stay here until the user inputs a value ("c"). - wait_for_user_input() - except rospy.ServiceException as e: - print(e) + # st_longrange_abort + StateMachine.add( + "st_longrange_abort", + St_Return(), + transitions={"longrange": "st_init_compass"}, + ) - # If there is a next coordinate, go back to long range. If not, it is complete - if self._mgr.next_coordinate(): - print("Should Enter event complete") - self.evComplete() - else: - self.evNotWaiting() + # st_longrange_complete + StateMachine.add( + "st_longrange_complete", + St_Longrange_Complete(), + transitions={ + "longrange_only": "st_waypoint_complete", + "shortrange": "st_search", + }, + ) - def on_enter_stWaypointSuccess(self) -> None: - print("\non enter stWaypointSuccess") + # st_search + StateMachine.add( + "st_search", + SimpleActionState( + "SearchActionServer", + SearchStateAction, + goal_slots=["initial_lat", "initial_long", "target_id"], + result_slots=["final_lat", "final_long"], + ), + transitions={ + "succeeded": "st_shortrange_setup", + "aborted": "st_init_compass", + "preempted": "st_init_compass", + }, + remapping={ + "initial_lat": "search_lat", + "initial_long": "search_long", + "final_lat": "search_lat", + "final_long": "search_long", + "target_id": "shortrange_target_type" + }, + ) - # Runs every one second, this blinks the LED to say we finished short range - self._complete_blinker = rospy.Timer( - rospy.Duration.from_sec(1), lambda _: self._blink_complete() + # st_shortrange_setup + StateMachine.add( + "st_shortrange_setup", + St_Shortrange_Setup(), + transitions={"shortrange": "st_shortrange"}, + remapping={"shortrange_target_type": "shortrange_target_type"}, ) - # This runs the wait for user input just once (oneshot = True). 
- self._check_input = rospy.Timer( - rospy.Duration.from_sec(0.5), - lambda _: self._wait_for_user_input(), - oneshot=True, + # st_shortrange + StateMachine.add( + "st_shortrange", + SimpleActionState( + "ShortRangeActionServer", ShortRangeAction, goal_slots=["target_id"] + ), + transitions={ + "succeeded": "st_waypoint_complete", + "aborted": "st_search", + "preempted": "st_search", + }, + remapping={"target_id": "shortrange_target_type"}, ) - def on_exit_stWaypointSuccess(self) -> None: - self._complete_blinker.shutdown() - self._check_input.shutdown() + # st_waypoint_complete + StateMachine.add( + "st_waypoint_complete", + St_Waypoint_Complete(), + transitions={"next_waypoint": "st_init_compass", "complete": "succeeded"}, + remapping={"target_lat": "target_lat", "target_long": "target_long"}, + ) - def on_enter_stComplete(self) -> None: - print("We finished, wooooo") + sm.execute() if __name__ == "__main__": - rospy.init_node("nav_state_machine", anonymous=False) - statemachine = NavStateMachine(CoordinateManager()) # Initialize state machine - rospy.spin() # Runs nav_state_machine topic - -## @} -## @} + main() diff --git a/src/wr_logic_ai/nodes/state_machine/wait_for_user_input.py b/src/wr_logic_ai/nodes/state_machine/wait_for_user_input.py index 7df06a4e..2e6c8900 100755 --- a/src/wr_logic_ai/nodes/state_machine/wait_for_user_input.py +++ b/src/wr_logic_ai/nodes/state_machine/wait_for_user_input.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """ @file @defgroup wr_logic_ai_state_machine_ai @{ @@ -9,43 +9,52 @@ @{ """ - import rospy -from std_srvs.srv import Empty, EmptyResponse +import actionlib -# TODO(@bennowotny): Should be an action server due to time taken, preemptibility +from wr_logic_ai.msg import WaitForInputAction, WaitForInputActionGoal -def wait_for_user_input(req): - """callback function for the service +class WaitForUserInputActionServer: + def __init__(self, name) -> None: + self._action_name = name + self._as = actionlib.SimpleActionServer( + self._action_name, + WaitForInputAction, + execute_cb=self.wait_for_user_input, + auto_start=False, + ) + self._as.start() - Args: - req (service message): Empty + def wait_for_user_input(self, goal: WaitForInputActionGoal): + """ + Callback function for the action server - Returns: - EmmptyResponse: None - """ - print("Service called") - while True: - if input("Enter c to continue: ") == "c": - return EmptyResponse() + Args: + goal (actionlib goal): Empty msg + """ + while not rospy.is_shutdown(): + if self._as.is_preempt_requested(): + self._as.set_preempted() + return + if input("Enter c to continue: ") == "c": + self._as.set_succeeded() + return -def wait_for_user_input_server(): - """initializes rospy service""" - rospy.init_node("wait_for_user_input_server") - s = rospy.Service("wait_for_user_input_service", Empty, wait_for_user_input) - print("wait for user input server initialized") - rospy.spin() + self._as.set_aborted() -# TODO: Figure what to do with this -def shutdown_server(): - rospy.signal_shutdown("Ended user signal") +def main(): + """initializes rospy action server""" + rospy.init_node("wait_for_user_input_action") + wait_for_input_action = WaitForUserInputActionServer("WaitForUserInputActionServer") + print("wait for user input server initialized") + rospy.spin() if __name__ == "__main__": - wait_for_user_input_server() + main() ## }@ ## }@ diff --git a/src/wr_logic_ai/nodes/travel_timer.py b/src/wr_logic_ai/nodes/travel_timer.py deleted file mode 100644 index 85fde51e..00000000 --- 
a/src/wr_logic_ai/nodes/travel_timer.py +++ /dev/null @@ -1,54 +0,0 @@ -''' -@ingroup wr_logic_ai -@defgroup wr_logic_ai Travel Timer -@brief Calculates the maximum travel time between two or all coordinates to know when -to skip the coordinate or to restart searching - -Attributes(s): - speed - speed of the rover in meters per second - -Methods(s): - calculate_time(distance) - Calculates maximum travel time between two coordinates - calculate_time(distance) - Calculates maximum travel time between all coordinates -''' - -SPEED = 0.5 # in meters per second, - # purposefully slow to give more time for obstacle avoidance - # and because true speed is inknown - # (motor power outputs are set to some value between 0.3 and 0.5, - # which does not correlate to speed) - -def calc_point_time(distance) -> float: - ''' - Description: Calculates the maximum time it should take the rover to travel from one - coordinate to another coordinate. If the rover reaches this limit, the coordinate - is labeled as unreachable due to obstacle avoidance, and it will skip the - coordinate and move on to the next one. - - Parameter(s): - distance - the distance from the current coordinate to the target coordinate/point (in meters) - - Return(s): - time - the time it takes the rover to travel the distance between two coordinates (in seconds) - ''' - time = distance / SPEED - return time - -def calc_state_time(radius = 20) -> float: - ''' - Description: Calculates the maximum time it should take the rover to visit all - coordinates during the searching state. The spiral pattern uses 4 meters as a base, - so the distance between every coordinate must be a multiple of 4. - - Parameter(s): - radius - the radius of the square spiral (in meters), - correlates to the maximum distance the starting coordinate is to the target - - Return(s): - time - the time it takes the rover to visit all coordinates (in seconds) - ''' - total_distance = (0.5 * radius + 1) * (2 * radius + 4) - total_time = total_distance / SPEED - - return total_time - \ No newline at end of file diff --git a/src/wr_logic_ai/nodes/vision_target_detection.py b/src/wr_logic_ai/nodes/vision_target_detection.py deleted file mode 100755 index 1dd463bc..00000000 --- a/src/wr_logic_ai/nodes/vision_target_detection.py +++ /dev/null @@ -1,97 +0,0 @@ -#!/usr/bin/env python - -##@defgroup wr_shortrange_ai -# @{ -# @defgroup wr_shortrange_ai_vision_node Vision Target Detection Node -# @brief ROS Node for publishing vision data -# @details This ROS Node reads video input from a stream or camera. -# It reads a frame and processes it using [detect_aruco()](@ref wr_logic_ai.shortrange.aruco_lib.detect_aruco). -# Then, the data for all ArUco tags detected is published as a VisionTarget message. 
-# @{ - -import rospy -import cv2 as cv -import numpy as np -import wr_logic_ai.shortrange.aruco_lib as aruco_lib -from wr_logic_ai.msg import VisionTarget - -## Width of the camera frame, in pixels -CAMERA_WIDTH = 1280 -## Height of the camera frame, in pixels -CAMERA_HEIGHT = 720 -## Name of the VisionTarget topic to publish to -vision_topic = rospy.get_param("~vision_topic") - - -def process_corners(target_id: int, corners: np.ndarray) -> VisionTarget: - """ - Creates a VisionTarget message based on the detected ArUco tag - - @param target_id (int): ArUco tag ID - @param corners (np.ndarray): ArUco tag corners - @returns VisionTarget: VisionTarget message defined in msg/VisionTarget.msg - """ - # Find the middle of the ArUco tag in the frame - side_lengths = [] - min_x = corners[0][0] - max_x = corners[0][0] - for i in range(len(corners)): - side_lengths.append(np.linalg.norm(corners[i - 1] - corners[i])) - min_x = min(min_x, corners[i][0]) - max_x = max(max_x, corners[i][0]) - x_offset = (min_x + max_x - CAMERA_WIDTH) / 2 - - # Estimate the distance of the ArUco tag in meters - distance_estimate = aruco_lib.estimate_distance_m(corners) - - return VisionTarget(target_id, x_offset, distance_estimate, True) - - -def main(): - """ - @brief Vision processing node main function - - This function initializes and runs a node to read camera input and publish ArUco tag data to a topic. - """ - pub = rospy.Publisher(vision_topic, VisionTarget, queue_size=10) - rospy.init_node("vision_target_detection") - - rate = rospy.Rate(10) - - # Retrieve video stream from parameter server - # If no vision stream is specified, try to use camera directly - stream_url = rospy.get_param("~video_stream") - if stream_url is not None and stream_url != "": - cap = cv.VideoCapture(stream_url) - else: - cap = cv.VideoCapture(0) - cap.set(cv.CAP_PROP_FRAME_WIDTH, CAMERA_WIDTH) - cap.set(cv.CAP_PROP_FRAME_HEIGHT, CAMERA_HEIGHT) - - if not cap.isOpened(): - rospy.logerr("Failed to open camera") - exit() - - while not rospy.is_shutdown(): - # Read frame and publish detected targets - ret, frame = cap.read() - if not ret: - rospy.logerr("Failed to read frame") - else: - (corners, ids, _) = aruco_lib.detect_aruco(frame) - if ids is not None: - for i, target_id in enumerate(ids): - pub.publish(process_corners(target_id[0], corners[i][0])) - else: - # Publish even when no target is found to constantly run the navigation callback - pub.publish(VisionTarget(0, 0, 0, False)) - rate.sleep() - - cap.release() - - -if __name__ == "__main__": - main() - -## @} -# @} diff --git a/src/wr_logic_ai/src/wr_logic_ai/color_matrix.py b/src/wr_logic_ai/src/wr_logic_ai/color_matrix.py new file mode 100644 index 00000000..8da127ce --- /dev/null +++ b/src/wr_logic_ai/src/wr_logic_ai/color_matrix.py @@ -0,0 +1,30 @@ +import rospy +import time + +from wr_led_matrix.srv import ( + led_matrix as LEDMatrix, + led_matrixRequest as LEDMatrixRequest, +) + +## LED matrix color for when the rover is navigating towards the target using autonomous navigation +COLOR_AUTONOMOUS = LEDMatrixRequest(RED=255, GREEN=0, BLUE=0) +## LED matrix color for when the rover is navigating towards the previous target in teleop +COLOR_TELEOP = LEDMatrixRequest(RED=0, GREEN=0, BLUE=255) +## LED matrix color for when the rover has reached its target +COLOR_COMPLETE = LEDMatrixRequest(RED=0, GREEN=255, BLUE=0) +## LED matrix color for when the rover has encountered an error while executing autonomous navigation +COLOR_ERROR = LEDMatrixRequest(RED=255, GREEN=0, BLUE=255) +## 
Initial LED matrix color
+COLOR_NONE = LEDMatrixRequest(RED=0, GREEN=0, BLUE=0)
+
+
+def set_matrix_color(color: LEDMatrixRequest) -> None:
+ """Helper function for setting the LED matrix color
+
+ @param color The color to set the LED matrix to
+ """
+ matrix_srv = rospy.ServiceProxy("/led_matrix", LEDMatrix)
+ matrix_srv.wait_for_service()
+ matrix_srv.call(COLOR_NONE)
+ time.sleep(0.5)
+ matrix_srv.call(color)
diff --git a/src/wr_logic_ai/src/wr_logic_ai/coordinate_manager.py b/src/wr_logic_ai/src/wr_logic_ai/coordinate_manager.py
index dd631e68..8b947849 100644
--- a/src/wr_logic_ai/src/wr_logic_ai/coordinate_manager.py
+++ b/src/wr_logic_ai/src/wr_logic_ai/coordinate_manager.py
@@ -18,9 +18,9 @@ def previous_coordinate():
 CoordinateManager.ptr = max(0, CoordinateManager.ptr - 1)
 
 @staticmethod
- def next_coordinate() -> bool:
+ def has_next_coordinate() -> bool:
 CoordinateManager.ptr = CoordinateManager.ptr + 1
- return CoordinateManager.ptr >= len(CoordinateManager.coordinates)
+ return CoordinateManager.ptr < len(CoordinateManager.coordinates)
 
 @staticmethod
 def read_coordinates_file():
diff --git a/src/wr_logic_ai/src/wr_logic_ai/coordinates copy.json b/src/wr_logic_ai/src/wr_logic_ai/coordinates copy.json
new file mode 100755
index 00000000..062ac3fe
--- /dev/null
+++ b/src/wr_logic_ai/src/wr_logic_ai/coordinates copy.json
@@ -0,0 +1,50 @@
+[
+ {
+ "lat": 38.42390688,
+ "long": -110.7852678,
+ "note": "Start",
+ "target": "none"
+ },
+ {
+ "lat": 38.42426632,
+ "long": -110.7851849,
+ "note": "GNSS Post 1",
+ "target": "none"
+ },
+ {
+ "lat": 38.42466641,
+ "long": -110.784802,
+ "note": "GNSS Post 2",
+ "target": "none"
+ },
+ {
+ "lat": 38.42512274,
+ "long": -110.785219,
+ "note": "AR Post 1",
+ "target": "aruco 1"
+ },
+ {
+ "lat": 38.42487533,
+ "long": -110.7856899,
+ "note": "AR Post 2",
+ "target": "aruco 2"
+ },
+ {
+ "lat": 38.42447147,
+ "long": -110.7861446,
+ "note": "AR Post 3",
+ "target": "aruco 3"
+ },
+ {
+ "lat": 38.42425466,
+ "long": -110.7855771,
+ "note": "Mallet",
+ "target": "mallet"
+ },
+ {
+ "lat": 38.42437042,
+ "long": -110.7857335,
+ "note": "Water Bottle",
+ "target": "bottle"
+ }
+]
diff --git a/src/wr_logic_ai/src/wr_logic_ai/coordinates.json b/src/wr_logic_ai/src/wr_logic_ai/coordinates.json
index 83ac3f39..a41d6756 100755
--- a/src/wr_logic_ai/src/wr_logic_ai/coordinates.json
+++ b/src/wr_logic_ai/src/wr_logic_ai/coordinates.json
@@ -1,14 +1,38 @@
 [
 {
- "lat": 10,
- "long": 10,
- "note": "EHall Entrance",
- "num_vision_posts": 0
+ "lat": 38.3717950,
+ "long": -110.7045599,
+ "note": "Start",
+ "target": "none"
 },
 {
- "lat": 11,
- "long": 10,
- "note": "EHall Entrance",
- "num_vision_posts": 0
+ "lat": 38.3720316,
+ "long": -110.7045954,
+ "note": "Top",
+ "target": "none"
+ },
+ {
+ "lat": 38.3718175,
+ "long": -110.7044866,
+ "note": "Middle",
+ "target": "aruco 1"
+ },
+ {
+ "lat": 38.3718175,
+ "long": -110.7044866,
+ "note": "Middle Right",
+ "target": "aruco 2"
+ },
+ {
+ "lat": 38.3717950,
+ "long": -110.7045599,
+ "note": "Start",
+ "target": "mallet"
+ },
+ {
+ "lat": 38.3718175,
+ "long": -110.7044866,
+ "note": "Middle",
+ "target": "bottle"
 }
-]
\ No newline at end of file
+]
diff --git a/src/wr_logic_ai/src/wr_logic_ai/shortrange/shortrange_state_machine.puml b/src/wr_logic_ai/src/wr_logic_ai/shortrange/shortrange_state_machine.puml
deleted file mode 100755
index 9748bac5..00000000
--- a/src/wr_logic_ai/src/wr_logic_ai/shortrange/shortrange_state_machine.puml
+++ /dev/null
@@ -1,10 -0,0 @@
-@startuml ShortrangeStateMachine
-hide empty
description - -[*] -right-> shortrange_callback : ShortRangeGoal -shortrange_callback -right-> VisionNavigation : Vision -shortrange_callback -up-> success : GPS only -VisionNavigation -> success : Success -VisionNavigation -> fail : Fail - -@enduml diff --git a/src/wr_logic_ai/src/wr_logic_ai/shortrange/vision_navigation.py b/src/wr_logic_ai/src/wr_logic_ai/shortrange/vision_navigation.py deleted file mode 100644 index b018d858..00000000 --- a/src/wr_logic_ai/src/wr_logic_ai/shortrange/vision_navigation.py +++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env python - -##@defgroup wr_shortrange_ai -# @{ -# @defgroup wr_shortrange_ai_navigation Vision Navigation -# @brief ShortrangeState that subscribes to vision data and drives to the ArUco tag -# @{ - -import math -from enum import Enum -from typing import Tuple, Optional - -import rospy -from geometry_msgs.msg import PoseStamped - -from shortrange_util import ShortrangeStateEnum, ShortrangeState, TargetCache -from wr_logic_ai.msg import VisionTarget -from wr_drive_msgs.msg import DriveTrainCmd -from wr_logic_ai.shortrange.shortrange_util import ShortrangeStateEnum - -## Distance from target to stop at (in meters) -STOP_DISTANCE_M = 3 -## Base speed for robot to move at -SPEED = 0.1 -## Factor to for applying turn -kP = 0.01 - -## Name of the VisionTarget topic to subscribe to -vision_topic = rospy.get_param("~vision_topic") - -# TODO : Add code for mux -## Name of the drivetrain topic to publish to -drivetrain_topic = rospy.get_param("~motor_speeds") -## Publisher to set motor speeds -drivetrain_pub = rospy.Publisher(drivetrain_topic, DriveTrainCmd, queue_size=1) - -## Number of seconds to keep the cache -CACHE_EXPIRY_SECS = 1 - - -def drive(left: float, right: float): - """ - Helper function for publishing motor speeds - - @param left (float): Left motor speed - @param right (float): Right motor speed - """ - drivetrain_pub.publish(left, right) - - -class VisionNavigation(ShortrangeState): - """ - A ShortrangeState class for navigating to a ArUco tag - """ - - def __init__(self) -> None: - ## Boolean for state progress. - ## True if the state is done, false otherwise. - self.is_done = False - ## Boolean to state result or failure. - ## True if the state is successful, false otherwise. - self.success = False - ## TargetCache variable to store the last seen target data - self.target_cache = None - - def target_callback(self, msg: VisionTarget): - if msg.valid: - # Update cache if the VisionTarget message is valid - self.target_cache = TargetCache(rospy.get_time(), msg) - - if ( - self.target_cache is not None - and rospy.get_time() - self.target_cache.timestamp < CACHE_EXPIRY_SECS - ): - if self.target_cache.msg.distance_estimate < STOP_DISTANCE_M: - # Stop the rover when it is close to the ArUco tag - rospy.loginfo(f"Reached target {self.target_cache.msg.id}") - drive(0, 0) - - self.is_done = True - self.success = True - return - - # Drive the rover to the target if the cache was updated recently - turn = kP * self.target_cache.msg.x_offset - drive(SPEED + turn, SPEED - turn) - else: - # Turn the rover to look for the ArUco tag - drive(SPEED, -SPEED) - - def run(self) -> ShortrangeStateEnum: - """ - Run target navigation logic. - Creates a subscriber for the VisionTarget topic. 
- - @return ShortrangeStateEnum: The next state to execute - """ - rate = rospy.Rate(10) - - sub = rospy.Subscriber(vision_topic, VisionTarget, self.target_callback) - - # Wait for the rover to finish navigating - while not self.is_done: - rospy.sleep(rate) - - sub.unregister() - if self.success: - return ShortrangeStateEnum.SUCCESS - return ShortrangeStateEnum.FAIL - - -## @} -# @} diff --git a/src/wr_logic_ai/srv/SearchPatternService.srv b/src/wr_logic_ai/srv/SearchPatternService.srv deleted file mode 100644 index 7f98b39e..00000000 --- a/src/wr_logic_ai/srv/SearchPatternService.srv +++ /dev/null @@ -1,3 +0,0 @@ -bool input ---- -bool output \ No newline at end of file diff --git a/src/wr_logic_longrange/CMakeLists.txt b/src/wr_logic_longrange/CMakeLists.txt new file mode 100644 index 00000000..b20962e2 --- /dev/null +++ b/src/wr_logic_longrange/CMakeLists.txt @@ -0,0 +1,210 @@ +cmake_minimum_required(VERSION 3.0.2) +project(wr_logic_longrange) + +## Compile as C++11, supported in ROS Kinetic and newer +# add_compile_options(-std=c++11) + +## Find catkin macros and libraries +## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz) +## is used, also find other catkin packages +find_package(catkin REQUIRED COMPONENTS + geometry_msgs + rospy + sensor_msgs + std_msgs + message_generation + actionlib_msgs +) + +## System dependencies are found with CMake's conventions +# find_package(Boost REQUIRED COMPONENTS system) + + +## Uncomment this if the package has a setup.py. This macro ensures +## modules and global scripts declared therein get installed +## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html +catkin_python_setup() + +################################################ +## Declare ROS messages, services and actions ## +################################################ + +## To declare and build messages, services or actions from within this +## package, follow these steps: +## * Let MSG_DEP_SET be the set of packages whose message types you use in +## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...). +## * In the file package.xml: +## * add a build_depend tag for "message_generation" +## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET +## * If MSG_DEP_SET isn't empty the following dependency has been pulled in +## but can be declared for certainty nonetheless: +## * add a exec_depend tag for "message_runtime" +## * In this file (CMakeLists.txt): +## * add "message_generation" and every package in MSG_DEP_SET to +## find_package(catkin REQUIRED COMPONENTS ...) +## * add "message_runtime" and every package in MSG_DEP_SET to +## catkin_package(CATKIN_DEPENDS ...) +## * uncomment the add_*_files sections below as needed +## and list every .msg/.srv/.action file to be processed +## * uncomment the generate_messages entry below +## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...) 
+ +## Generate messages in the 'msg' folder +# add_message_files( +# FILES +# Message1.msg +# Message2.msg +# ) + +## Generate services in the 'srv' folder +# add_service_files( +# FILES +# Service1.srv +# Service2.srv +# ) + +## Generate actions in the 'action' folder +add_action_files( + FILES + LongRange.action + InitCompass.action +) + +## Generate added messages and services with any dependencies listed here +generate_messages( + DEPENDENCIES + std_msgs # Or other packages containing msgs + actionlib_msgs +) + +################################################ +## Declare ROS dynamic reconfigure parameters ## +################################################ + +## To declare and build dynamic reconfigure parameters within this +## package, follow these steps: +## * In the file package.xml: +## * add a build_depend and a exec_depend tag for "dynamic_reconfigure" +## * In this file (CMakeLists.txt): +## * add "dynamic_reconfigure" to +## find_package(catkin REQUIRED COMPONENTS ...) +## * uncomment the "generate_dynamic_reconfigure_options" section below +## and list every .cfg file to be processed + +## Generate dynamic reconfigure parameters in the 'cfg' folder +# generate_dynamic_reconfigure_options( +# cfg/DynReconf1.cfg +# cfg/DynReconf2.cfg +# ) + +################################### +## catkin specific configuration ## +################################### +## The catkin_package macro generates cmake config files for your package +## Declare things to be passed to dependent projects +## INCLUDE_DIRS: uncomment this if your package contains header files +## LIBRARIES: libraries you create in this project that dependent projects also need +## CATKIN_DEPENDS: catkin_packages dependent projects also need +## DEPENDS: system dependencies of this project that dependent projects also need +catkin_package( +# INCLUDE_DIRS include +# LIBRARIES wr_logic_longrange +# CATKIN_DEPENDS actionlib_msgs +# DEPENDS system_lib +) + +########### +## Build ## +########### + +## Specify additional locations of header files +## Your package locations should be listed before other locations +include_directories( +# include +# ${catkin_INCLUDE_DIRS} +) + +## Declare a C++ library +# add_library(${PROJECT_NAME} +# src/${PROJECT_NAME}/wr_logic_longrange.cpp +# ) + +## Add cmake target dependencies of the library +## as an example, code may need to be generated before libraries +## either from message generation or dynamic reconfigure +# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) + +## Declare a C++ executable +## With catkin_make all packages are built within a single CMake context +## The recommended prefix ensures that target names across packages don't collide +# add_executable(${PROJECT_NAME}_node src/wr_logic_longrange_node.cpp) + +## Rename C++ executable without prefix +## The above recommended prefix causes long target names, the following renames the +## target back to the shorter version for ease of user use +## e.g. 
"rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node" +# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "") + +## Add cmake target dependencies of the executable +## same as for the library above +# add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) + +## Specify libraries to link a library or executable target against +# target_link_libraries(${PROJECT_NAME}_node +# ${catkin_LIBRARIES} +# ) + +############# +## Install ## +############# + +# all install targets should use catkin DESTINATION variables +# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html + +## Mark executable scripts (Python etc.) for installation +## in contrast to setup.py, you can choose the destination +# catkin_install_python(PROGRAMS +# scripts/my_python_script +# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} +# ) + +## Mark executables for installation +## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html +# install(TARGETS ${PROJECT_NAME}_node +# RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} +# ) + +## Mark libraries for installation +## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html +# install(TARGETS ${PROJECT_NAME} +# ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} +# LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} +# RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +# ) + +## Mark cpp header files for installation +# install(DIRECTORY include/${PROJECT_NAME}/ +# DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} +# FILES_MATCHING PATTERN "*.h" +# PATTERN ".svn" EXCLUDE +# ) + +## Mark other files for installation (e.g. launch and bag files, etc.) +# install(FILES +# # myfile1 +# # myfile2 +# DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +# ) + +############# +## Testing ## +############# + +## Add gtest based cpp test target and link libraries +# catkin_add_gtest(${PROJECT_NAME}-test test/test_wr_logic_longrange.cpp) +# if(TARGET ${PROJECT_NAME}-test) +# target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME}) +# endif() + +## Add folders to be run by python nosetests +# catkin_add_nosetests(test) diff --git a/src/wr_logic_ai/nodes/README.md b/src/wr_logic_longrange/README.md similarity index 95% rename from src/wr_logic_ai/nodes/README.md rename to src/wr_logic_longrange/README.md index 76a72be7..0c93fcd7 100644 --- a/src/wr_logic_ai/nodes/README.md +++ b/src/wr_logic_longrange/README.md @@ -1,7 +1,6 @@ # Longrange AI -@defgroup wr_logic_ai_longrange_ai Longrange Navigation -@ingroup wr_logic_ai +@defgroup wr_logic_longrange wr_logic_longrange @brief Longrange Navigation Code This document describes the existing code and logic for long range navigation, which is primarily used in the autonomous navigation mission of URC. 
diff --git a/src/wr_logic_longrange/action/InitCompass.action b/src/wr_logic_longrange/action/InitCompass.action new file mode 100644 index 00000000..ce42cea5 --- /dev/null +++ b/src/wr_logic_longrange/action/InitCompass.action @@ -0,0 +1,5 @@ +#goal definition +--- +#result definition +--- +#feedback definition \ No newline at end of file diff --git a/src/wr_logic_ai/action/LongRange.action b/src/wr_logic_longrange/action/LongRange.action similarity index 100% rename from src/wr_logic_ai/action/LongRange.action rename to src/wr_logic_longrange/action/LongRange.action diff --git a/src/wr_logic_ai/launch/obstacle_avoidance.rviz b/src/wr_logic_longrange/config/obstacle_avoidance.rviz old mode 100644 new mode 100755 similarity index 80% rename from src/wr_logic_ai/launch/obstacle_avoidance.rviz rename to src/wr_logic_longrange/config/obstacle_avoidance.rviz index b0cab0f9..ed0462dd --- a/src/wr_logic_ai/launch/obstacle_avoidance.rviz +++ b/src/wr_logic_longrange/config/obstacle_avoidance.rviz @@ -10,7 +10,7 @@ Panels: - /Pose1 - /Pose2 Splitter Ratio: 0.5 - Tree Height: 476 + Tree Height: 482 - Class: rviz/Selection Name: Selection - Class: rviz/Tool Properties @@ -77,7 +77,7 @@ Visualization Manager: Size (Pixels): 3 Size (m): 0.019999999552965164 Style: Flat Squares - Topic: /scan_rviz + Topic: /scan Unreliable: false Use Fixed Frame: true Use rainbow: true @@ -111,7 +111,7 @@ Visualization Manager: Shaft Length: 0.5 Shaft Radius: 0.02500000037252903 Shape: Arrow - Topic: /debug_zero + Topic: /actual_heading Unreliable: false Value: true Enabled: true @@ -142,7 +142,7 @@ Visualization Manager: Views: Current: Class: rviz/Orbit - Distance: 4.086756229400635 + Distance: 3.107663154602051 Enable Stereo Rendering: Stereo Eye Separation: 0.05999999865889549 Stereo Focal Distance: 1 @@ -158,17 +158,37 @@ Visualization Manager: Invert Z Axis: false Name: Current View Near Clip Distance: 0.009999999776482582 - Pitch: 1.2253977060317993 + Pitch: 1.1603977680206299 Target Frame: - Yaw: 4.53857946395874 - Saved: ~ + Yaw: 4.72858190536499 + Saved: + - Class: rviz/Orbit + Distance: 3.107663154602051 + Enable Stereo Rendering: + Stereo Eye Separation: 0.05999999865889549 + Stereo Focal Distance: 1 + Swap Stereo Eyes: false + Value: false + Field of View: 0.7853981852531433 + Focal Point: + X: 0 + Y: 0 + Z: 0 + Focal Shape Fixed Size: true + Focal Shape Size: 0.05000000074505806 + Invert Z Axis: false + Name: Orbit + Near Clip Distance: 0.009999999776482582 + Pitch: 1.1603977680206299 + Target Frame: + Yaw: 4.72858190536499 Window Geometry: Displays: collapsed: false Height: 773 Hide Left Dock: false Hide Right Dock: false - QMainWindow State: 
000000ff00000000fd00000004000000000000015600000267fc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003d00000267000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000010f00000267fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000003d00000267000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000005580000003efc0100000002fb0000000800540069006d0065010000000000000558000003bc00fffffffb0000000800540069006d00650100000000000004500000000000000000000002e70000026700000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + QMainWindow State: 000000ff00000000fd0000000400000000000001560000026bfc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003b0000026b000000c700fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000010f0000026bfc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000003b0000026b000000a000fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000005580000003efc0100000002fb0000000800540069006d00650100000000000005580000030700fffffffb0000000800540069006d00650100000000000004500000000000000000000002e70000026b00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 Selection: collapsed: false Time: @@ -178,5 +198,5 @@ Window Geometry: Views: collapsed: false Width: 1368 - X: 72 - Y: 27 + X: -32 + Y: -32 diff --git a/src/wr_logic_ai/launch/obstacle_avoidance_mock.rviz b/src/wr_logic_longrange/config/obstacle_avoidance_mock.rviz old mode 100644 new mode 100755 similarity index 63% rename from src/wr_logic_ai/launch/obstacle_avoidance_mock.rviz rename to src/wr_logic_longrange/config/obstacle_avoidance_mock.rviz index 52770509..94a9638c --- a/src/wr_logic_ai/launch/obstacle_avoidance_mock.rviz +++ b/src/wr_logic_longrange/config/obstacle_avoidance_mock.rviz @@ -1,16 +1,15 @@ Panels: - Class: 
rviz/Displays - Help Height: 78 + Help Height: 0 Name: Displays Property Tree Widget: Expanded: - /Global Options1 - /Status1 - /LaserScan1 - - /Pose1 - - /Pose2 + - /Marker4 Splitter Ratio: 0.5 - Tree Height: 476 + Tree Height: 917 - Class: rviz/Selection Name: Selection - Class: rviz/Tool Properties @@ -75,9 +74,9 @@ Visualization Manager: Queue Size: 10 Selectable: true Size (Pixels): 3 - Size (m): 0.019999999552965164 + Size (m): 0.05000000074505806 Style: Flat Squares - Topic: /scan + Topic: /scan_rviz Unreliable: false Use Fixed Frame: true Use rainbow: true @@ -86,13 +85,13 @@ Visualization Manager: Axes Length: 1 Axes Radius: 0.10000000149011612 Class: rviz/Pose - Color: 255; 25; 0 + Color: 0; 0; 255 Enabled: true Head Length: 0.15000000596046448 Head Radius: 0.07500000298023224 Name: Pose Queue Size: 10 - Shaft Length: 0.5 + Shaft Length: 3 Shaft Radius: 0.02500000037252903 Shape: Arrow Topic: /debug_heading @@ -102,7 +101,7 @@ Visualization Manager: Axes Length: 1 Axes Radius: 0.10000000149011612 Class: rviz/Pose - Color: 0; 0; 255 + Color: 0; 255; 0 Enabled: true Head Length: 0.15000000596046448 Head Radius: 0.07500000298023224 @@ -111,9 +110,57 @@ Visualization Manager: Shaft Length: 0.5 Shaft Radius: 0.02500000037252903 Shape: Arrow - Topic: /debug_zero + Topic: /actual_heading + Unreliable: false + Value: true + - Alpha: 1 + Axes Length: 1 + Axes Radius: 0.10000000149011612 + Class: rviz/Pose + Color: 255; 0; 0 + Enabled: true + Head Length: 0.15000000596046448 + Head Radius: 0.07500000298023224 + Name: Pose + Queue Size: 10 + Shaft Length: 5 + Shaft Radius: 0.014999999664723873 + Shape: Arrow + Topic: /delta_heading Unreliable: false Value: true + - Class: rviz/Marker + Enabled: true + Marker Topic: /pose_marker + Name: Marker + Namespaces: + "": true + Queue Size: 94 + Value: true + - Class: rviz/Marker + Enabled: true + Marker Topic: /wRover_marker + Name: Marker + Namespaces: + "": true + Queue Size: 94 + Value: true + - Class: rviz/Marker + Enabled: true + Marker Topic: /pose_circle_marker + Name: Marker + Namespaces: + "": true + Queue Size: 100 + Value: true + - Class: rviz/Marker + Enabled: true + Marker Topic: flag_marker + Name: Marker + Namespaces: + "": true + Queue Size: 100 + Value: true Enabled: true Global Options: Background Color: 48; 48; 48 @@ -142,7 +189,7 @@ Visualization Manager: Views: Current: Class: rviz/Orbit - Distance: 15.243936538696289 + Distance: 10.580819129943848 Enable Stereo Rendering: Stereo Eye Separation: 0.05999999865889549 Stereo Focal Distance: 1 @@ -150,25 +197,45 @@ Visualization Manager: Value: false Field of View: 0.7853981852531433 Focal Point: - X: 0 - Y: 0 - Z: 0 + X: -0.8854994177818298 + Y: 0.30706119537353516 + Z: 0.005089759826660156 Focal Shape Fixed Size: true Focal Shape Size: 0.05000000074505806 Invert Z Axis: false Name: Current View Near Clip Distance: 0.009999999776482582 - Pitch: 1.1603977680206299 + Pitch: 1.1347962617874146 Target Frame: - Yaw: 4.72858190536499 - Saved: ~ + Yaw: 4.589034080505371 + Saved: + - Class: rviz/Orbit + Distance: 15.243936538696289 + Enable Stereo Rendering: + Stereo Eye Separation: 0.05999999865889549 + Stereo Focal Distance: 1 + Swap Stereo Eyes: false + Value: false + Field of View: 0.7853981852531433 + Focal Point: + X: 0 + Y: 0 + Z: 0 + Focal Shape Fixed Size: true + Focal Shape Size: 0.05000000074505806 + Invert Z Axis: false + Name: Orbit + Near Clip Distance: 0.009999999776482582 + Pitch: 1.1603977680206299 + Target Frame: + Yaw: 4.72858190536499 Window Geometry: Displays: 
collapsed: false - Height: 773 + Height: 1136 Hide Left Dock: false - Hide Right Dock: false - QMainWindow State: 000000ff00000000fd00000004000000000000015600000267fc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003d00000267000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000010f00000267fc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073010000003d00000267000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000005580000003efc0100000002fb0000000800540069006d0065010000000000000558000003bc00fffffffb0000000800540069006d00650100000000000004500000000000000000000002e70000026700000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + Hide Right Dock: true + QMainWindow State: 000000ff00000000fd00000004000000000000029e000003d2fc0200000008fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000001ed000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000003d000003d2000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261000000010000010f0000026bfc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000003b0000026b000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000490000000a9fc0100000001fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000007380000003efc0100000002fb0000000800540069006d0065010000000000000738000003bc00fffffffb0000000800540069006d0065010000000000000450000000000000000000000494000003d200000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 Selection: collapsed: false Time: @@ -177,6 +244,6 @@ Window Geometry: collapsed: false Views: collapsed: false - Width: 1368 + Width: 1848 X: 72 Y: 27 diff --git a/src/wr_logic_longrange/launch/fake_data.launch b/src/wr_logic_longrange/launch/fake_data.launch new file mode 100755 index 00000000..a43810ed --- /dev/null +++ b/src/wr_logic_longrange/launch/fake_data.launch @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/src/wr_logic_longrange/launch/init_calibrate_compass.launch 
b/src/wr_logic_longrange/launch/init_calibrate_compass.launch new file mode 100644 index 00000000..c06f7da2 --- /dev/null +++ b/src/wr_logic_longrange/launch/init_calibrate_compass.launch @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/wr_logic_longrange/launch/long_range.launch b/src/wr_logic_longrange/launch/long_range.launch new file mode 100755 index 00000000..fd08bda2 --- /dev/null +++ b/src/wr_logic_longrange/launch/long_range.launch @@ -0,0 +1,22 @@ + + + + + + + + + + + + + + + + diff --git a/src/wr_logic_longrange/launch/test_lidar_w_imu.launch b/src/wr_logic_longrange/launch/test_lidar_w_imu.launch new file mode 100755 index 00000000..22c32270 --- /dev/null +++ b/src/wr_logic_longrange/launch/test_lidar_w_imu.launch @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/wr_logic_longrange/launch/test_lidar_w_state_machine.launch b/src/wr_logic_longrange/launch/test_lidar_w_state_machine.launch new file mode 100755 index 00000000..329f1a26 --- /dev/null +++ b/src/wr_logic_longrange/launch/test_lidar_w_state_machine.launch @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/wr_logic_ai/src/wr_logic_ai/shortrange/__init__.py b/src/wr_logic_longrange/nodes/__init__.py similarity index 100% rename from src/wr_logic_ai/src/wr_logic_ai/shortrange/__init__.py rename to src/wr_logic_longrange/nodes/__init__.py diff --git a/src/wr_logic_ai/nodes/angle_calculations.py b/src/wr_logic_longrange/nodes/angle_calculations.py similarity index 98% rename from src/wr_logic_ai/nodes/angle_calculations.py rename to src/wr_logic_longrange/nodes/angle_calculations.py index b0fdf78f..6bd8c75c 100755 --- a/src/wr_logic_ai/nodes/angle_calculations.py +++ b/src/wr_logic_longrange/nodes/angle_calculations.py @@ -1,7 +1,7 @@ """@file -@defgroup wr_logic_ai_longrange_ai +@defgroup wr_logic_longrange @{ -@defgroup wr_logic_ai_longrange_ai_angle_calculations Angle Calculations +@defgroup wr_logic_longrange_angle_calculations Angle Calculations @brief Helper class for angle calculation @details Package of functions for angle calculations using the Haversine formula. Takes account for the curvature of the Earth. diff --git a/src/wr_logic_ai/nodes/angle_to_drive_methods.py b/src/wr_logic_longrange/nodes/angle_to_drive_methods.py similarity index 90% rename from src/wr_logic_ai/nodes/angle_to_drive_methods.py rename to src/wr_logic_longrange/nodes/angle_to_drive_methods.py index 7e6b73fa..e2281946 100755 --- a/src/wr_logic_ai/nodes/angle_to_drive_methods.py +++ b/src/wr_logic_longrange/nodes/angle_to_drive_methods.py @@ -1,7 +1,7 @@ """@file -@defgroup wr_logic_ai_longrange_ai +@defgroup wr_logic_longrange @{ -@defgroup wr_logic_ai_longrange_ai_angle_to_drive_methods Angle to Drive Calculations +@defgroup wr_logic_longrange_angle_to_drive_methods Angle to Drive Calculations @brief Helper class for converting angle headings to motor drive power @details Intermediate logic layer used to convert rover target heading to signals to motor drive power. 
@{ @@ -47,12 +47,10 @@ def piecewise_linear(heading: float, target_angle: float) -> DriveTrainCmd: else: return DriveTrainCmd(left_value=1, right_value=max(-2 / 45 * x + 1, -1)) - """ - if x < 0: - return DriveTrainCmd(left_value = max(2/90*x+1,-1),right_value = 1) - else: - return DriveTrainCmd(left_value = 1, right_value = max(-2/90*x+1,-1)) - """ + # if x < 0: + # return DriveTrainCmd(left_value = max(2/90*x+1,-1),right_value = 1) + # else: + # return DriveTrainCmd(left_value = 1, right_value = max(-2/90*x+1,-1)) def logistic(heading: float, target_angle: float) -> DriveTrainCmd: @@ -67,8 +65,8 @@ def logistic(heading: float, target_angle: float) -> DriveTrainCmd: x = get_x(heading, target_angle) return DriveTrainCmd( - left_value=2 / (1 + math.exp(-(x + 45) / 10)) - 1, - right_value=2 / (1 + math.exp((x - 45) / 10)) - 1, + left_value=2 / (1 + math.exp(-(x + 30) / 9)) - 1, + right_value=2 / (1 + math.exp((x - 30) / 9)) - 1, ) diff --git a/src/wr_logic_longrange/nodes/calculate_heading_action_server.py b/src/wr_logic_longrange/nodes/calculate_heading_action_server.py new file mode 100755 index 00000000..270f0669 --- /dev/null +++ b/src/wr_logic_longrange/nodes/calculate_heading_action_server.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python3 + +import rospy +import math +import actionlib +from wr_logic_longrange.msg import InitCompassAction, InitCompassGoal +from wrevolution.srv import SetHeading +from wr_drive_msgs.msg import DriveTrainCmd +from wr_hsi_sensing.msg import CoordinateMsg + +## Timeout time for when we declare a long range navigation as failed +LONG_RANGE_TIMEOUT_TIME = rospy.Duration(3) + +DRIVE_SPEED = 0.15 + +class InitCompassActionServer(object): + def __init__(self, name) -> None: + rospy.loginfo("initing compass set action server") + # Publisher + self.drive_pub = rospy.Publisher( + rospy.get_param("~motor_speeds"), DriveTrainCmd, queue_size=10 + ) + + # Subscribe to GPS Data + self.subGPS = rospy.Subscriber( + "/gps_coord_data", CoordinateMsg, self.update_gps_coord + ) + + self.current_lat = 0 + self.current_long = 0 + self.coord_time = 0 + + self._action_name = name + self._as = actionlib.SimpleActionServer( + self._action_name, + InitCompassAction, + execute_cb=self.execute_callback, + auto_start=False, + ) + self._as.start() + + # update current position based on gps coordinates + def update_gps_coord(self, msg: CoordinateMsg) -> None: + self.current_lat = msg.latitude + self.current_long = msg.longitude + self.coord_time = rospy.get_time() + + def execute_callback(self, goal: InitCompassGoal): + rate = rospy.Rate(10) + drive_msg = DriveTrainCmd( + left_value=DRIVE_SPEED, + right_value=DRIVE_SPEED + ) + + # Wait for subscriber to get data + self.rate = 100 + r = rospy.Rate(self.rate) + while self.coord_time == 0: + r.sleep() + + # Get current lat and long before moving + lat_before = self.current_lat + long_before = self.current_long + #rospy.loginfo("Before: " + str(self.current_lat) + " " + str(self.current_long)) + # Driving forward for 2 seconds + start_time = rospy.get_rostime() + while ( + rospy.get_rostime() - start_time < LONG_RANGE_TIMEOUT_TIME + and not rospy.is_shutdown() + ): + rate.sleep() + self.drive_pub.publish(drive_msg) + #rospy.loginfo(drive_msg) + self.drive_pub.publish(0, 0) + #rospy.loginfo("MOTORS STOPPED") + #rospy.loginfo("Before: " + str(self.current_lat) + " " + str(self.current_long)) + + # TODO set failed if current coordinate is too old (check coord_time) + angle_heading = calculate_angle( + long_before, lat_before, self.current_long, 
self.current_lat + ) + success = set_heading_client(angle_heading) + if success: + rospy.loginfo("Heading set successfully!") + else: + rospy.loginfo("Failed to set heading.") + + return self._as.set_succeeded() + + +def set_heading_client(heading): + rospy.wait_for_service("/pigeon/set_heading") + try: + set_heading = rospy.ServiceProxy("/pigeon/set_heading", SetHeading) + return set_heading(heading) + except rospy.ServiceException as e: + rospy.logerr("Service call failed:", e) + + +def calculate_angle(x1, y1, x2, y2): + """ + Calculate the angle (in degrees) between two sets of coordinates in math coordinates. + """ + # Calculate the differences in x and y coordinates + dx = x2 - x1 + dy = y2 - y1 + + # Calculate the angle using arctan2, convert from radians to degrees + angle_radians = math.atan2(dy, dx) + angle_degrees = math.degrees(angle_radians) + + # Ensure angle is between 0 and 360 degrees + if angle_degrees < 0: + angle_degrees += 360 + + return angle_degrees + + +if __name__ == "__main__": + rospy.init_node("calculate_init_heading") + initComp = InitCompassActionServer("InitCompassActionServer") + rospy.spin() diff --git a/src/wr_logic_ai/nodes/finder.py b/src/wr_logic_longrange/nodes/finder.py similarity index 70% rename from src/wr_logic_ai/nodes/finder.py rename to src/wr_logic_longrange/nodes/finder.py index 35ce0dbf..c3f36008 100755 --- a/src/wr_logic_ai/nodes/finder.py +++ b/src/wr_logic_longrange/nodes/finder.py @@ -1,7 +1,7 @@ """@file -@defgroup wr_logic_ai_longrange_ai +@defgroup wr_logic_longrange @{ -@defgroup wr_logic_ai_longrange_ai_finder Finder +@defgroup wr_logic_longrange_finder Finder @brief Calculates the optimal LiDAR/heading to drive towards, taking obstacle avoidance into account @details @{ @@ -13,19 +13,18 @@ import rospy from sensor_msgs.msg import LaserScan from geometry_msgs.msg import PoseArray, Pose +from sensor_msgs.msg import LaserScan from copy import deepcopy import os import pdb import pickle -## Width of the rover (in meters) -ROVER_WIDTH = 0.5 +ROVER_WIDTH = 1.5 ## Publisher for LiDAR data for debugging on rviz scan_rviz_pub = rospy.Publisher("/scan_rviz", LaserScan, queue_size=10) # TODO (@bennowotny ) This should be disable-able for bandwidth -## Publisher for obstacle avoidance heading for debugging on rviz -window_pub = rospy.Publisher("/lidar_windows", PoseArray, queue_size=1) +# window_pub = rospy.Publisher("/lidar_windows", PoseArray, queue_size=1) def calculate_anti_window(d: float) -> int: @@ -82,28 +81,21 @@ def is_wide_valley(sector_i: int, sector_f: int, max_valley: int) -> bool: return 1 + sector_f - sector_i > max_valley -def offset_lidar_data(data, sector_angle, is_rviz=False): +# Transforms the raw lidar data from compass coordinates (0 at north, clockwise) to math coordinates(0 at east, counterclockwise) + + +def offset_lidar_data(data: LaserScan, sector_angle: float, is_rviz=False): """ - Transforms the raw lidar data from compass coordinates (0 at north, clockwise) to math - coordinates (0 at east, counterclockwise) + Returns + Lidar data comes as 0 being forward and going counterclockwise - @param data : The LiDAR data - @param sector_angle (float): The number of degrees contained within one sector - @param is_rviz (bool, optional): If the returned data is graphed in rviz or not. This is done as - @param rviz processes data different compare to the rest of our code. Defaults to False. 
- @return List[int]: The offsetted LiDAR data + @param sector_angle (float): transforms angle (0-360) to be based on angle increments from the lidar + @return offest_data: lidar data transformed so that the 0 angle is the right side of the lidar going counterclockwise """ - # TODO: Run the code to check if coordinate system is good. offset_data = [0] * len(data) - if is_rviz: - for i in range(len(data)): - offset_data[ - (i + math.floor(90 / sector_angle) + math.floor(len(data) / 2)) - % len(data) - ] = data[i] - else: - for i in range(len(data)): - offset_data[(i + math.floor(90 / sector_angle)) % len(data)] = data[i] + data = list(data) + offset_data = list((data[math.floor(270 / sector_angle) :])) + offset_data.extend(list((data[: (math.floor(270 / sector_angle))]))) return offset_data @@ -134,18 +126,20 @@ def get_valley( rviz_data = deepcopy(data) # rviz_data.ranges = gaussian_smooth.gaussian_filter1d(rviz_data.ranges, smoothing) - rviz_data.ranges = offset_lidar_data(rviz_data.ranges, sector_angle, is_rviz=True) - scan_rviz_pub.publish(rviz_data) - # rospy.loginfo(f"{data.ranges}") - # TODO: remove dependency on this variable by making the mock script more like real hardware input if rospy.get_param("~wrover_hw") == "REAL": # hist = offset_lidar_data(gaussian_smooth.gaussian_filter1d(data.ranges, smoothing), sector_angle) - hist = offset_lidar_data(data.ranges, sector_angle) + hist = offset_lidar_data(data.ranges, sector_angle, is_rviz=False) # For testing: else: # hist = gaussian_smooth.gaussian_filter1d(data.ranges, smoothing) - hist = data.ranges + hist = list(data.ranges) + + rviz_data.ranges = hist + scan_rviz_pub.publish(rviz_data) + + # Front of scanner + # rospy.loginfo(hist[math.floor(90/sector_angle)]) # This is to prevent expanding constant obstacles from behind the robot, which can # inadvertently and unpredictably (due to sensor noise) block out most of the view @@ -157,66 +151,18 @@ def get_valley( pickle.dump(hist, output_file) output_file.close() - # TODO : The names here are a little unclear, maybe some comments/renames (F2)? - # This chunk code will find obstacles, expand the edges of the obstacle for the length of half the robot (with some extra - # for redundecy), and make a list of all the obstacles. - # format: [[left bound index , right bound index], ...] - obstacle_list = list() - # this will be an array of two values with the beginning and ending index of the obstacle. 
- one_obstacle = [] - for i in range(len(hist)): - if hist[i] < threshold: - one_obstacle.append(i) - # This prevents single noisy points from blocking out large portions of the drive window - # TODO (@bennowotny ): This 'obstacle too small' magic number should be a named constant - elif len(one_obstacle) > 1: - left_bound = len(hist) - right_bound = 0 - for i in range(len(one_obstacle)): - # Calculate size of anti-window and add to obstacle bounds - # pass in distance to target to caculate angle that allows robot to pass through - angleToIncrease = ( - calculate_anti_window(hist[one_obstacle[i]]) / sector_angle - ) - - # Update left and right bound - left_bound = max(min(left_bound, one_obstacle[i] - angleToIncrease), 0) - right_bound = min( - max(right_bound, one_obstacle[i] + angleToIncrease), len(hist) - ) - - # Check to see if the obstacle we just found can actually be merged with a previous obstacle - while len(obstacle_list) > 0 and obstacle_list[-1][1] >= left_bound: - left_bound = min(left_bound, obstacle_list[-1][0]) - right_bound = max(right_bound, obstacle_list[-1][1]) - del obstacle_list[-1] - obstacle_list.append([left_bound, right_bound]) - one_obstacle.clear() - - # TODO (@bennowotny ): This code is the same as what's in the loop, so it should be abstracted out to its own function - if len(one_obstacle) != 0: - left_bound = len(hist) - right_bound = 0 - for i in range(len(one_obstacle)): - # Calculate size of anti-window and add to obstacle bounds - # pass in distance to target to caculate angle that allows robot to pass through - angleToIncrease = ( - calculate_anti_window(hist[one_obstacle[i]]) / sector_angle - ) + # get two obstacles lists for 2 thresholds in order to get rid of + # any obstacle that is far away. obstacle_list will grab all the obstacles at the normal + # threshold, obstacle list close will grab all the obstacles again, except will now not + # contain obstacles farther out + obstacle_list = get_obstacle_list(sector_angle, threshold, hist) + obstacle_list_close = get_obstacle_list(sector_angle, threshold / 2, hist) - # Update left and right bound - left_bound = max(min(left_bound, one_obstacle[i] - angleToIncrease), 0) - right_bound = min( - max(right_bound, one_obstacle[i] + angleToIncrease), len(hist) - ) - - # Check to see if the obstacle we just found can actually be merged with a previous obstacle - while len(obstacle_list) > 0 and obstacle_list[-1][1] >= left_bound: - left_bound = min(left_bound, obstacle_list[-1][0]) - right_bound = max(right_bound, obstacle_list[-1][1]) - del obstacle_list[-1] - obstacle_list.append([left_bound, right_bound]) - one_obstacle.clear() + # if the close obstacles are empty use the default list, this allows + # us to navigate around closer obstacles without worring about farther obstacles + # that are not in our way yet + if len(obstacle_list_close) != 0: + obstacle_list = obstacle_list_close # At this point we make an inverse list of the obstacles to have a 2d list of all # the places that we can drive through (our windows) @@ -252,7 +198,7 @@ def get_valley( pose.orientation.z = math.sin(math.radians(i * sector_angle) / 2) pose.orientation.w = math.cos(math.radians(i * sector_angle) / 2) window_msg.poses.append(pose) - window_pub.publish(window_msg) + # window_pub.publish(window_msg) # Initialize best valley array best_valley = [] @@ -271,6 +217,75 @@ def get_valley( return best_valley +def get_obstacle_list(sector_angle: float, threshold: float, hist: list) -> list: + """ + Using the laserscan data coming in, it will 
join objects together to make an obstacle list. This
+ will then be used to help find the best valley for the rover to go through. Can be varied through
+ changing the threshold (how close the obstacles need to be to be included in the obstacle list)
+
+ @param sector_angle (float): The number of degrees contained within one sector
+ @param threshold (float): The threshold distance which triggers obstacle avoidance (in meters)
+ @param hist (list): The LiDAR reading data as a list
+ @return List[List[int]]: The list of left bounds and right bounds on each obstacle
+ """
+ # this will be an array of two values with the beginning and ending index of the obstacle.
+ obstacle_list = list()
+
+ one_obstacle = []
+ for i in range(len(hist)):
+ # This prevents single noisy points from blocking out large portions of the drive window
+ if hist[i] < threshold:
+ one_obstacle.append(i)
+ elif len(one_obstacle) >= 1:
+ obstacle_list, one_obstacle = update_obstacle_bound(
+ hist, one_obstacle, sector_angle, obstacle_list
+ )
+
+ # TODO (@bennowotny ): This code is the same as what's in the loop, so it should be abstracted out to its own function
+ if len(one_obstacle) != 0:
+ obstacle_list, one_obstacle = update_obstacle_bound(
+ hist, one_obstacle, sector_angle, obstacle_list
+ )
+
+ return obstacle_list
+
+
+def update_obstacle_bound(
+ hist: list, one_obstacle: list, sector_angle: float, obstacle_list: list
+):
+ """
+ Updates the list of obstacles with new bounds calculated from lidar data
+
+ @param hist (list): The lidar data as a list
+ @param one_obstacle (list): Indices in the list that relate to the current obstacle
+ @param sector_angle (float): Number of degrees in one sector
+ @param obstacle_list (list): List of all obstacles
+ @return list, list: the updated obstacle list and the cleared one_obstacle buffer
+ """
+ left_bound = len(hist)
+ right_bound = 0
+ for i in range(len(one_obstacle)):
+ # Calculate size of anti-window and add to obstacle bounds
+ # pass in distance to target to calculate angle that allows robot to pass through
+ angleToIncrease = calculate_anti_window(hist[one_obstacle[i]]) / sector_angle
+
+ # Update left and right bound
+ left_bound = max(min(left_bound, one_obstacle[i] - angleToIncrease), 0)
+ right_bound = min(
+ max(right_bound, one_obstacle[i] + angleToIncrease), len(hist)
+ )
+
+ # Check to see if the obstacle we just found can actually be merged with a previous obstacle
+ while len(obstacle_list) > 0 and obstacle_list[-1][1] >= left_bound:
+ left_bound = min(left_bound, obstacle_list[-1][0])
+ right_bound = max(right_bound, obstacle_list[-1][1])
+ del obstacle_list[-1]
+ obstacle_list.append([left_bound, right_bound])
+ one_obstacle.clear()
+
+ return obstacle_list, one_obstacle
+
+
 def get_navigation_angle(
 target: int, threshold: float, data: LaserScan, smoothing_constant: float = 3
 ) -> float:
@@ -311,7 +326,7 @@
 if get_target_distance(best_valley[0], best_valley[1], target, sector_angle) == 0:
 # Report the current target angle; no adjustment needed
 # print("target * sector_angle = " + str(target * sector_angle))
- rospy.loginfo("In target valley")
+ # rospy.loginfo("In target valley")
 return target * sector_angle
 
 # If the valley is wide...
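As a sanity check on the bound-merging behaviour of the get_obstacle_list / update_obstacle_bound helpers added above, here is a simplified, self-contained sketch. It substitutes a fixed padding in sectors for calculate_anti_window(), whose body is not shown in this diff, so it only illustrates the padding-and-merge idea rather than the node itself.

# Simplified illustration only; pad_sectors stands in for calculate_anti_window().
def merge_obstacles(hist, threshold, pad_sectors):
    """Return [left_bound, right_bound] pairs for padded, merged below-threshold runs."""
    obstacles = []
    current = []
    for i, r in enumerate(hist):
        if r < threshold:
            current.append(i)
        elif current:
            obstacles = close_out(current, obstacles, pad_sectors, len(hist))
            current = []
    if current:
        obstacles = close_out(current, obstacles, pad_sectors, len(hist))
    return obstacles

def close_out(current, obstacles, pad, n):
    left = max(current[0] - pad, 0)
    right = min(current[-1] + pad, n)
    # Merge with the previous obstacle if the padded bounds overlap
    while obstacles and obstacles[-1][1] >= left:
        left = min(left, obstacles[-1][0])
        right = max(right, obstacles[-1][1])
        obstacles.pop()
    obstacles.append([left, right])
    return obstacles

ranges = [10.0] * 180
ranges[40:45] = [1.0] * 5   # one close obstacle around sector 40
ranges[47:50] = [1.2] * 3   # a second one that the padding should merge with the first
print(merge_obstacles(ranges, threshold=3.0, pad_sectors=4))  # -> [[36, 53]]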
@@ -334,7 +349,7 @@ def get_navigation_angle( ) # Aim for the center of this new max_valley valley (this helps avoid accidentally clipping an edge of the robot) - rospy.loginfo("Obstacle in the way, turning to wide valley") + # rospy.loginfo("Obstacle in the way, turning to wide valley") return ((nearest_sector + border_sector) / 2.0) * sector_angle # If the valley is narrow... @@ -342,7 +357,7 @@ def get_navigation_angle( # Follow the probotcol as defined above for narrow valleys # Aim for the center of the valley - rospy.loginfo("Obstacle in the way, turning to narrow valley") + # rospy.loginfo("Obstacle in the way, turning to narrow valley") return ((best_valley[0] + best_valley[1]) / 2.0) * sector_angle diff --git a/src/wr_logic_longrange/nodes/inputFakeData.py b/src/wr_logic_longrange/nodes/inputFakeData.py new file mode 100755 index 00000000..2f52e782 --- /dev/null +++ b/src/wr_logic_longrange/nodes/inputFakeData.py @@ -0,0 +1,288 @@ +#!/usr/bin/env python3 +from numbers import Integral +import random + +from numpy import integer +import rospy +from std_msgs.msg import Float64 +from sensor_msgs.msg import LaserScan +from wr_hsi_sensing.msg import CoordinateMsg + +from wr_drive_msgs.msg import DriveTrainCmd +from geometry_msgs.msg import PoseStamped +from finder import offset_lidar_data +from copy import deepcopy + +import math + +t = 100 +# dist = 7 +RATE = 10 +ROBOT_WIDTH = 1.06 # In meters + + +# coord[x,y] +class Obstacle: + """ + Sends a row of random points to act as walls/obstacles for lidar testing + """ + + def __init__(self, coord1, coord2): + self.coord1 = coord1 + self.coord2 = coord2 + + def moveRight(self, x): + self.coord1[0] = self.coord1[0] + x + self.coord2[0] = self.coord2[0] + x + + def moveUp(self, y): + self.coord1[1] = self.coord1[1] + y + self.coord2[1] = self.coord2[1] + y + + def getAngle1(self) -> int: + return int(pointsToAngle(self.coord1[0], self.coord1[1])) + + def getAngle2(self) -> int: + return int(pointsToAngle(self.coord2[0], self.coord2[1])) + + def setObstacleFront(self): + self.coord1[0] = 3 + self.coord2[0] = -2 + + def testAngle(self): + self.coord1[0] = random.randint(-5, 5) + self.coord1[1] = random.randint(5, 10) + self.coord2[0] = random.randint(-5, 5) + self.coord2[1] = random.randint(5, 10) + # rospy.logerr("1:" + str(self.coord1) + "2:" + str(self.coord2)) + + # Switch which coord is one and which is two as algorithm works when coord 1 is the first one scanning from right to left + if self.getAngle1() > self.getAngle2(): + self.switchCoords() + + def getSlope(self): + if self.coord2[0] - self.coord1[0] == 0: + return 99999 + else: + return (self.coord2[1] - self.coord1[1]) / (self.coord2[0] - self.coord1[0]) + + def setCoord1(self, x, y): + self.coord1[0] = x + self.coord1[1] = y + + def setCoord2(self, x, y): + self.coord2[0] = x + self.coord2[1] = y + + def switchCoords(self): + placeholder = self.coord1 + self.coord1 = self.coord2 + self.coord2 = placeholder + + +obstacle = Obstacle([1, 5], [-1, 5]) +# obstacle = Obstacle([2,6],[3,9]) TODO:Test this error case + + +def get_laser_ranges(t=0): + inputData = [] + + angle1 = obstacle.getAngle1() + angle2 = obstacle.getAngle2() + + if angle1 > 180: + angle1 = 0 + + if obstacle.getAngle1() > obstacle.getAngle2() and obstacle.getAngle1() < 180: + obstacle.switchCoords() + + m_slope = obstacle.getSlope() + for t in range(180): + if t >= angle1 and t <= angle2: + if t != 0: + # Using point-slope formula and knowing the equation for the line between the two points and the line made by the 
angle, + # you can find the intersection of those lines and plot that point in rviz + + # y=(y_1+x_1*m_slope)/(1-(m_slope/m_angle)) + # x=y/m_angle + m_angle = math.tan(math.radians(t)) + + y = (obstacle.coord1[1] - obstacle.coord1[0] * m_slope) / ( + 1 - (m_slope / m_angle) + ) + x = y / m_angle + + inputData.append(cartesianToRadian(x, y)) + + else: + inputData.append(10) + # if(t >= angle1 and t <= angle1 + ((angle2 - angle1) / 2)): + # if(t != 0): + # #Using point-slope formula and knowing the equation for the line between the two points and the line made by the angle, + # #you can find the intersection of those lines and plot that point in rviz + + # #y=(y_1+x_1*m_slope)/(1-(m_slope/m_angle)) + # #x=y/m_angle + # m_angle = math.tan(math.radians(t)) + + # y = (obstacle.coord1[1] - obstacle.coord1[0]*m_slope * 2)/(1-(m_slope * 2/m_angle)) + # x = y/m_angle + + # inputData.append(cartesianToRadian(x,y)) + + # elif (t >= angle1 and t <= angle2): + # if(t != 0): + # #Using point-slope formula and knowing the equation for the line between the two points and the line made by the angle, + # #you can find the intersection of those lines and plot that point in rviz + + # #y=(y_1+x_1*m_slope)/(1-(m_slope/m_angle)) + # #x=y/m_angle + # m_angle = math.tan(math.radians(t)) + + # y = (obstacle.coord1[1] - obstacle.coord1[0]*m_slope)/(1-(m_slope/m_angle)) + # x = y/m_angle + + # inputData.append(cartesianToRadian(x,y)) + # else: + # inputData.append(10) + + for i in range(180): + inputData.append(0.3) + + return inputData + + +def pointsToAngle(x, y) -> float: + return math.degrees(math.atan2(y, x)) % 360 # the Mod 360 turns -160 to 200 + + +def cartesianToRadian(x, y) -> float: + return math.sqrt(x * x + y * y) + + +def run_mock_data() -> None: + global mock_heading + global laser_adjust + + distanceData = rospy.Publisher("/scan", LaserScan, queue_size=10) + mock_heading_pub = rospy.Publisher("/heading_data", Float64, queue_size=10) + mock_gps_pub = rospy.Publisher("/gps_coord_data", CoordinateMsg, queue_size=10) + zero_pub = rospy.Publisher("/debug_zero", PoseStamped, queue_size=10) + + zero_msg = PoseStamped() + zero_msg.pose.position.x = 0 + zero_msg.pose.position.y = 0 + zero_msg.pose.position.z = 0 + zero_msg.pose.orientation.x = 0 + zero_msg.pose.orientation.y = 0 + zero_msg.pose.orientation.z = 0 + zero_msg.pose.orientation.w = 1 + zero_msg.header.frame_id = "laser" + frameCount = 0 + + # Testing publishers and subscribers + rospy.Subscriber("/control/drive_system/cmd", DriveTrainCmd, updateHeading) + + laser = LaserScan() + laser.angle_max = 2 * math.pi + laser.angle_increment = math.pi / 180 + laser.time_increment = 0 + laser.scan_time = 1 + laser.range_min = 0 + laser.range_max = 150 + laser.ranges = get_laser_ranges(0) + laser.header.frame_id = "laser" + laser.intensities = [] + + mock_heading = 0 + laser_adjust = 0 + mock_gps = CoordinateMsg() + mock_gps.latitude = 0 + mock_gps.longitude = 0 + + rospy.loginfo("sent fake nav data") + + sleeper = rospy.Rate(RATE) + while not rospy.is_shutdown(): + laser.ranges = get_laser_ranges(t) + distanceData.publish(laser) + + zero_msg.header.seq = frameCount + zero_msg.header.stamp = rospy.get_rostime() + frameCount += 1 + + mock_heading_pub.publish(mock_heading) + mock_gps_pub.publish(mock_gps) + zero_pub.publish(zero_msg) + sleeper.sleep() + + +def updateHeading(data) -> None: + global mock_heading + global t + # global dist + # global height + + # Move obstacle up and down + # distance = velocity * time + # distance = amount of movement of 
obstacle + # velocity = average of wheel speeds * max velocity + # time = (1/heartz) [Heartz is rospy.sleep] + maxVelocity = 0.8 ### in meters per second + obstacle.moveUp((-(data.left_value + data.right_value)) * maxVelocity * (1 / RATE)) + + # amount rotated will depend on our angular velocity + # amount rotated = angular velocity * time + # amount rotated = laser_adjust + # ------------------ + # w(angular velocity) = v/r + # angular velocity = (velocity of left value + velocity of right value) / radius of robot + # angular velocity = (data.left_value*maxVelocity + data.right_value*maxVelocity) / radius of robot + # angular velocity will return in radians per second + + # ----------------- + laser_adjust = (data.left_value * maxVelocity - data.right_value * maxVelocity) / ( + ROBOT_WIDTH / 2 + ) + laser_adjust = laser_adjust * (1 / RATE) + # rospy.logerr(laser_adjust) + + mock_heading = mock_heading + math.degrees(laser_adjust) + + # rospy.logerr(mock_heading) + + # Rotate obstacle: + # Refer to this graph for the equation https://www.desmos.com/calculator/anccue0csk + + angle_adjust = -laser_adjust + + obstacle.setCoord1( + obstacle.coord1[0] * math.cos((angle_adjust)) + + obstacle.coord1[1] * math.sin((angle_adjust)), + obstacle.coord1[1] * math.cos((angle_adjust)) + - obstacle.coord1[0] * math.sin((angle_adjust)), + ) + + obstacle.setCoord2( + obstacle.coord2[0] * math.cos((angle_adjust)) + + obstacle.coord2[1] * math.sin((angle_adjust)), + obstacle.coord2[1] * math.cos((angle_adjust)) + - obstacle.coord2[0] * math.sin((angle_adjust)), + ) + + # rospy.logerr(str(obstacle.coord2[0]) + " " + str(obstacle.coord2[1])) + + # ******* Reset obstacle when behind us *************# + if obstacle.coord1[1] < 0.1 and obstacle.coord2[1] < 0.1: + obstacle.testAngle() + + +if __name__ == "__main__": + rospy.init_node("publish_fake_data", anonymous=False) + + if rospy.get_param("/long_range_action_server/wrover_hw") == "MOCK": + # Run fake data + rospy.loginfo("Running fake data") + run_mock_data() + + rospy.spin() diff --git a/src/wr_logic_ai/nodes/long_range_action_server.py b/src/wr_logic_longrange/nodes/long_range_action_server.py similarity index 61% rename from src/wr_logic_ai/nodes/long_range_action_server.py rename to src/wr_logic_longrange/nodes/long_range_action_server.py index 6cc1e0d3..6ca3d48a 100755 --- a/src/wr_logic_ai/nodes/long_range_action_server.py +++ b/src/wr_logic_longrange/nodes/long_range_action_server.py @@ -1,9 +1,9 @@ #!/usr/bin/env python3 """@file -@defgroup wr_logic_ai_longrange_ai +@defgroup wr_logic_longrange @{ -@defgroup wr_logic_ai_longrange_ai_action_server Long Range Action Server +@defgroup wr_logic_longrange_action_server Long Range Action Server @brief Action server for long range navigation @details This action server is used in conjunction with the state machine to drive the long range navigation logics. 
The action server determines if a navigation is successful (when it reaches the @@ -14,9 +14,20 @@ import rospy import actionlib -from wr_logic_ai.msg import LongRangeAction, LongRangeGoal +from wr_logic_longrange.msg import LongRangeAction, LongRangeGoal import obstacle_avoidance +# Temporary: +from std_msgs.msg import Float64 +import testing_rviz +from wr_logic_longrange.msg import ( + InitCompassAction, + InitCompassGoal, +) +from wr_drive_msgs.msg import DriveTrainCmd +from sensor_msgs.msg import LaserScan + + # TODO: check timeout time length validity ## Timeout time for when we declare a long range navigation as failed LONG_RANGE_TIMEOUT_TIME = rospy.Duration(1000) @@ -30,22 +41,30 @@ class LongRangeActionServer(object): """ def __init__(self, name) -> None: - """ - Initializes the action server - - @param name (String): Name of the action server - """ - + rospy.loginfo("initing long range action server") self._action_name = name + # Publisher + self.drive_pub = rospy.Publisher( + rospy.get_param("~motor_speeds"), DriveTrainCmd, queue_size=10 + ) obstacle_avoidance.initialize() + + # Subscribe to lidar data + #rospy.wait_for_message("/scan", LaserScan, timeout=None) + + self._as = actionlib.SimpleActionServer( self._action_name, LongRangeAction, execute_cb=self.execute_callback, auto_start=False, ) + #self._as.preempt_callback(self.stop_motors) self._as.start() + def stop_motors(self): + self.drive_pub.publish(0, 0) + def execute_callback(self, goal: LongRangeGoal): """ Executes the long range obstacle avoidance code, and triggers the corresponding state machine event @@ -54,14 +73,25 @@ def execute_callback(self, goal: LongRangeGoal): @param goal (LongRangeGoal): Goal for the navigation segment, which contains the GPS coordinates of the target """ - + rate = rospy.Rate(10) start_time = rospy.get_rostime() while ( rospy.get_rostime() - start_time < LONG_RANGE_TIMEOUT_TIME and not rospy.is_shutdown() + and not self._as.is_preempt_requested() ): + rate.sleep() if obstacle_avoidance.update_target(goal.target_lat, goal.target_long): + rospy.loginfo("SUCCESS LONG RANGE ACTION SERVER") + # Stop motors + self.stop_motors() return self._as.set_succeeded() + else: + # set motor to drive to target + msg = obstacle_avoidance.get_drive_power() + self.drive_pub.publish(msg) + + self.stop_motors() return self._as.set_aborted() @@ -69,6 +99,3 @@ def execute_callback(self, goal: LongRangeGoal): rospy.init_node("long_range_action_server") server = LongRangeActionServer("LongRangeActionServer") rospy.spin() - -## } -## } diff --git a/src/wr_logic_ai/nodes/obstacle_avoidance.py b/src/wr_logic_longrange/nodes/obstacle_avoidance.py similarity index 52% rename from src/wr_logic_ai/nodes/obstacle_avoidance.py rename to src/wr_logic_longrange/nodes/obstacle_avoidance.py index b509a9db..e66db158 100755 --- a/src/wr_logic_ai/nodes/obstacle_avoidance.py +++ b/src/wr_logic_longrange/nodes/obstacle_avoidance.py @@ -14,43 +14,46 @@ import math import rospy import time +from visualization_msgs.msg import Marker from finder import get_navigation_angle from angle_calculations import AngleCalculations import angle_to_drive_methods as angle_calc - from sensor_msgs.msg import LaserScan - from wr_hsi_sensing.msg import CoordinateMsg from wr_drive_msgs.msg import DriveTrainCmd -from geometry_msgs.msg import PoseStamped from std_msgs.msg import Float64 +import testing_rviz # Navigation parameters -## distance before obstacle avoidance logics is triggered (in meters) -LIDAR_THRESH_DISTANCE = 1 -## distance before rover 
believes it has reached the target (in meters) -NAV_THRESH_DISTANCE = 0.5 +# distance before obstacle avoidance logics is triggered (in meters) +LIDAR_THRESH_DISTANCE = 5 +# distance before rover believes it has reached the target (in meters) +NAV_THRESH_DISTANCE = 1 + +# Set the speed factor +speed_factor = 0.2 # initialize target angle to move forward +# current location current_lat = 0 current_long = 0 - -##Value robot is heading currently. East is 0. (Counterclockwise). Extected as a value from 0 to 360. cur_heading = 0 -##Gives target angle relative to East (counter-clockwise) + +# target location target_angle = 0 +target_sector = 0 -# target_sector = 0 -##Used to smooth over lidar data +# used for obtaining navigation angle and valleys smoothing_constant = 3 -# Get the speed multiplier of the current runtime for the obstacle_avoidance -# 0.2 is bugged -speed_factor = 0 -is_active = False -# Start the tasks managed to drive autonomously +# global speed factor updated through navigation +# speed_factor = 0.3 + +# final angle to drive to +result = 0 +# Start the tasks managed to drive autonomously def initialize() -> None: """Initialize publisher and subscribers for nodes @@ -67,41 +70,18 @@ def initialize() -> None: scan: Get values from lidar scan """ - global drive_pub - global smoothing_constant - global speed_factor - global heading_pub - global frameCount - global heading_msg - global raw_heading_pub - - # Publisher - drive_pub = rospy.Publisher( - rospy.get_param("~motor_speeds"), DriveTrainCmd, queue_size=1 - ) # Subscribe to gps coordinate data rospy.Subscriber("/gps_coord_data", CoordinateMsg, update_gps_coord) # Subscribe to heading data - rospy.Subscriber("/heading_data", Float64, update_heading) + rospy.Subscriber("/heading", Float64, update_heading) # Subscribe to lidar data rospy.Subscriber("/scan", LaserScan, update_navigation) - # TESING - heading_pub = rospy.Publisher("/debug_heading", PoseStamped, queue_size=1) - heading_msg = PoseStamped() - heading_msg.pose.position.x = 0 - heading_msg.pose.position.y = 0 - heading_msg.pose.position.z = 0 - heading_msg.pose.orientation.x = 0 - heading_msg.pose.orientation.y = 0 - heading_msg.header.frame_id = "laser" - frameCount = 0 - raw_heading_pub = rospy.Publisher("/target_heading_raw", Float64, queue_size=1) - +# update current position based on gps coordinates def update_gps_coord(msg: CoordinateMsg) -> None: global current_lat global current_long @@ -109,33 +89,34 @@ def update_gps_coord(msg: CoordinateMsg) -> None: current_long = msg.longitude +# extends from 0 to 360 degrees, heading is shifted based on east (90 is north -> 0) def update_heading(msg: Float64) -> None: """ Shifted from compass coordinates to math coordinates. - @param msg (Float64): Heading value received as value from 0 to 360. North is 0 clockwise. + @param msg (Float64): Heading value received as value from 0 to 360. North is 0 counter-clockwise. @param cur_heading East is 0. (Counterclockwise). Extected as a value from 0 to 360. 
""" global cur_heading - cur_heading = (90 - msg.data) % 360 # Shifting to East + # cur_heading = (90 + msg.data) % 360 # Shifting to East + cur_heading = msg.data # Init calibration shifts to east the initial heading + # rviz_sim_cur_heading_pub.publish(cur_heading) +# calculates difference of angles from -180 to 180 degrees def angle_diff(heading1: float, heading2: float) -> float: """ Returns relative angle difference from heading of robot to heading of target @param heading1 (float): Value of target relative to East (Counter-clockwise) @param heading2 (float): Value of heading relative to East (Counter-clockwise) - @return float: Value from -180 to 180. Starting from bottom of robot (Counter-Clockwise) + @return float: Value from -180 to 180. Starting from bottom of robot (Negatives in left hand, positive in the right side) """ diff = (heading1 - heading2 + 360) % 360 return (diff + 180) % 360 - 180 -# Calculate current heading and the planar target angle -# TODO: this should now be part of a action server callback function - - +# update angle from rover to target based on new current position def update_target(target_lat, target_long) -> bool: """ Updates target_angle with values from IMU which use gps coords @@ -148,13 +129,24 @@ def update_target(target_lat, target_long) -> bool: # Construct the planar target angle relative to east, accounting for curvature imu = AngleCalculations(current_lat, current_long, target_lat, target_long) + target_angle = imu.get_angle() % 360 + # rospy.loginfo("Lat_current: " + str(current_lat) + " Long_current: " + str(current_long)) + + # rospy.loginfo("Lat_goal: " + str(target_lat) + " Long_target: " + str(target_long)) + + # rospy.loginfo("Target Angle: " + str(target_angle)) + + # rospy.loginfo(imu.get_distance()) + # check if we are close to the target if imu.get_distance() < NAV_THRESH_DISTANCE: return True else: return False +# TODO handle cases where lidar data is not publishing, and update_navigation() does not get called +# Update the robot's navigation and drive it towards the target angle based on our best valley def update_navigation(data: LaserScan) -> None: """ Update the robot's navigation and drive it towards the target angle @@ -164,59 +156,57 @@ def update_navigation(data: LaserScan) -> None: @param msg: Get the DriveTrainCmd(motor values) relating to the heading of the robot and the resulting best navigation angle @param data: Lidar data received """ - - global frameCount + global result # rospy.loginfo(f"target angle: {target_angle}, current heading: {cur_heading}") - data_avg = sum(cur_range for cur_range in data.ranges) / len(data.ranges) - # print("Data Avg: " + str(data_avg)) - # TODO: data threshold might depend of lidar model, double check - # Change if units/lidar changes - # data_avg is above 0.5 almost always, but result stays the same (?) - # TODO (@bennowotny ): This depended on data_avg, why? - if True: - ## Gets best possible angle, considering obstacles - delta_heading = angle_diff(target_angle, cur_heading) - - result = get_navigation_angle( - ((((90 + delta_heading) % 360) + 360) % 360) - / math.degrees( - data.angle_increment - ), # sector angle, #Changes delta_heading to be based on the right of the robot as 0 (Counter-clockwise), and go in increments of sector angle. 
- LIDAR_THRESH_DISTANCE, - data, - smoothing_constant, - ) - - raw_heading_pub.publish(result) - # rospy.loginfo(f"raw heading: {result}") - - # Set the bounds of the speed multiplier - speed_factor = 0.3 - speed_factor = 0 if speed_factor < 0 else speed_factor - speed_factor = 1 if speed_factor > 1 else speed_factor - # Get the DriveTrainCmd relating to the heading of the robot and the resulting best navigation angle - # Reason we do 90 - result is to get a value where 0 is up, + is clockwise, and - is counterclockwise - msg = angle_calc.piecewise_linear(angle_diff(90, result), 0) - # rospy.loginfo(f"left drive value: {msg.left_value}, right drive value: {msg.right_value}") - # t += 2 - # if t > 90: # t for debugging purposes - # t = -90 - # Scale the resultant DriveTrainCmd by the speed multiplier - msg.left_value *= speed_factor # Right value was inverted, -1 "fixes" - msg.right_value *= speed_factor - # Publish the DriveTrainCmd to the topic - # print("Left Value: " + str(msg.left_value)) - # print("Right Value: " + str(msg.right_value)) - drive_pub.publish(msg) - - # TESTING - heading_msg.header.seq = frameCount - heading_msg.header.stamp = rospy.get_rostime() - frameCount += 1 - heading_msg.pose.orientation.z = math.sin(math.radians(result) / 2) - heading_msg.pose.orientation.w = math.cos(math.radians(result) / 2) - heading_pub.publish(heading_msg) + + ## Gets best possible angle, considering obstacles + delta_heading = angle_diff(target_angle, cur_heading) + + result = get_navigation_angle( + ((90 + delta_heading) % 360) + / math.degrees( + data.angle_increment + ), # sector angle, #Changes delta_heading to be based on the right of the robot as 0 (Counter-clockwise), and go in increments of sector angle. + LIDAR_THRESH_DISTANCE, + data, + smoothing_constant, + ) + + # rospy.loginfo(f"raw heading: {result}") + + # TESTING + # testing_rviz.update_navigation_rviz_sim(delta_heading, result, cur_heading) + + +def get_drive_power() -> DriveTrainCmd: + """ + Calculate and return the motor powers to drive towards the target angle + + @param delta_heading: Relative value of target from robot from -180 to 180. Starting from bottom of robot (Counter-Clockwise) + @param result: Angle to drive to based on target angle, current heading, and obstacles. Value given as a sector angle with right of robot being 0 (Counterclockwise). + @param msg: Get the DriveTrainCmd(motor values) relating to the heading of the robot and the resulting best navigation angle + @param data: Lidar data received + """ + # Get the DriveTrainCmd relating to the heading of the robot and the resulting best navigation angle + # Reason we do 90 - result is to get a value where 0 is up, + is clockwise, and - is counterclockwise + msg = angle_calc.logistic(angle_diff(90, result), 0) + + # rospy.loginfo(f"left drive value: {msg.left_value}, right drive value: {msg.right_value}") + # Scale the resultant DriveTrainCmd by the speed multiplier + msg.left_value *= speed_factor + msg.right_value *= speed_factor + + # if msg.left_value > msg.right_value: + # valueToTurn = "Turn right" + # elif msg.left_value < msg.right_value: + # valueToTurn = "Turn left" + # else: + # valueToTurn = "Stay straight" + # rospy.loginfo("Drive to: " + str(valueToTurn)) + # rospy.loginfo("Target Value: " + str(target_angle)) + + return msg # If this file was executed... 
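Because the wrap-around in `angle_diff` and the `angle_diff(90, result)` steering convention used by `get_drive_power` are easy to misread, here is a small worked sketch; the heading values are made up and chosen only to show the sign behavior.

```python
def angle_diff(heading1, heading2):
    # Same wrap-around formula as above: signed difference in degrees,
    # positive when heading1 is counter-clockwise of heading2.
    diff = (heading1 - heading2 + 360) % 360
    return (diff + 180) % 360 - 180

print(angle_diff(10, 350))   # 20  -> target is 20 deg counter-clockwise of us
print(angle_diff(350, 10))   # -20 -> target is 20 deg clockwise of us

# get_drive_power() feeds angle_diff(90, result) into the drive curve, so a
# navigation angle of 120 (30 deg left of straight ahead, where 90 is forward)
# becomes a negative, counter-clockwise steering error:
print(angle_diff(90, 120))   # -30
```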
@@ -224,6 +214,7 @@ def update_navigation(data: LaserScan) -> None: try: # Initialize the running environment for this program initialize() + # testing_rviz.initialize() # Spin RosPy to the next update cycle rospy.spin() @@ -234,11 +225,6 @@ def update_navigation(data: LaserScan) -> None: # If there is some other error (i.e. ROS Termination) finally: # Stop the drive motors before shutting down - print("Stopping motors") - msg_stop = DriveTrainCmd() - msg_stop.left_value = 0 - msg_stop.right_value = 0 - drive_pub.publish(msg_stop) time.sleep(0.1) ## @} diff --git a/src/wr_logic_longrange/nodes/testing_rviz.py b/src/wr_logic_longrange/nodes/testing_rviz.py new file mode 100644 index 00000000..524350a7 --- /dev/null +++ b/src/wr_logic_longrange/nodes/testing_rviz.py @@ -0,0 +1,277 @@ +#!/usr/bin/env python3 + +"""@file +@defgroup wr_logic_ai_longrange_ai +@{ +@defgroup wr_logic_ai_longrange_ai_testing_rviz Testing Rviz +@brief +@details +@{ +""" + +import math +import rospy +import time +from visualization_msgs.msg import Marker +from geometry_msgs.msg import PoseStamped +from std_msgs.msg import Float64 + +# Navigation parameters +# distance before obstacle avoidance logics is triggered (in meters) +LIDAR_THRESH_DISTANCE = 5 +# distance before rover believes it has reached the target (in meters) +NAV_THRESH_DISTANCE = 0.5 + +# initialize target angle to move forward +# current location +current_lat = 0 +current_long = 0 + +# target location +target_angle = 0 +target_sector = 0 +frameCount = 0 + +# used for obtaining navigation angle and valleys +smoothing_constant = 3 + +# global speed factor updated through navigation +speed_factor = 0 + +# count to handle whent to update poses for simulation +frameCount = 0 + +# Publish data out to the marker +wRover_pub = rospy.Publisher("wRover_marker", Marker, queue_size=10) + +# set of poses for rviz, also creates publishing topic for the poses +# create pose for debug heading +heading_pub = rospy.Publisher("/debug_heading", PoseStamped, queue_size=1) +heading_msg = PoseStamped() +heading_msg.pose.position.x = 0 +heading_msg.pose.position.y = 0 +heading_msg.pose.position.z = 0 +heading_msg.pose.orientation.x = 0 +heading_msg.pose.orientation.y = 0 +heading_msg.header.frame_id = "laser" + +# create pose for actual heading +actual_heading_pub = rospy.Publisher("/actual_heading", PoseStamped, queue_size=1) +actual_heading_msg = PoseStamped() +actual_heading_msg.pose.position.x = 0 +actual_heading_msg.pose.position.y = 0 +actual_heading_msg.pose.position.z = 0 +actual_heading_msg.pose.orientation.x = 0 +actual_heading_msg.pose.orientation.y = 0 +actual_heading_msg.header.frame_id = "laser" + +# create pose for delta heading +laser_adjuster_pub = rospy.Publisher("/laser_adjuster", Float64, queue_size=1) +delta_heading_pub = rospy.Publisher("/delta_heading", PoseStamped, queue_size=1) +delta_heading_msg = PoseStamped() +delta_heading_msg.pose.position.x = 0 +delta_heading_msg.pose.position.y = 0 +delta_heading_msg.pose.position.z = 0 +delta_heading_msg.pose.orientation.x = 0 +delta_heading_msg.pose.orientation.y = 0 +delta_heading_msg.header.frame_id = "laser" + +# instantiate all directional markers +marker_pub = rospy.Publisher("pose_marker", Marker, queue_size=1) +marker = Marker() +marker.header.frame_id = "laser" # Set your frame_id +marker.type = Marker.CUBE +marker.action = Marker.ADD +marker.scale.x = 10.0 # Set the dimensions of the plane +marker.scale.y = 10.0 +marker.scale.z = 0.01 # Thickness of the plane +marker.color.a = 0.5 +marker.color.r = 
0.1 +marker.color.g = 0.6 +marker.color.b = 0.1 +marker.pose = ( + delta_heading_msg.pose +) # Set the position and orientation based on your pose + +marker_circle_pub = rospy.Publisher("pose_circle_marker", Marker, queue_size=1) +marker_circle = Marker() +marker_circle.header.frame_id = "laser" # Set your frame_id +marker_circle.type = Marker.SPHERE +marker_circle.action = Marker.ADD +marker_circle.scale.x = LIDAR_THRESH_DISTANCE * 2 # Set the dimensions of the plane +marker_circle.scale.y = LIDAR_THRESH_DISTANCE * 2 +marker_circle.scale.z = 0.01 # Thickness of the plane +marker_circle.color.a = 0.5 +marker_circle.color.r = 0.1 +marker_circle.color.g = 0.3 +marker_circle.color.b = 0.6 +marker_circle.pose = ( + delta_heading_msg.pose +) # Set the position and orientation based on your pose + +wRover = Marker() +wRover.header.frame_id = "laser" # Replace with your fixed frame +wRover.type = Marker.MESH_RESOURCE +wRover.mesh_resource = "package://wr_logic_ai/meshes/Eclipse_Base_Simple.stl" # Replace with your 3D object path +wRover.pose.position.x = 0.0 # Replace with your desired position +wRover.pose.position.y = 0.0 +wRover.pose.position.z = 0.11 +wRover.pose.orientation.x = 0.0 # Replace with your desired orientation +wRover.pose.orientation.y = 0.0 +wRover.pose.orientation.z = 0.0 +wRover.pose.orientation.w = 1.0 +wRover.scale.x = 0.01 # Replace with your desired scale +wRover.scale.y = 0.01 +wRover.scale.z = 0.01 +wRover.color.a = 1.0 +wRover.color.r = 1.0 +wRover.color.g = 0.0 +wRover.color.b = 0.0 + +marker_flag_pub = rospy.Publisher("flag_marker", Marker, queue_size=1) +marker_flag = Marker() +marker_flag.header.frame_id = "laser" # Replace with your fixed frame +marker_flag.type = Marker.MESH_RESOURCE +marker_flag.mesh_resource = ( + "package://wr_logic_ai/meshes/flag.stl" # Replace with your 3D object path +) +marker_flag.pose.position.x = 0.0 # Replace with your desired position +marker_flag.pose.position.y = 5.0 +marker_flag.pose.position.z = 0.11 +marker_flag.pose.orientation.x = 0.0 # Replace with your desired orientation +marker_flag.pose.orientation.y = 0.0 +marker_flag.pose.orientation.z = 0.0 +marker_flag.pose.orientation.w = 1.0 +marker_flag.scale.x = 0.02 # Replace with your desired scale +marker_flag.scale.y = 0.02 +marker_flag.scale.z = 0.02 +marker_flag.color.a = 1.0 +marker_flag.color.r = 1.0 +marker_flag.color.g = 0.0 +marker_flag.color.b = 0.0 + + +def update_navigation_rviz_sim( + delta_heading: Float64, result: Float64, cur_heading: Float64 +) -> None: + """ + Configures heading and poses for rviz simulation. Values either passed from fake data or real data + are used to created headings/arrows in rviz. Used in obsracle avoidance to get input parameters. + + @param delta_heading (Float64): Relative value of target from robot from -180 to 180. Starting from bottom of robot (Counter-Clockwise) + @param result (Float64): Angle to drive to based on target angle, current heading, and obstacles. Value given as a sector angle with right + of robot being 0 (Counterclockwise). 
+ @param cur_heading(Float64): Value of the current direction of the rover relative to East at 0 + """ + + global frameCount + + if rospy.get_param("~wrover_hw") == "MOCK": + heading_msg.header.seq = frameCount + heading_msg.header.stamp = rospy.get_rostime() + + actual_heading_msg.header.seq = frameCount + actual_heading_msg.header.stamp = rospy.get_rostime() + + delta_heading_msg.header.seq = frameCount + delta_heading_msg.header.stamp = rospy.get_rostime() + + frameCount += 1 + # negative sign on the pose is hardcoded, may not model how the actual robot will act + heading_msg.pose.orientation.z = math.sin(math.radians(result) / 2) + heading_msg.pose.orientation.w = math.cos(math.radians(result) / 2) + heading_pub.publish(heading_msg) + + actual_heading_msg.pose.orientation.z = math.sin(math.radians(cur_heading) / 2) + actual_heading_msg.pose.orientation.w = math.cos(math.radians(cur_heading) / 2) + actual_heading_pub.publish(actual_heading_msg) + + delta_heading_msg.pose.orientation.z = math.sin( + math.radians(delta_heading + 90) / 2 + ) + delta_heading_msg.pose.orientation.w = math.cos( + math.radians(delta_heading + 90) / 2 + ) + delta_heading_pub.publish(delta_heading_msg) + + # Adding the nums is cuz the flag is off center, I'm not sure why??? I downloaded this from a random website :D + marker_flag.pose.position.x = 5 * math.sin(math.radians(delta_heading)) + 0.57 + marker_flag.pose.position.y = -5 * math.cos(math.radians(delta_heading)) + 0.3 + marker_flag_pub.publish(marker_flag) + + laser_adjuster_pub.publish(delta_heading) # Used for inputFakeData + + marker.pose.orientation.z = math.sin(math.radians(delta_heading) / 2) + marker.pose.orientation.w = math.cos(math.radians(delta_heading) / 2) + # Set the position and orientation based on delta-heading + + # Publish the Marker message + marker_pub.publish(marker) + + # Flipped lines to match laser scan from rpLIDAR + elif rospy.get_param("~wrover_hw") == "REAL": + + heading_msg.header.seq = frameCount + heading_msg.header.stamp = rospy.get_rostime() + + actual_heading_msg.header.seq = frameCount + actual_heading_msg.header.stamp = rospy.get_rostime() + + delta_heading_msg.header.seq = frameCount + delta_heading_msg.header.stamp = rospy.get_rostime() + + frameCount += 1 + + heading_msg.pose.orientation.z = math.sin(math.radians(result + 180) / 2) + heading_msg.pose.orientation.w = math.cos(math.radians(result + 180) / 2) + heading_pub.publish(heading_msg) + + actual_heading_msg.pose.orientation.z = math.sin(math.radians(-cur_heading) / 2) + actual_heading_msg.pose.orientation.w = math.cos(math.radians(cur_heading) / 2) + actual_heading_pub.publish(actual_heading_msg) + + delta_heading_msg.pose.orientation.z = math.sin( + math.radians(delta_heading - 90) / 2 + ) + delta_heading_msg.pose.orientation.w = math.cos( + math.radians(delta_heading + 90) / 2 + ) + delta_heading_pub.publish(delta_heading_msg) + + # Adding the nums is cuz the flag is off center, I'm not sure why??? 
I downloaded this from a random website :D + marker_flag.pose.position.x = 5 * math.sin(math.radians(delta_heading)) + 0.57 + marker_flag.pose.position.y = -5 * math.cos(math.radians(delta_heading)) + 0.3 + marker_flag_pub.publish(marker_flag) + + laser_adjuster_pub.publish(delta_heading) # Used for inputFakeData + + marker.pose.orientation.z = math.sin(math.radians(delta_heading) / 2) + marker.pose.orientation.w = math.cos(math.radians(delta_heading) / 2) + # Set the position and orientation based on delta-heading + + # Publish the Marker message + marker_pub.publish(marker) + + if frameCount < 10: + wRover_pub.publish(wRover) + marker_circle_pub.publish(marker_circle) + + +# If this file was executed... +if __name__ == "__main__": + try: + # Initialize the running environment for this program + # Spin RosPy to the next update cycle + rospy.spin() + + # Ignore ROS Interrupt Exceptions + except rospy.ROSInterruptException: + pass + + # If there is some other error (i.e. ROS Termination) + finally: + # Stop the drive motors before shutting down + time.sleep(0.1) + +## @} +## @} diff --git a/src/wr_logic_longrange/package.xml b/src/wr_logic_longrange/package.xml new file mode 100644 index 00000000..51820870 --- /dev/null +++ b/src/wr_logic_longrange/package.xml @@ -0,0 +1,70 @@ + + + wr_logic_longrange + 0.0.0 + The wr_logic_longrange package + + + + + arthur + + + + + + TODO + + + + + + + + + + + + + + + + + + + + + + + + message_generation + + + + + + message_runtime + + + + + catkin + geometry_msgs + rospy + sensor_msgs + std_msgs + geometry_msgs + rospy + sensor_msgs + std_msgs + geometry_msgs + rospy + message_runtime + + + + + + + + diff --git a/src/wr_logic_longrange/setup.py b/src/wr_logic_longrange/setup.py new file mode 100644 index 00000000..f2289b80 --- /dev/null +++ b/src/wr_logic_longrange/setup.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python3 +from distutils.core import setup + +from catkin_pkg.python_setup import generate_distutils_setup + +setup_args = generate_distutils_setup(packages=["wr_logic_longrange"], package_dir={"": "src"}) + +setup(**setup_args) diff --git a/src/wr_logic_search/CMakeLists.txt b/src/wr_logic_search/CMakeLists.txt new file mode 100644 index 00000000..34e532e4 --- /dev/null +++ b/src/wr_logic_search/CMakeLists.txt @@ -0,0 +1,207 @@ +cmake_minimum_required(VERSION 3.0.2) +project(wr_logic_search) + +## Compile as C++11, supported in ROS Kinetic and newer +# add_compile_options(-std=c++11) + +## Find catkin macros and libraries +## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz) +## is used, also find other catkin packages +find_package(catkin REQUIRED COMPONENTS + rospy + std_msgs + message_generation + actionlib_msgs + std_srvs +) + +## System dependencies are found with CMake's conventions +# find_package(Boost REQUIRED COMPONENTS system) + + +## Uncomment this if the package has a setup.py. This macro ensures +## modules and global scripts declared therein get installed +## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html +catkin_python_setup() + +################################################ +## Declare ROS messages, services and actions ## +################################################ + +## To declare and build messages, services or actions from within this +## package, follow these steps: +## * Let MSG_DEP_SET be the set of packages whose message types you use in +## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...). 
+## * In the file package.xml: +## * add a build_depend tag for "message_generation" +## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET +## * If MSG_DEP_SET isn't empty the following dependency has been pulled in +## but can be declared for certainty nonetheless: +## * add a exec_depend tag for "message_runtime" +## * In this file (CMakeLists.txt): +## * add "message_generation" and every package in MSG_DEP_SET to +## find_package(catkin REQUIRED COMPONENTS ...) +## * add "message_runtime" and every package in MSG_DEP_SET to +## catkin_package(CATKIN_DEPENDS ...) +## * uncomment the add_*_files sections below as needed +## and list every .msg/.srv/.action file to be processed +## * uncomment the generate_messages entry below +## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...) + +## Generate messages in the 'msg' folder +# add_message_files( +# FILES +# Message1.msg +# Message2.msg +# ) + +## Generate services in the 'srv' folder +# add_service_files( +# FILES +# ) + +## Generate actions in the 'action' folder +add_action_files( + FILES + Spin.action + SearchState.action +) + +## Generate added messages and services with any dependencies listed here +generate_messages( + DEPENDENCIES + std_msgs # Or other packages containing msgs + actionlib_msgs +) + +################################################ +## Declare ROS dynamic reconfigure parameters ## +################################################ + +## To declare and build dynamic reconfigure parameters within this +## package, follow these steps: +## * In the file package.xml: +## * add a build_depend and a exec_depend tag for "dynamic_reconfigure" +## * In this file (CMakeLists.txt): +## * add "dynamic_reconfigure" to +## find_package(catkin REQUIRED COMPONENTS ...) 
+## * uncomment the "generate_dynamic_reconfigure_options" section below +## and list every .cfg file to be processed + +## Generate dynamic reconfigure parameters in the 'cfg' folder +# generate_dynamic_reconfigure_options( +# cfg/DynReconf1.cfg +# cfg/DynReconf2.cfg +# ) + +################################### +## catkin specific configuration ## +################################### +## The catkin_package macro generates cmake config files for your package +## Declare things to be passed to dependent projects +## INCLUDE_DIRS: uncomment this if your package contains header files +## LIBRARIES: libraries you create in this project that dependent projects also need +## CATKIN_DEPENDS: catkin_packages dependent projects also need +## DEPENDS: system dependencies of this project that dependent projects also need +catkin_package( +# INCLUDE_DIRS include +# LIBRARIES wr_logic_search +# CATKIN_DEPENDS other_catkin_pkg +# DEPENDS system_lib +) + +########### +## Build ## +########### + +## Specify additional locations of header files +## Your package locations should be listed before other locations +include_directories( +# include +# ${catkin_INCLUDE_DIRS} +) + +## Declare a C++ library +# add_library(${PROJECT_NAME} +# src/${PROJECT_NAME}/wr_logic_search.cpp +# ) + +## Add cmake target dependencies of the library +## as an example, code may need to be generated before libraries +## either from message generation or dynamic reconfigure +# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) + +## Declare a C++ executable +## With catkin_make all packages are built within a single CMake context +## The recommended prefix ensures that target names across packages don't collide +# add_executable(${PROJECT_NAME}_node src/wr_logic_search_node.cpp) + +## Rename C++ executable without prefix +## The above recommended prefix causes long target names, the following renames the +## target back to the shorter version for ease of user use +## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node" +# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "") + +## Add cmake target dependencies of the executable +## same as for the library above +# add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) + +## Specify libraries to link a library or executable target against +# target_link_libraries(${PROJECT_NAME}_node +# ${catkin_LIBRARIES} +# ) + +############# +## Install ## +############# + +# all install targets should use catkin DESTINATION variables +# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html + +## Mark executable scripts (Python etc.) 
for installation +## in contrast to setup.py, you can choose the destination +# catkin_install_python(PROGRAMS +# scripts/my_python_script +# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} +# ) + +## Mark executables for installation +## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html +# install(TARGETS ${PROJECT_NAME}_node +# RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} +# ) + +## Mark libraries for installation +## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html +# install(TARGETS ${PROJECT_NAME} +# ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} +# LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} +# RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +# ) + +## Mark cpp header files for installation +# install(DIRECTORY include/${PROJECT_NAME}/ +# DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} +# FILES_MATCHING PATTERN "*.h" +# PATTERN ".svn" EXCLUDE +# ) + +## Mark other files for installation (e.g. launch and bag files, etc.) +# install(FILES +# # myfile1 +# # myfile2 +# DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +# ) + +############# +## Testing ## +############# + +## Add gtest based cpp test target and link libraries +# catkin_add_gtest(${PROJECT_NAME}-test test/test_wr_logic_search.cpp) +# if(TARGET ${PROJECT_NAME}-test) +# target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME}) +# endif() + +## Add folders to be run by python nosetests +# catkin_add_nosetests(test) diff --git a/src/wr_logic_search/README.md b/src/wr_logic_search/README.md new file mode 100644 index 00000000..a659e109 --- /dev/null +++ b/src/wr_logic_search/README.md @@ -0,0 +1,13 @@ +# Search AI + +@defgroup wr_logic_search wr_logic_search +@brief Search Pattern logic + +## State Machine +The State Machine has a state just for searching, which will be ran a minimum of five times for the three aruco tags, the orange mallet, and the water bottle. Long Range will receive the starting coordinate, and once the rover has reached this coordinate, it will switch to the Search state. In the Search state, the rover will travel to coordinates in a square spiral with a radius of 20 meters since target objects can be at most 20 meters away from the starting coordinate. At each coordinate, a service for the rover's camera will do a 360-degree scan of the surrounding area. If the target is found, the rover will go from the Search state to the Short Range state, if the target is not found, the rover will continue to the next coordinate, and if the target is not found after the rover scans its surroundings at the last coordinate, the rover will go to the starting coordinate, and the state will start over. + +## Coordinate Calculations +The Autonomous Navigation mission is run on GPS coordinates. The starting coordinates for each state are given, but for Search Pattern, since we need to look for the target objects at coordinates that are not the starting coordinate to actually find the objects, we need a systemized way to find these other coordinates. In the python file coord_calculations.py, we calculate GPS coordinates considering the curvature of the Earth. Even though the distances the rover will travel are not large, this will maximize accuracy. The search pattern is a square spiral, so beginning from the starting coordinate, the rover travels north, east, south, and west for a set number of vertices/coordinates. 
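As a rough illustration of that pattern, the sketch below generates the vertex offsets of a square spiral in flat local (east, north) meters, assuming the classic grow-every-second-leg spacing; the function name, step size, and vertex count are placeholders, and the actual coord_calculations.py instead converts each leg into GPS coordinates with the curvature-aware formulas described below.

```python
# Toy square spiral in flat local (east, north) meters; the real implementation
# turns each leg into a GPS coordinate using the destination-point formula.
def spiral_offsets(step=8, num_vertices=8):
    headings = [(0, 1), (1, 0), (0, -1), (-1, 0)]  # N, E, S, W unit vectors
    x = y = 0
    points = [(x, y)]
    leg = step
    for i in range(num_vertices):
        if i > 1 and i % 2 == 0:  # the spiral grows every second leg
            leg += step
        dx, dy = headings[i % 4]
        x, y = x + dx * leg, y + dy * leg
        points.append((x, y))
    return points

print(spiral_offsets())
# [(0, 0), (0, 8), (8, 8), (8, -8), (-8, -8), (-8, 16), (16, 16), (16, -16), (-16, -16)]
```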
A list of dictionaries, representing one coordinate, are returned and will be iterated through for searching. + +## Travel Timer +Search Pattern uses the obstacle avoidance from Long Range, so we need a way to determine if the rover has reached an obstacle that has completely blocked off the rover's path set by the list of calculated coordinates. This is done by using a timer. There is a timer for each specific coordinate traveled too based on the distance between the current coordinate and the target coordinate. If the rover takes longer than the max time, then the rover will skip to the next coordinate, assuming that the coordinate is unreachable. There is also a timer for the entire state, which is determined by the sum of the distances between each of the calculated coordinates. If the rover takes longer than the max time, it is assumed that the rover's path is obstructed by an obstacle that makes the entire path unusable. In this case, the rover goes back to the starting coordinate and restarts the searching process. \ No newline at end of file diff --git a/src/wr_logic_search/action/SearchState.action b/src/wr_logic_search/action/SearchState.action new file mode 100644 index 00000000..d5e57f6c --- /dev/null +++ b/src/wr_logic_search/action/SearchState.action @@ -0,0 +1,18 @@ +#goal definition +float64 initial_lat # Initial GPS coordinate +float64 initial_long +int8 target_id +uint8 ARUCO_ID_0 = 0 +uint8 ARUCO_ID_1 = 1 +uint8 ARUCO_ID_2 = 2 +uint8 ARUCO_ID_3 = 3 +uint8 ARUCO_ID_4 = 4 +uint8 OBJ_MALLET = 5 +uint8 OBJ_BOTTLE = 6 +uint8 ANY = 9 +--- +#result definition +float64 final_lat # Final GPS coordinate +float64 final_long +--- +#feedback definition diff --git a/src/wr_logic_search/action/Spin.action b/src/wr_logic_search/action/Spin.action new file mode 100644 index 00000000..f7d28dee --- /dev/null +++ b/src/wr_logic_search/action/Spin.action @@ -0,0 +1,14 @@ +#goal definition +int8 target_id +uint8 ARUCO_ID_0 = 0 +uint8 ARUCO_ID_1 = 1 +uint8 ARUCO_ID_2 = 2 +uint8 ARUCO_ID_3 = 3 +uint8 ARUCO_ID_4 = 4 +uint8 OBJ_MALLET = 5 +uint8 OBJ_BOTTLE = 6 +uint8 ANY = 9 +--- +#result definition +--- +#feedback definition diff --git a/src/wr_logic_search/launch/search_pattern_test.launch b/src/wr_logic_search/launch/search_pattern_test.launch new file mode 100644 index 00000000..83ebec08 --- /dev/null +++ b/src/wr_logic_search/launch/search_pattern_test.launch @@ -0,0 +1,49 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/wr_logic_search/launch/spin_test.launch b/src/wr_logic_search/launch/spin_test.launch new file mode 100644 index 00000000..3061ae10 --- /dev/null +++ b/src/wr_logic_search/launch/spin_test.launch @@ -0,0 +1,28 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/wr_logic_search/nodes/__init__.py b/src/wr_logic_search/nodes/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/wr_logic_search/nodes/search_pattern_action_server.py b/src/wr_logic_search/nodes/search_pattern_action_server.py new file mode 100755 index 00000000..d0a4c42d --- /dev/null +++ b/src/wr_logic_search/nodes/search_pattern_action_server.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python3 + +"""@file +@defgroup wr_logic_search +@{ +@defgroup wr_logic_search_search_action_server Search Action Server +@brief Action server for the rover search pattern +@details Tells the rover to travel to specific coordinates that follow the square +spiral search pattern. 
Keeps track of time to know when a coordinate should be +skipped due to large, unavoidable obstacles. At each coordinate, the camera scans +the surrounding area, searching for the target object of the current round. +@{ +""" + +import rospy + +from actionlib import SimpleActionServer, SimpleActionClient, GoalStatus + +import wr_logic_search.coord_calculations as coord_calculations +import wr_logic_search.travel_timer as travel_timer + +from wr_logic_longrange.msg import LongRangeAction, LongRangeGoal +from wr_logic_search.msg import ( + SearchStateAction, + SearchStateGoal, + SearchStateResult, + SpinAction, + SpinGoal, +) + + +class SearchActionServer: + def __init__(self, name) -> None: + """ + Initializes the action server + + @param name (String): Name of the action server + """ + self.long_range_client = SimpleActionClient( + "LongRangeActionServer", LongRangeAction + ) + + self.spin_client = SimpleActionClient("SpinActionServer", SpinAction) + + self._action_name = name + self._as = SimpleActionServer( + self._action_name, + SearchStateAction, + execute_cb=self.execute_callback, + auto_start=False, + ) + self._as.start() + + def execute_callback(self, goal: SearchStateGoal): + """ + Executes the long range obstacle avoidance code, and triggers the + corresponding state machine event depending on the result of the + navigation. + + @param goal (SearchGoal): Goal for the navigation segment, which contains + the GPS coordinates of the target. + """ + distance = 8 # in meters, the shortest unit of movement + num_vertices = 25 + object_detected = False + + # get list of coordinates + coords = coord_calculations.get_coords( + goal.initial_lat, goal.initial_long, distance, num_vertices + ) + + # NOTE: in the future, set timeouts using the action client, not the server + # in seconds, get max time for a single state + # SEARCH_TIMEOUT_TIME = travel_timer.calc_state_time() + + self.long_range_client.wait_for_server() + + i = 0 + # start timer for the state and the first coordinate + # state_time = rospy.get_rostime() + # point_time = rospy.get_rostime() + while not rospy.is_shutdown() and i < num_vertices: + # TODO figure out timeout + timeout = travel_timer.calc_point_time(coords[i]["distance"]) + 15 + + next_lat, next_long = coords[i]["lat"], coords[i]["long"] + self.long_range_client.send_goal( + LongRangeGoal( + target_lat=next_lat, target_long=next_long + ) + ) + rospy.logerr(f"Next GPS coord: {next_lat} {next_long}") + self.long_range_client.wait_for_result(rospy.Duration(timeout)) + # self.long_range_client.wait_for_result() + self.long_range_client.cancel_goal() + + if self.long_range_client.get_state() == GoalStatus.SUCCEEDED: + rospy.logerr(f"Reached GPS coordinate") + else: + rospy.logerr(f"Timed out, skipping GPS coordinate") + # TODO define timeout for spin + # spin to search for vision target + self.spin_client.send_goal(SpinGoal( + target_id=goal.target_id + )) + self.spin_client.wait_for_result(rospy.Duration(60)) + object_detected = self.spin_client.get_state() == GoalStatus.SUCCEEDED + if object_detected: + break + + i += 1 # next coordinate + + # Set result + if object_detected: + self._as.set_succeeded( + SearchStateResult( + final_lat=coords[i]["lat"], final_long=coords[i]["long"] + ) + ) + else: + self._as.set_aborted(SearchStateResult(0, 0)) + + +if __name__ == "__main__": + try: + rospy.init_node("search_action_server") + server = SearchActionServer("SearchActionServer") + rospy.spin() + except rospy.ROSInterruptException: + pass + +## @} +## @} diff --git 
a/src/wr_logic_search/nodes/search_pattern_client.py b/src/wr_logic_search/nodes/search_pattern_client.py new file mode 100755 index 00000000..6f3eaa7e --- /dev/null +++ b/src/wr_logic_search/nodes/search_pattern_client.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python3 + +"""@file +@defgroup wr_logic_search +@{ +@defgroup wr_logic_search_search_pattern_client Search Pattern Client +@brief The client for the camera that scans the rover's surroundings for the target object. +@details This client is currently waiting for the response from the server and sending the +result (boolean), which seems a little redundant. This might be changed or completely +removed in the future. +@{ +""" + +import rospy +from actionlib import SimpleActionClient + +from wr_logic_longrange.msg import InitCompassAction, InitCompassGoal +from wr_logic_search.msg import SearchStateAction, SearchStateGoal + + +def main(): + rospy.init_node("search_pattern_client") + compass_client = SimpleActionClient("InitCompassActionServer", InitCompassAction) + spin_client = SimpleActionClient("SearchActionServer", SearchStateAction) + + # Input test coordinates + test_lat, test_long = 43.0724525, -89.4106448 + test_target = SearchStateGoal.ANY + + compass_client.wait_for_server() + compass_client.send_goal(InitCompassGoal()) + compass_client.wait_for_result() + + spin_client.wait_for_server() + spin_client.send_goal( + SearchStateGoal( + initial_lat=test_lat, initial_long=test_long, target_id=test_target + ) + ) + spin_client.wait_for_result() + spin_client.get_state() + rospy.init_node("search_pattern_client") + + +if __name__ == "__main__": + main() + +## @} +## @} diff --git a/src/wr_logic_search/nodes/spin_action_server.py b/src/wr_logic_search/nodes/spin_action_server.py new file mode 100755 index 00000000..c64bfbab --- /dev/null +++ b/src/wr_logic_search/nodes/spin_action_server.py @@ -0,0 +1,127 @@ +#!/usr/bin/env python3 + +"""@file +@defgroup wr_logic_search +@{ +@defgroup wr_logic_search_spin Spin Action Server +@brief Subscriber for the coordinate topic +@details Spins the rover and searches for an object +@{ +""" + +import rospy +import actionlib + +from std_msgs.msg import Float64 + +from wr_drive_msgs.msg import DriveTrainCmd +from wr_logic_search.msg import SpinAction, SpinGoal +from wr_logic_shortrange.msg import VisionTarget + + +class SpinActionServer: + TURN_SPEED = 0.2 + + def __init__(self, name) -> None: + + self.current_heading = 0 + self.heading_time = 0 + + heading_topic = rospy.get_param("~heading_topic") + self.heading_sub = rospy.Subscriber( + heading_topic, Float64, self.heading_callback + ) + + self.vision_target = None + self.vision_time = 0 + + vision_topic = rospy.get_param("~vision_topic") + self.vision_sub = rospy.Subscriber( + vision_topic, VisionTarget, self.vision_callback + ) + + # Name of the drivetrain topic to publish to + drivetrain_topic = rospy.get_param("~motor_speeds") + # Publisher to set motor speeds + self.drive_pub = rospy.Publisher(drivetrain_topic, DriveTrainCmd, queue_size=1) + + self._action_name = name + self._as = actionlib.SimpleActionServer( + self._action_name, + SpinAction, + execute_cb=self.execute_callback, + auto_start=False, + ) + # self._as.preempt_callback(self.preempt_callback) + self._as.start() + + def heading_callback(self, msg: Float64): + self.current_heading = msg.data + self.heading_time = rospy.get_time() + + def vision_callback(self, msg: VisionTarget): + if self.vision_target and ( + self.vision_target == msg.id or self.vision_target == SpinGoal.ANY + ): + 
self.vision_time = rospy.get_time() + + def execute_callback(self, goal: SpinGoal): + success = False + rate = rospy.Rate(10) + + self.vision_target = goal.target_id + + while not rospy.is_shutdown() and self.heading_time == 0: + rate.sleep() + + initial_heading = self.current_heading + start_time = self.heading_time + while not rospy.is_shutdown() and not self._as.is_preempt_requested(): + if self.vision_time != 0: + # Check that target is visible for a certain amount of time + self.last_time = self.vision_time + self.drive_pub.publish(0, 0) + for _ in range(5): + rospy.sleep(0.5) + if self.last_time == self.vision_time: + self.vision_time = 0 + break + else: + # If loop exited normally, we have a good view of the vision target + success = True + break + elif ( + abs(self.current_heading - initial_heading) < 5 + # TODO don't use a magical constant here + and self.heading_time - start_time > 10 + ): + break + + self.drive_pub.publish(-self.TURN_SPEED, self.TURN_SPEED) + rate.sleep() + + self.drive_pub.publish(0, 0) + self.vision_target = None + self.vision_time = 0 + + if success: + self._as.set_succeeded() + else: + self._as.set_aborted() + + def preempt_callback(self): + self.drive_pub.publish(0, 0) + self.vision_target = None + self.vision_time = 0 + + +if __name__ == "__main__": + try: + rospy.init_node("spin_action_server") + server = SpinActionServer("SpinActionServer") + rospy.spin() + except rospy.ROSInterruptException: + pass + +## @} +## @} diff --git a/src/wr_logic_search/nodes/spin_client.py b/src/wr_logic_search/nodes/spin_client.py new file mode 100755 index 00000000..6cac459e --- /dev/null +++ b/src/wr_logic_search/nodes/spin_client.py @@ -0,0 +1,24 @@ +#!/usr/bin/env python3 + +import rospy +from actionlib import SimpleActionClient +from wr_logic_search.msg import SpinAction, SpinGoal + + +# Spin for 60 seconds +SPIN_TIMEOUT = 60 + + +def main(): + rospy.init_node("spin_action_client") + spin_client = SimpleActionClient("SpinActionServer", SpinAction) + + spin_client.wait_for_server() + spin_client.send_goal(SpinGoal(target_id=SpinGoal.ANY)) + spin_client.wait_for_result(rospy.Duration(SPIN_TIMEOUT)) + spin_client.cancel_goal() + spin_client.get_state() + + +if __name__ == "__main__": + main() diff --git a/src/wr_logic_search/package.xml b/src/wr_logic_search/package.xml new file mode 100644 index 00000000..4d86db8b --- /dev/null +++ b/src/wr_logic_search/package.xml @@ -0,0 +1,64 @@ + + + wr_logic_search + 0.0.0 + The wr_logic_search package + + + + + arthur + + + + + + TODO + + + + + + + + + + + + + + + + + + + + + + + + message_generation + + + + + + message_runtime + + + + + catkin + rospy + std_msgs + rospy + std_msgs + rospy + message_runtime + + + + + + + diff --git a/src/wr_logic_search/setup.py b/src/wr_logic_search/setup.py new file mode 100644 index 00000000..7a411bff --- /dev/null +++ b/src/wr_logic_search/setup.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python3 +from distutils.core import setup + +from catkin_pkg.python_setup import generate_distutils_setup + +setup_args = generate_distutils_setup( + packages=["wr_logic_search"], package_dir={"": "src"} +) + +setup(**setup_args) diff --git a/src/wr_logic_search/src/wr_logic_search/__init__.py b/src/wr_logic_search/src/wr_logic_search/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/wr_logic_ai/nodes/coord_calculations.py b/src/wr_logic_search/src/wr_logic_search/coord_calculations.py similarity index 59% rename from src/wr_logic_ai/nodes/coord_calculations.py rename to 
src/wr_logic_search/src/wr_logic_search/coord_calculations.py index 00562867..a09f6536 100644 --- a/src/wr_logic_ai/nodes/coord_calculations.py +++ b/src/wr_logic_search/src/wr_logic_search/coord_calculations.py @@ -1,79 +1,93 @@ -import math +#!/usr/bin/env python3 -''' -@ingroup wr_logic_ai -@defgroup wr_logic_ai Coordinate Calculations +"""@file +@defgroup wr_logic_search +@{ +@defgroup wr_logic_search_coord_calculations Coordinate Calculations @brief Calculates the coordinates the rover travels to when searching. - +@details Attributes(s): EARTH_RADIUS - radius of the Earth Methods(s): get_coords(start_lat, start_long, distance, num_vertices) - gets the list of coordinates calc_dest_coord(curr_lat, curr_long, distance, bearing) - calculates the target coordinate -''' - -EARTH_RADIUS = 6378100 # in meters +@{ +""" + +import math +from typing import * -def get_coords(start_lat, start_long, distance, num_vertices) -> list: - ''' - Description: 'Main' method that gets the coordinates the rover will travel to in the current +EARTH_RADIUS = 6378100 # in meters + + +def get_coords(start_lat, start_long, distance, num_vertices) -> List[Dict[str, float]]: + """ + Description: 'Main' method that gets the coordinates the rover will travel to in the current searching round based on the parameters. We only need to call this method. - Parameter(s): + Parameter(s): start_lat - the rover's starting latitude in the current searching round start_long - the rover's starting longitude in the current searching round - distance - the distance traveled between the starting coordinate and the next + distance - the distance traveled between the starting coordinate and the next immediate coordinate num_vertices - the maximum number of coordinates the rover could travel to Return(s): coords - list of coordinates (dictionaries) the rover will travel to during the search state - ''' + """ cumulative_dist = distance coords = [] bearing = 0 - mult = 0 # create starting coordinate as the first coordinate - coords.append({'lat': start_lat, 'long': start_long}) - - for i in range(num_vertices): + coords.append({"lat": start_lat, "long": start_long, "distance": 0}) + count = 0 + while count < num_vertices: # update the distance after it has been traveled twice to create the square spiral shape - if (i > 1) and (i % 2) == 0: + if count == 2 or count == 6 or count == 12: cumulative_dist += distance - coords.append(calc_dest_coord(coords[i]['lat'], coords[i]['long'], - cumulative_dist, bearing)) + for a in range(int(cumulative_dist/distance)): + #print(a) + #print(int(cumulative_dist/distance - 1)) + coords.append( + calc_dest_coord( + coords[count]["lat"], coords[count]["long"], distance, bearing + ) + ) + count = count + 1 + bearing += 90 return coords + def calc_dest_coord(curr_lat, curr_long, distance, bearing) -> dict: - ''' - Description: Calculates the target latitude and longitude for each cardinal direction + """ + Description: Calculates the target latitude and longitude for each cardinal direction based on the current latitude and longitude, distance, and bearing. - The formula 'Destination point given distance and bearing from start point' from - https://www.movable-type.co.uk/scripts/latlong.html. Based on these formulas, north - has a bearing of 0°, east has a bearing of 90°, south has a bearing of 180°, - and west has a bearing of 270°. 
- - With the variables curr_lat, curr_long, distance, bearing, and EARTH_RADIUS, + The formula 'Destination point given distance and bearing from start point' from + https://www.movable-type.co.uk/scripts/latlong.html. Based on these formulas, north + has a bearing of 0°, east has a bearing of 90°, south has a bearing of 180°, + and west has a bearing of 270°. + + With the variables curr_lat, curr_long, distance, bearing, and EARTH_RADIUS, target_lat and target_long can be calculated using these formulas: - + target_lat = asin( - sin(curr_lat) - ⋅ cos(distance / EARTH_RADIUS) - + cos(curr_lat) - ⋅ sin(distance / EARTH_RADIUS) + sin(curr_lat) + ⋅ cos(distance / EARTH_RADIUS) + + cos(curr_lat) + ⋅ sin(distance / EARTH_RADIUS) ⋅ cos(bearing)) - + target_long = curr_long + atan2( - sin(bearing) - ⋅ sin(distance / EARTH_RADIUS) - ⋅ cos(curr_lat), - cos(distance / EARTH_RADIUS) + sin(bearing) + ⋅ sin(distance / EARTH_RADIUS) + ⋅ cos(curr_lat), + cos(distance / EARTH_RADIUS) − sin(curr_lat)^2)) Parameter(s): @@ -89,7 +103,7 @@ def calc_dest_coord(curr_lat, curr_long, distance, bearing) -> dict: (what it took to get to the target latitude and longitude) distance - the distance that was traveled bearing - the bearing/direction that was traveled toward - ''' + """ target_lat_rad = 0 target_long_rad = 0 @@ -100,39 +114,31 @@ def calc_dest_coord(curr_lat, curr_long, distance, bearing) -> dict: # north if bearing % 360 == 0: target_lat_rad = target_lat_rad = math.asin( - math.sin(curr_lat_rad) - * math.cos(distance / EARTH_RADIUS) - + math.cos(curr_lat_rad) - * math.sin(distance / EARTH_RADIUS) - * 1) - target_long_rad = curr_long_rad # does not change + math.sin(curr_lat_rad) * math.cos(distance / EARTH_RADIUS) + + math.cos(curr_lat_rad) * math.sin(distance / EARTH_RADIUS) * 1 + ) + target_long_rad = curr_long_rad # does not change # east elif bearing % 360 == 90: - target_lat_rad = curr_lat_rad # does not change + target_lat_rad = curr_lat_rad # does not change target_long_rad = curr_long_rad + math.atan2( - 1 - * math.sin(distance / EARTH_RADIUS) - * math.cos(curr_lat_rad), - math.cos(distance / EARTH_RADIUS) - - math.sin(curr_lat_rad) ** 2) + 1 * math.sin(distance / EARTH_RADIUS) * math.cos(curr_lat_rad), + math.cos(distance / EARTH_RADIUS) - math.sin(curr_lat_rad) ** 2, + ) # south elif bearing % 360 == 180: target_lat_rad = math.asin( - math.sin(curr_lat_rad) - * math.cos(distance / EARTH_RADIUS) - + math.cos(curr_lat_rad) - * math.sin(distance / EARTH_RADIUS) - * -1) - target_long_rad = curr_long_rad # does not change + math.sin(curr_lat_rad) * math.cos(distance / EARTH_RADIUS) + + math.cos(curr_lat_rad) * math.sin(distance / EARTH_RADIUS) * -1 + ) + target_long_rad = curr_long_rad # does not change # west elif bearing % 360 == 270: - target_lat_rad = curr_lat_rad # does not change + target_lat_rad = curr_lat_rad # does not change target_long_rad = curr_long_rad + math.atan2( - -1 - * math.sin(distance / EARTH_RADIUS) - * math.cos(curr_lat_rad), - math.cos(distance / EARTH_RADIUS) - - math.sin(curr_lat_rad) ** 2) + -1 * math.sin(distance / EARTH_RADIUS) * math.cos(curr_lat_rad), + math.cos(distance / EARTH_RADIUS) - math.sin(curr_lat_rad) ** 2, + ) else: return {} @@ -141,6 +147,15 @@ def calc_dest_coord(curr_lat, curr_long, distance, bearing) -> dict: target_long = math.degrees(target_long_rad) # create the new coordinate - coord = {'lat': target_lat, 'long': target_long, 'distance': distance, 'bearing': bearing % 360} + coord = { + "lat": target_lat, + "long": target_long, + "distance": 
distance, + "bearing": bearing % 360, + } return coord + + +## @} +## @} diff --git a/src/wr_logic_search/src/wr_logic_search/travel_timer.py b/src/wr_logic_search/src/wr_logic_search/travel_timer.py new file mode 100644 index 00000000..906ea578 --- /dev/null +++ b/src/wr_logic_search/src/wr_logic_search/travel_timer.py @@ -0,0 +1,63 @@ +#!/usr/bin/env python3 + +"""@file +@defgroup wr_logic_search +@{ +@defgroup wr_logic_search_travel_timer Travel Timer +@brief Calculates the maximum travel time between two or all coordinates to know when +to skip the coordinate or to restart searching + +Attributes(s): + speed - speed of the rover in meters per second + +Methods(s): + calc_point_time(distance) - Calculates maximum travel time between two coordinates + calc_state_time(distance) - Calculates maximum travel time between all coordinates +@{ +""" + +SPEED = 0.5 # in meters per second, +# purposefully slow to give more time for obstacle avoidance +# and because true speed is inknown +# (motor power outputs are set to some value between 0.3 and 0.5, +# which does not correlate to speed) + + +def calc_point_time(distance) -> float: + """ + Description: Calculates the maximum time it should take the rover to travel from one + coordinate to another coordinate. If the rover reaches this limit, the coordinate + is labeled as unreachable due to obstacle avoidance, and it will skip the + coordinate and move on to the next one. + + Parameter(s): + distance - the distance from the current coordinate to the target coordinate/point (in meters) + + Return(s): + time - the time it takes the rover to travel the distance between two coordinates (in seconds) + """ + time = distance / SPEED + return time + + +def calc_state_time(radius=20) -> float: + """ + Description: Calculates the maximum time it should take the rover to visit all + coordinates during the searching state. The spiral pattern uses 4 meters as a base, + so the distance between every coordinate must be a multiple of 4. + + Parameter(s): + radius - the radius of the square spiral (in meters), + correlates to the maximum distance the starting coordinate is to the target + + Return(s): + time - the time it takes the rover to visit all coordinates (in seconds) + """ + total_distance = (0.5 * radius + 1) * (2 * radius + 4) + total_time = total_distance / SPEED + + return total_time + + +## @} +## @} diff --git a/src/wr_logic_shortrange/CMakeLists.txt b/src/wr_logic_shortrange/CMakeLists.txt new file mode 100644 index 00000000..3c1d7465 --- /dev/null +++ b/src/wr_logic_shortrange/CMakeLists.txt @@ -0,0 +1,206 @@ +cmake_minimum_required(VERSION 3.0.2) +project(wr_logic_shortrange) + +## Compile as C++11, supported in ROS Kinetic and newer +# add_compile_options(-std=c++11) + +## Find catkin macros and libraries +## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz) +## is used, also find other catkin packages +find_package(catkin REQUIRED COMPONENTS + rospy + std_msgs + message_generation + actionlib_msgs +) + +## System dependencies are found with CMake's conventions +# find_package(Boost REQUIRED COMPONENTS system) + + +## Uncomment this if the package has a setup.py. 
This macro ensures +## modules and global scripts declared therein get installed +## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html +catkin_python_setup() + +################################################ +## Declare ROS messages, services and actions ## +################################################ + +## To declare and build messages, services or actions from within this +## package, follow these steps: +## * Let MSG_DEP_SET be the set of packages whose message types you use in +## your messages/services/actions (e.g. std_msgs, actionlib_msgs, ...). +## * In the file package.xml: +## * add a build_depend tag for "message_generation" +## * add a build_depend and a exec_depend tag for each package in MSG_DEP_SET +## * If MSG_DEP_SET isn't empty the following dependency has been pulled in +## but can be declared for certainty nonetheless: +## * add a exec_depend tag for "message_runtime" +## * In this file (CMakeLists.txt): +## * add "message_generation" and every package in MSG_DEP_SET to +## find_package(catkin REQUIRED COMPONENTS ...) +## * add "message_runtime" and every package in MSG_DEP_SET to +## catkin_package(CATKIN_DEPENDS ...) +## * uncomment the add_*_files sections below as needed +## and list every .msg/.srv/.action file to be processed +## * uncomment the generate_messages entry below +## * add every package in MSG_DEP_SET to generate_messages(DEPENDENCIES ...) + +## Generate messages in the 'msg' folder +add_message_files( + FILES + VisionTarget.msg +) + +## Generate services in the 'srv' folder +# add_service_files( +# FILES +# Service1.srv +# Service2.srv +# ) + +## Generate actions in the 'action' folder +add_action_files( + FILES + ShortRange.action +) + +## Generate added messages and services with any dependencies listed here +generate_messages( + DEPENDENCIES + std_msgs # Or other packages containing msgs + actionlib_msgs +) + +################################################ +## Declare ROS dynamic reconfigure parameters ## +################################################ + +## To declare and build dynamic reconfigure parameters within this +## package, follow these steps: +## * In the file package.xml: +## * add a build_depend and a exec_depend tag for "dynamic_reconfigure" +## * In this file (CMakeLists.txt): +## * add "dynamic_reconfigure" to +## find_package(catkin REQUIRED COMPONENTS ...) 
+## * uncomment the "generate_dynamic_reconfigure_options" section below +## and list every .cfg file to be processed + +## Generate dynamic reconfigure parameters in the 'cfg' folder +# generate_dynamic_reconfigure_options( +# cfg/DynReconf1.cfg +# cfg/DynReconf2.cfg +# ) + +################################### +## catkin specific configuration ## +################################### +## The catkin_package macro generates cmake config files for your package +## Declare things to be passed to dependent projects +## INCLUDE_DIRS: uncomment this if your package contains header files +## LIBRARIES: libraries you create in this project that dependent projects also need +## CATKIN_DEPENDS: catkin_packages dependent projects also need +## DEPENDS: system dependencies of this project that dependent projects also need +catkin_package( +# INCLUDE_DIRS include +# LIBRARIES wr_logic_shortrange +# CATKIN_DEPENDS other_catkin_pkg +# DEPENDS system_lib +) + +########### +## Build ## +########### + +## Specify additional locations of header files +## Your package locations should be listed before other locations +include_directories( +# include +# ${catkin_INCLUDE_DIRS} +) + +## Declare a C++ library +# add_library(${PROJECT_NAME} +# src/${PROJECT_NAME}/wr_logic_shortrange.cpp +# ) + +## Add cmake target dependencies of the library +## as an example, code may need to be generated before libraries +## either from message generation or dynamic reconfigure +# add_dependencies(${PROJECT_NAME} ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) + +## Declare a C++ executable +## With catkin_make all packages are built within a single CMake context +## The recommended prefix ensures that target names across packages don't collide +# add_executable(${PROJECT_NAME}_node src/wr_logic_shortrange_node.cpp) + +## Rename C++ executable without prefix +## The above recommended prefix causes long target names, the following renames the +## target back to the shorter version for ease of user use +## e.g. "rosrun someones_pkg node" instead of "rosrun someones_pkg someones_pkg_node" +# set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX "") + +## Add cmake target dependencies of the executable +## same as for the library above +# add_dependencies(${PROJECT_NAME}_node ${${PROJECT_NAME}_EXPORTED_TARGETS} ${catkin_EXPORTED_TARGETS}) + +## Specify libraries to link a library or executable target against +# target_link_libraries(${PROJECT_NAME}_node +# ${catkin_LIBRARIES} +# ) + +############# +## Install ## +############# + +# all install targets should use catkin DESTINATION variables +# See http://ros.org/doc/api/catkin/html/adv_user_guide/variables.html + +## Mark executable scripts (Python etc.) 
for installation +## in contrast to setup.py, you can choose the destination +# catkin_install_python(PROGRAMS +# scripts/my_python_script +# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} +# ) + +## Mark executables for installation +## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_executables.html +# install(TARGETS ${PROJECT_NAME}_node +# RUNTIME DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} +# ) + +## Mark libraries for installation +## See http://docs.ros.org/melodic/api/catkin/html/howto/format1/building_libraries.html +# install(TARGETS ${PROJECT_NAME} +# ARCHIVE DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} +# LIBRARY DESTINATION ${CATKIN_PACKAGE_LIB_DESTINATION} +# RUNTIME DESTINATION ${CATKIN_GLOBAL_BIN_DESTINATION} +# ) + +## Mark cpp header files for installation +# install(DIRECTORY include/${PROJECT_NAME}/ +# DESTINATION ${CATKIN_PACKAGE_INCLUDE_DESTINATION} +# FILES_MATCHING PATTERN "*.h" +# PATTERN ".svn" EXCLUDE +# ) + +## Mark other files for installation (e.g. launch and bag files, etc.) +# install(FILES +# # myfile1 +# # myfile2 +# DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +# ) + +############# +## Testing ## +############# + +## Add gtest based cpp test target and link libraries +# catkin_add_gtest(${PROJECT_NAME}-test test/test_wr_logic_shortrange.cpp) +# if(TARGET ${PROJECT_NAME}-test) +# target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME}) +# endif() + +## Add folders to be run by python nosetests +# catkin_add_nosetests(test) diff --git a/src/wr_logic_ai/src/wr_logic_ai/shortrange/README.md b/src/wr_logic_shortrange/README.md similarity index 100% rename from src/wr_logic_ai/src/wr_logic_ai/shortrange/README.md rename to src/wr_logic_shortrange/README.md diff --git a/src/wr_logic_shortrange/action/ShortRange.action b/src/wr_logic_shortrange/action/ShortRange.action new file mode 100644 index 00000000..df89a5ff --- /dev/null +++ b/src/wr_logic_shortrange/action/ShortRange.action @@ -0,0 +1,11 @@ +uint8 target_id +uint8 ARUCO_ID_0 = 0 +uint8 ARUCO_ID_1 = 1 +uint8 ARUCO_ID_2 = 2 +uint8 ARUCO_ID_3 = 3 +uint8 ARUCO_ID_4 = 4 +uint8 OBJ_MALLET = 5 +uint8 OBJ_BOTTLE = 6 +uint8 ANY = 9 +--- +--- diff --git a/src/wr_logic_shortrange/launch/local_test.launch b/src/wr_logic_shortrange/launch/local_test.launch new file mode 100644 index 00000000..55395899 --- /dev/null +++ b/src/wr_logic_shortrange/launch/local_test.launch @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/wr_logic_shortrange/launch/vision_test.launch b/src/wr_logic_shortrange/launch/vision_test.launch new file mode 100755 index 00000000..447a16ac --- /dev/null +++ b/src/wr_logic_shortrange/launch/vision_test.launch @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src/wr_logic_shortrange/launch/yolo_test.launch b/src/wr_logic_shortrange/launch/yolo_test.launch new file mode 100755 index 00000000..303b0d42 --- /dev/null +++ b/src/wr_logic_shortrange/launch/yolo_test.launch @@ -0,0 +1,16 @@ + + + + + + + + + + + + l + + + + diff --git a/src/wr_logic_shortrange/msg/VisionTarget.msg b/src/wr_logic_shortrange/msg/VisionTarget.msg new file mode 100644 index 00000000..db4c83be --- /dev/null +++ b/src/wr_logic_shortrange/msg/VisionTarget.msg @@ -0,0 +1,11 @@ +uint8 id +float32 x_offset +float32 distance_estimate +uint8 ARUCO_ID_0 = 0 +uint8 ARUCO_ID_1 = 1 +uint8 ARUCO_ID_2 = 2 +uint8 ARUCO_ID_3 = 3 +uint8 ARUCO_ID_4 = 4 +uint8 OBJ_MALLET = 5 +uint8 OBJ_BOTTLE = 6 +uint8 ANY = 9 diff --git 
a/src/wr_logic_shortrange/nodes/shortrange_action_server.py b/src/wr_logic_shortrange/nodes/shortrange_action_server.py new file mode 100755 index 00000000..a4a5651a --- /dev/null +++ b/src/wr_logic_shortrange/nodes/shortrange_action_server.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python3 + +##@defgroup wr_shortrange_ai +# @{ +# @defgroup wr_shortrange_ai_action_server Shortrange Action Server +# @brief Shortrange action server +# @details The shortrange action server drives the rover to a vision target. +# +# @{ + +from typing import * + +import rospy +import actionlib +from actionlib_msgs.msg import GoalStatus + +from wr_logic_shortrange.shortrange_util import TargetCache +from wr_logic_shortrange.vision_navigation import VisionNavigation +from wr_logic_shortrange.shortrange_util import ShortrangeStateEnum +from wr_logic_shortrange.msg import ShortRangeAction, ShortRangeGoal, VisionTarget +from wr_drive_msgs.msg import DriveTrainCmd + +## Distance from target to stop at (in meters) +STOP_DISTANCE_M = 2 +## Base speed for robot to move at +SPEED = 0.1 +## Factor to for applying turn to x offset value (-1 to 1) +kP = 0.05 + +# Number of seconds to keep the cache +CACHE_EXPIRY_SECS = 1 + + +class ShortrangeActionServer: + """ + @brief Shortrange ActionServer + + The shortrange state machine is implemented as an ActionServer. + """ + + def __init__(self, name: str) -> None: + """ + Initialize the shortrange state machine + + @param name (str): The name of the action + """ + + # Name of the drivetrain topic to publish to + drivetrain_topic = rospy.get_param("~motor_speeds") + # Publisher to set motor speeds + self.drive_pub = rospy.Publisher(drivetrain_topic, DriveTrainCmd, queue_size=1) + + # Name of the VisionTarget topic to subscribe to + vision_topic = rospy.get_param("~vision_topic") + self.vision_sub = rospy.Subscriber( + vision_topic, VisionTarget, self.target_callback + ) + + # TODO Reorganize to handle if we know which target is expected + self.targets_list: List[TargetCache] = [TargetCache(0, None)] * 10 + + ## The name of the action + self._action_name = name + ## SimpleActionServer using shortrange_callback to execute ShortrangeGoals + self._as = actionlib.SimpleActionServer( + self._action_name, + ShortRangeAction, + execute_cb=self.shortrange_callback, + auto_start=False, + ) + # self._as.register_preempt_callback(self.preempt_callback) + self._as.start() + + def target_callback(self, msg: VisionTarget): + # if msg.valid: # Update cache if the VisionTarget message is valid + target_data = TargetCache(rospy.get_time(), msg) + if msg.id < len(self.targets_list): + self.targets_list[msg.id] = target_data + self.targets_list[ShortRangeGoal.ANY] = target_data + + def shortrange_callback(self, goal: ShortRangeGoal): + """ + Sets the shortrange state based on the ShortRangeGoal message + + @param goal (ShortRangeGoal): ShortRangeGoal message defined in action/ShortRange.action + """ + rate = rospy.Rate(10) + success = False + target_id = goal.target_id + + # Loop for running ArUco tag approach + while not rospy.is_shutdown() and not self._as.is_preempt_requested(): + target_data = self.targets_list[target_id] + + if ( + target_data.msg is not None + and rospy.get_time() - target_data.timestamp < CACHE_EXPIRY_SECS + ): + if target_data.msg.distance_estimate < STOP_DISTANCE_M: + # Stop the rover when it is close to the ArUco tag + rospy.logerr(f"Shortrange reached target {target_data.msg.id}") + self.drive_pub.publish(0, 0) + + success = True + break + else: + # Drive the rover to the target 
if the cache was updated recently + turn = kP * target_data.msg.x_offset + self.drive_pub.publish(SPEED + turn, SPEED - turn) + else: + # Stop the rover and wait for data + self.drive_pub.publish(0, 0) + # TODO add abort condition/timeout + rospy.logerr("Shortrange waiting for data") + + rate.sleep() + + self.drive_pub.publish(0, 0) + + # Set result + if success: + self._as.set_succeeded() + else: + self._as.set_aborted() + + def preempt_callback(self): + self.drive_pub.publish(0, 0) + + +if __name__ == "__main__": + rospy.init_node("shortrange_action_server") + ShortrangeActionServer("ShortRangeActionServer") + rospy.spin() + +## @} +# @} diff --git a/src/wr_logic_shortrange/nodes/shortrange_client.py b/src/wr_logic_shortrange/nodes/shortrange_client.py new file mode 100755 index 00000000..df3f143f --- /dev/null +++ b/src/wr_logic_shortrange/nodes/shortrange_client.py @@ -0,0 +1,19 @@ +#!/usr/bin/env python3 + +import rospy +from actionlib import SimpleActionClient, GoalStatus +from wr_logic_shortrange.msg import ShortRangeAction, ShortRangeGoal + + +def main(): + rospy.init_node("shortrange_action_client") + shortrange_client = SimpleActionClient("ShortRangeActionServer", ShortRangeAction) + + shortrange_client.wait_for_server() + shortrange_client.send_goal(ShortRangeGoal(target_id=0)) + shortrange_client.wait_for_result() + print(shortrange_client.get_state() == GoalStatus.SUCCEEDED) + + +if __name__ == "__main__": + main() diff --git a/src/wr_logic_shortrange/nodes/vision_aruco_detection.py b/src/wr_logic_shortrange/nodes/vision_aruco_detection.py new file mode 100755 index 00000000..09abbbe7 --- /dev/null +++ b/src/wr_logic_shortrange/nodes/vision_aruco_detection.py @@ -0,0 +1,156 @@ +#!/usr/bin/env python3 + +##@defgroup wr_shortrange_ai +# @{ +# @defgroup wr_shortrange_ai_vision_node Vision Target Detection Node +# @brief ROS Node for publishing vision data +# @details This ROS Node reads video input from a stream or camera. +# Each frame is processed using [detect_aruco()](@ref wr_logic_shortrange.aruco_lib.detect_aruco). +# Then, the data for all detected ArUco tags is published as a VisionTarget message.
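To make the VisionTarget fields concrete, here is a small self-contained sketch of how the numbers in a VisionTarget message are produced and consumed. The constants are copied from process_corners() below, from aruco_lib.py later in this diff, and from the shortrange action server above; the pixel values are made up for illustration.

# Illustrative numbers only; the constants are taken from the code in this diff.
CAMERA_WIDTH = 1280        # frame width in pixels (vision_aruco_detection.py)
FOCAL_LENGTH_MM = 1360.17  # measured focal-length constant (aruco_lib.py)
TAG_SIZE_MM = 150          # printed ArUco tag edge length (aruco_lib.py)

# x_offset: a tag spanning pixels 700..900 sits right of the frame center.
min_x, max_x = 700, 900
x_offset = (min_x + max_x - CAMERA_WIDTH) / CAMERA_WIDTH      # 0.25, roughly in [-1, 1]

# distance_estimate: pinhole model using the longest tag edge in pixels.
side_px = max_x - min_x                                       # 200 px
distance_m = TAG_SIZE_MM * FOCAL_LENGTH_MM / (side_px * 1000) # about 1.02 m

# How the shortrange action server above consumes this (kP = 0.05, SPEED = 0.1):
kP, SPEED = 0.05, 0.1
turn = kP * x_offset
left, right = SPEED + turn, SPEED - turn                      # 0.1125 / 0.0875, drifting toward the tag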
+# @{ + +import subprocess + +import rospy +import cv2 as cv +import numpy as np + +import wr_logic_shortrange.aruco_lib as aruco_lib +from wr_logic_shortrange.msg import VisionTarget + +## Width of the camera frame, in pixels +CAMERA_WIDTH = 1280 +## Height of the camera frame, in pixels +CAMERA_HEIGHT = 720 +## Frames per second +CAMERA_FPS = 5 + + +def process_corners(target_id: int, corners: np.ndarray) -> VisionTarget: + """ + Creates a VisionTarget message based on the detected ArUco tag + + @param target_id (int): ArUco tag ID + @param corners (np.ndarray): ArUco tag corners + @returns VisionTarget: VisionTarget message defined in msg/VisionTarget.msg + """ + # Find the middle of the ArUco tag in the frame + side_lengths = [] + min_x = corners[0][0] + max_x = corners[0][0] + for i in range(len(corners)): + side_lengths.append(np.linalg.norm(corners[i - 1] - corners[i])) + min_x = min(min_x, corners[i][0]) + max_x = max(max_x, corners[i][0]) + # x offset is returned as -1 to 1 + x_offset = (min_x + max_x - CAMERA_WIDTH) / CAMERA_WIDTH + + # Estimate the distance of the ArUco tag in meters + distance_estimate = aruco_lib.estimate_distance_m(corners) + + return VisionTarget(target_id, x_offset, distance_estimate) + + +def main(): + """ + @brief Vision processing node main function + + This function initializes and runs a node to read camera input and publish ArUco tag data to a topic. + """ + rospy.init_node("vision_aruco_detection") + + rate = rospy.Rate(10) + + # Set up publisher + vision_topic = rospy.get_param("~vision_topic") + pub = rospy.Publisher(vision_topic, VisionTarget, queue_size=10) + + # Retrieve video stream from parameter server + # If no video capture is specified, try to use /dev/video0 + stream_path = rospy.get_param("~video_stream") + if stream_path is not None and stream_path != "": + cap = cv.VideoCapture(stream_path) + else: + cap = cv.VideoCapture(0) + + cap.set(cv.CAP_PROP_FPS, CAMERA_FPS) + cap.set(cv.CAP_PROP_FRAME_WIDTH, CAMERA_WIDTH) + cap.set(cv.CAP_PROP_FRAME_HEIGHT, CAMERA_HEIGHT) + + # Stream video if debugging + stream = None + debug = rospy.get_param("~debug", False) + stream_port = rospy.get_param("~debug_port", 8000) + if debug: + ffmpeg_command = [ + "ffmpeg", + # Input format + "-f", + "rawvideo", + # OpenCV stores images as BGR + "-pixel_format", + "bgr24", + # Video resolution + "-s", + f"{CAMERA_WIDTH}x{CAMERA_HEIGHT}", + # Framerate + "-r", + f"{CAMERA_FPS}", + # Pipe video to ffmpeg + "-i", + "-", + # Encode video with h264 + "-vcodec", + "libx264", + "-preset", + "ultrafast", + "-tune", + "zerolatency", + "-b:v", + "8M", + # Stream using MPEG-TS over UDP + "-f", + "mpegts", + f"udp://192.168.1.44:{stream_port}", + ] + stream = subprocess.Popen(ffmpeg_command, stdin=subprocess.PIPE) + + # Command for viewing stream + # ffplay -fflags nobuffer -flags low_delay -probesize 32 -analyzeduration 1 -strict experimental -framedrop -f mpegts -vf setpts=0 udp://192.168.1.134:8000 + + if not cap.isOpened(): + rospy.logerr("Failed to open camera") + exit() + + while not rospy.is_shutdown(): + # Read frame and publish detected targets + ret, frame = cap.read() + if not ret: + rospy.logerr("Failed to read frame") + else: + (corners, ids, _) = aruco_lib.detect_aruco(frame) + if ids is not None: + for i, target_id in enumerate(ids): + pub.publish(process_corners(target_id[0], corners[i][0])) + + closest_tag = aruco_lib.estimate_distance_m(corners[i][0]) + frame = aruco_lib.mark_aruco_tag( + frame, corners[i][0], False, closest_tag + ) + + if debug: + 
stream.stdin.write(frame.tobytes()) + + rate.sleep() + + if debug: + stream.stdin.close() + stream.wait() + cap.release() + + +if __name__ == "__main__": + main() + +## @} +# @} diff --git a/src/wr_logic_shortrange/nodes/vision_yolo_detection.py b/src/wr_logic_shortrange/nodes/vision_yolo_detection.py new file mode 100755 index 00000000..f6eaf8d0 --- /dev/null +++ b/src/wr_logic_shortrange/nodes/vision_yolo_detection.py @@ -0,0 +1,177 @@ +#!/usr/bin/env python3 + +import subprocess + +import cv2 as cv +import numpy as np +import rospy +import torch +from ultralytics import YOLO + +from wr_logic_shortrange.msg import VisionTarget + +## Width of the camera frame, in pixels +# CAMERA_WIDTH = 640 +CAMERA_WIDTH = 1280 +## Height of the camera frame, in pixels +# CAMERA_HEIGHT = 480 +CAMERA_HEIGHT = 720 +## Frames per second +CAMERA_FPS = 5 + +FOCAL_LENGTH_MM = 1360.17 + +OBJECT_NAME_TO_ID = { + "hammer": VisionTarget.OBJ_MALLET, + "bottle": VisionTarget.OBJ_BOTTLE, +} + + +def draw_bounding_box(frame: np.ndarray, box: torch.Tensor, label: str) -> np.ndarray: + x1, y1, x2, y2 = box + x1, y1, x2, y2 = int(x1), int(y1), int(x2), int(y2) + + frame = cv.rectangle(frame, (x1, y1), (x2, y2), (0, 255, 0), 2) + + return cv.putText( + frame, + text=label, + org=(x1, y1), + fontFace=cv.FONT_HERSHEY_PLAIN, + fontScale=1.5, + color=(0, 0, 255), + ) + + +def generate_vision_msg(box: torch.Tensor, label: str): + x1, y1, x2, y2 = box + x1, y1, x2, y2 = int(x1), int(y1), int(x2), int(y2) + + # Find long edge and short edge + x_edge = abs(x2 - x1) + y_edge = abs(y2 - y1) + long_edge = max(x_edge, y_edge) + short_edge = min(x_edge, y_edge) + + # x offset is returned as -1 to 1 + x_offset = (x1 + x2 - CAMERA_WIDTH) / CAMERA_WIDTH + + distance = 0 + target_id = VisionTarget.ANY + if label in OBJECT_NAME_TO_ID: + target_id = OBJECT_NAME_TO_ID[label] + if target_id == VisionTarget.OBJ_MALLET: + # length of mallet handle in mm + long_edge_distance = 305 * FOCAL_LENGTH_MM / (long_edge * 1000) + # length of mallet head in mm + short_edge_distance = 100 * FOCAL_LENGTH_MM / (short_edge * 1000) + distance = max(long_edge_distance, short_edge_distance) + elif target_id == VisionTarget.OBJ_BOTTLE: + # height of water bottle in mm + long_edge_distance = 215 * FOCAL_LENGTH_MM / (long_edge * 1000) + # diameter of water bottle in mm + short_edge_distance = 90 * FOCAL_LENGTH_MM / (short_edge * 1000) + distance = max(long_edge_distance, short_edge_distance) + + # assume we are close to object + return VisionTarget(id=target_id, x_offset=x_offset, distance=distance) + + +def main(): + rospy.init_node("vision_aruco_detection") + + rate = rospy.Rate(10) + + # Set up publisher + vision_topic = rospy.get_param("~vision_topic") + pub = rospy.Publisher(vision_topic, VisionTarget, queue_size=10) + + # Retrieve video stream from parameter server + # If no video capture is specified, try to use /dev/video0 + stream_path = rospy.get_param("~video_stream") + if stream_path is not None and stream_path != "": + cap = cv.VideoCapture(stream_path) + else: + cap = cv.VideoCapture(0) + + cap.set(cv.CAP_PROP_FPS, CAMERA_FPS) + cap.set(cv.CAP_PROP_FRAME_WIDTH, CAMERA_WIDTH) + cap.set(cv.CAP_PROP_FRAME_HEIGHT, CAMERA_HEIGHT) + + # Load model + # TODO add scripts/documentation for TensorRT + model_path = rospy.get_param("~model_path", "yolov8n.engine") + model = YOLO(model_path) + + # Stream video if debugging + stream = None + debug = rospy.get_param("~debug", False) + stream_port = rospy.get_param("~debug_port", 8000) + if debug: + ffmpeg_command = [ 
+ "ffmpeg", + # Input format + "-f", + "rawvideo", + # OpenCV stores images as BGR + "-pixel_format", + "bgr24", + # Video resolution + "-s", + f"{CAMERA_WIDTH}x{CAMERA_HEIGHT}", + # Framerate + "-r", + f"{CAMERA_FPS}", + # Pipe video to ffmpeg + "-i", + "-", + # Encode video with h264 + "-vcodec", + "libx264", + "-preset", + "ultrafast", + "-tune", + "zerolatency", + "-b:v", + "8M", + # Stream using MPEG-TS over UDP + "-f", + "mpegts", + f"udp://192.168.1.44:{stream_port}", + ] + stream = subprocess.Popen(ffmpeg_command, stdin=subprocess.PIPE) + + # Command for viewing stream + # ffplay -fflags nobuffer -flags low_delay -probesize 32 -analyzeduration 1 -strict experimental -framedrop -f mpegts -vf setpts=0 udp://192.168.1.134:8000 + + if not cap.isOpened(): + rospy.logerr("Failed to open camera") + exit() + + while not rospy.is_shutdown(): + # Read frame and publish detected targets + ret, frame = cap.read() + if not ret: + rospy.logerr("Failed to read frame") + else: + results = model(frame, stream=True) + if results is not None: + for r in results: + for box in r.boxes: + box_label = model.names[int(box.cls[0])] + if debug: + frame = draw_bounding_box(frame, box.xyxy[0], box_label) + + if debug: + stream.stdin.write(frame.tobytes()) + + rate.sleep() + + if debug: + stream.stdin.close() + stream.wait() + cap.release() + + +if __name__ == "__main__": + main() diff --git a/src/wr_logic_shortrange/package.xml b/src/wr_logic_shortrange/package.xml new file mode 100644 index 00000000..e1e72cf4 --- /dev/null +++ b/src/wr_logic_shortrange/package.xml @@ -0,0 +1,60 @@ + + + wr_logic_shortrange + 0.0.0 + The wr_logic_shortrange package + + + + + arthur + + + + + + TODO + + + + + + + + + + + + + + + + + + + + + + + + message_generation + + + rospy + + + + message_runtime + + + + + catkin + + + + + + + + diff --git a/src/wr_logic_shortrange/setup.py b/src/wr_logic_shortrange/setup.py new file mode 100644 index 00000000..38bf0daa --- /dev/null +++ b/src/wr_logic_shortrange/setup.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python3 +from distutils.core import setup + +from catkin_pkg.python_setup import generate_distutils_setup + +setup_args = generate_distutils_setup( + packages=["wr_logic_shortrange"], package_dir={"": "src"} +) + +setup(**setup_args) diff --git a/src/wr_logic_shortrange/src/wr_logic_shortrange/__init__.py b/src/wr_logic_shortrange/src/wr_logic_shortrange/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/wr_logic_ai/src/wr_logic_ai/shortrange/aruco_lib.py b/src/wr_logic_shortrange/src/wr_logic_shortrange/aruco_lib.py similarity index 65% rename from src/wr_logic_ai/src/wr_logic_ai/shortrange/aruco_lib.py rename to src/wr_logic_shortrange/src/wr_logic_shortrange/aruco_lib.py index ee19fd59..d6aea89b 100644 --- a/src/wr_logic_ai/src/wr_logic_ai/shortrange/aruco_lib.py +++ b/src/wr_logic_shortrange/src/wr_logic_shortrange/aruco_lib.py @@ -4,8 +4,12 @@ # @brief Functions for detecting ArUco tags # @{ +import math + import numpy as np import cv2 as cv +from typing import * +import os ## Initialize ArUco 4x4 marker dictionary ARUCO_DICT = cv.aruco.getPredefinedDictionary(cv.aruco.DICT_4X4_50) @@ -16,6 +20,10 @@ ## Constant for converting feet to meters FT_TO_M = 0.3048 +FOCAL_LENGTH_MM = 1360.17 + +REAL_WORLD_ARUCO_DIM = 150 # in mm + def detect_aruco(img: np.ndarray) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: """Detects ArUco markers @@ -136,6 +144,8 @@ def estimate_distance_ft(corners: np.ndarray) -> float: return SIDE_LENGTH_1FT / max(side_lengths) +# might do a 
perspective transform to get a better idea of pixel area n so distance +# TODO document a calibration procedure def estimate_distance_m(corners: np.ndarray) -> float: """ Estimate the distance to an ArUco tag in meters @@ -143,7 +153,76 @@ def estimate_distance_m(corners: np.ndarray) -> float: @param corners (np.ndarray): array containing the corners of the ArUco tag @return float: estimated distance to vision target in meters """ - return FT_TO_M * estimate_distance_ft(corners) + # return FT_TO_M * estimate_distance_ft(corners) + return estimate_distance_with_measured_focal_length(corners) + + +def estimate_distance_with_measured_focal_length(corners: np.ndarray) -> float: + """ + Estimate the distance to an ArUco tag in meters using a measured focal length + + @param corners (np.ndarray): array containing the corners of the ArUco tag + @return float: estimated distance to vision target in meters + """ + # make sure that the corners are in (4, 1, 2) shape + if corners.shape[0] == 1: + c = corners.reshape((-1, 1, 2)) + else: + c = corners + + side_lengths = max([np.linalg.norm(c[i - 1] - c[i]) for i in range(len(c))]) + + return REAL_WORLD_ARUCO_DIM * FOCAL_LENGTH_MM / (side_lengths * 1000) + + +def mark_aruco_tag(img, corners, isolate=False, tag_distance=math.inf): + """ + For a given image, this surrounds the aruco tag with a green box. If + isolate is true, then a black mask is applied to non aruco tag parts of + image. + + @param img (np.ndarray): array containing image + @param corners (np.ndarray): array containing the corners of the ArUco tag + @param isolate (bool): boolean to decide whether the rest of the image needs to masked + @return np.ndarray: edited image + """ + if img is None: + raise Exception("bad image inputted") + if len(corners) <= 0: + raise Exception("bad corners, so no aruco tag detected") + + # make sure that the corners are in (4, 1, 2) shape + if corners.shape[0] == 1: + c = corners.reshape((-1, 1, 2)) + else: + c = corners + + # use a copy of the image + res_img = img.copy() + + # get the contours from the corners and then cast it to int + contours = [c.astype(np.int32)] + + # draw the green box + cv.polylines(res_img, contours, True, (0, 255, 0), 4) + + # create a black mask around the aruco tag and add it to the image + if isolate: + mask = np.zeros(res_img.shape, dtype=np.uint8) + cv.fillPoly(mask, pts=contours, color=(255, 255, 255)) + res_img = cv.bitwise_and(res_img, mask) + + if tag_distance != math.inf: + res_img = cv.putText( + res_img, + text=f"Distance: {tag_distance:.2f} meters", + org=(contours[0][0][0], contours[0][0][1]), + fontFace=cv.FONT_HERSHEY_PLAIN, + fontScale=1.5, + color=(0, 0, 255), + ) + + return res_img ## @} diff --git a/src/wr_logic_ai/src/wr_logic_ai/shortrange/shortrange_util.py b/src/wr_logic_shortrange/src/wr_logic_shortrange/shortrange_util.py similarity index 96% rename from src/wr_logic_ai/src/wr_logic_ai/shortrange/shortrange_util.py rename to src/wr_logic_shortrange/src/wr_logic_shortrange/shortrange_util.py index 25692a45..bbbd50a7 100644 --- a/src/wr_logic_ai/src/wr_logic_ai/shortrange/shortrange_util.py +++ b/src/wr_logic_shortrange/src/wr_logic_shortrange/shortrange_util.py @@ -7,7 +7,7 @@ from enum import Enum, Flag from typing import Tuple -from wr_logic_ai.msg import VisionTarget +from wr_logic_shortrange.msg import VisionTarget class ShortrangeStateEnum(Flag): diff --git a/src/wr_logic_shortrange/src/wr_logic_shortrange/vision_navigation.py b/src/wr_logic_shortrange/src/wr_logic_shortrange/vision_navigation.py new 
file mode 100755 index 00000000..98d65bfb --- /dev/null +++ b/src/wr_logic_shortrange/src/wr_logic_shortrange/vision_navigation.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python + +##@defgroup wr_shortrange_ai +# @{ +# @defgroup wr_shortrange_ai_navigation Vision Navigation +# @brief ShortrangeState that subscribes to vision data and drives to the ArUco tag +# @{ + +from typing import * + +import rospy + +from wr_logic_shortrange.shortrange_util import ( + ShortrangeStateEnum, + ShortrangeState, + TargetCache, +) +from wr_logic_shortrange.msg import VisionTarget +from wr_drive_msgs.msg import DriveTrainCmd + +## Distance from target to stop at (in meters) +STOP_DISTANCE_M = 1.5 +## Base speed for robot to move at +SPEED = 0.1 +## Factor for applying turn +kP = 0.01 + +# Number of seconds to keep the cache +CACHE_EXPIRY_SECS = 3 + + +class VisionNavigation(ShortrangeState): + """ + A ShortrangeState class for navigating to an ArUco tag + """ + + def __init__(self) -> None: + ## Boolean for state progress. + ## True if the state is done, false otherwise. + self.is_done = False + ## Boolean for the state result. + ## True if the state is successful, false otherwise. + self.success = False + ## TargetCache variable to store the last seen target data + self.target_cache = None + + def target_callback(self, msg: VisionTarget): + # Update the cache whenever a VisionTarget message arrives + # (the VisionTarget message in this package has no valid field) + self.target_cache = TargetCache(rospy.get_time(), msg) + + def run(self) -> ShortrangeStateEnum: + """ + Run target navigation logic. + Creates a subscriber for the VisionTarget topic. + + @return ShortrangeStateEnum: The next state to execute + """ + rate = rospy.Rate(10) + + # Name of the VisionTarget topic to subscribe to + vision_topic = rospy.get_param("~vision_topic") + sub = rospy.Subscriber(vision_topic, VisionTarget, self.target_callback) + + # Name of the drivetrain topic to publish to + drivetrain_topic = rospy.get_param("~motor_speeds") + # Publisher to set motor speeds + drivetrain_pub = rospy.Publisher(drivetrain_topic, DriveTrainCmd, queue_size=1) + + # Loop for running ArUco tag approach + while not rospy.is_shutdown() and not self.is_done: + if ( + self.target_cache is not None + and rospy.get_time() - self.target_cache.timestamp < CACHE_EXPIRY_SECS + ): + if self.target_cache.msg.distance_estimate < STOP_DISTANCE_M: + # Stop the rover when it is close to the ArUco tag + rospy.loginfo(f"Reached target {self.target_cache.msg.id}") + drivetrain_pub.publish(0, 0) + + self.is_done = True + self.success = True + else: + # Drive the rover to the target if the cache was updated recently + # turn = kP * self.target_cache.msg.x_offset + # drivetrain_pub.publish(SPEED + turn, SPEED - turn) + + if self.target_cache.msg.x_offset > 50: + drivetrain_pub.publish(SPEED, 0) + elif self.target_cache.msg.x_offset < -50: + drivetrain_pub.publish(0, SPEED) + else: + drivetrain_pub.publish(SPEED, SPEED) + else: + # Turn the rover to look for the ArUco tag + # drivetrain_pub.publish(SPEED, -SPEED) + drivetrain_pub.publish(0, 0) + + rate.sleep() + + sub.unregister() + drivetrain_pub.unregister() + + if self.success: + return ShortrangeStateEnum.SUCCESS + return ShortrangeStateEnum.FAIL + + +## @} +# @} diff --git a/src/wr_logic_teleop_arm/nodes/bad_arm_driver.py b/src/wr_logic_teleop_arm/nodes/bad_arm_driver.py index cae8b151..18a1f676 100755 --- a/src/wr_logic_teleop_arm/nodes/bad_arm_driver.py +++
b/src/wr_logic_teleop_arm/nodes/bad_arm_driver.py @@ -6,6 +6,13 @@ from std_msgs.msg import Bool, Float32, Int16 import time +TURNTABLE_SPEED_FACTOR = 0.2 +SHOULDER_SPEED_FACTOR = 0.4 +ELBOW_SPEED_FACTOR = 0.3 +FOREARM_SPEED = 16384 +# WRIST_SPEED = 12288 +WRIST_SPEED = 14746 + class Watchdog: def __init__(self, timeout: float): self._timeout = timeout @@ -29,8 +36,10 @@ def _msg_cb(self, msg: Any): self.data = cast(T, msg.data) w.pet() -def float_to_int16_msg(value: float) -> Int16: - return Int16(round(value * 32767)) +def float_to_int16_msg(value: float, factor: float = 1.0) -> Int16: + factor = min(abs(factor), 1.0) + return Int16(round(factor * value * 32767)) + def main(): rospy.init_node('bad_arm_driver') @@ -52,9 +61,10 @@ def main(): bumper_r: SubBuf[bool] = SubBuf(f'{controller_ns}/button/shoulder_r', Bool) pov_x: SubBuf[float] = SubBuf(f'{controller_ns}/axis/pov_x', Float32) pov_y: SubBuf[float] = SubBuf(f'{controller_ns}/axis/pov_y', Float32) - btn_a: SubBuf[bool] = SubBuf(f'{controller_ns}/button/a', Bool) - btn_b: SubBuf[bool] = SubBuf(f'{controller_ns}/button/b', Bool) - btn_y: SubBuf[bool] = SubBuf(f'{controller_ns}/button/y', Bool) + #btn_a: SubBuf[bool] = SubBuf(f'{controller_ns}/button/a', Bool) + #btn_b: SubBuf[bool] = SubBuf(f'{controller_ns}/button/b', Bool) + btn_x: SubBuf[bool] = SubBuf(f'{controller_ns}/button/x', Bool) + #btn_y: SubBuf[bool] = SubBuf(f'{controller_ns}/button/y', Bool) # create wroboclaw pubs pub_turntable = rospy.Publisher(f'{claw_ns_0}/cmd/left', Int16, queue_size=4) @@ -69,22 +79,31 @@ def main(): sleeper = rospy.Rate(spin_rate) while not rospy.is_shutdown(): if not w.isMad(): + shoulder_max_speed = SHOULDER_SPEED_FACTOR + elbow_max_speed = ELBOW_SPEED_FACTOR + + # Speed control for arm shoulder and elbow joints + if btn_x.data is not None and btn_x.data: + shoulder_max_speed = 1 + elbow_max_speed = 1 + if trigger_l.data is not None and trigger_r.data is not None: - pub_turntable.publish(float_to_int16_msg(trigger_r.data - trigger_l.data)) + pub_turntable.publish(float_to_int16_msg(trigger_r.data - trigger_l.data, TURNTABLE_SPEED_FACTOR)) if stick_l.data is not None: - pub_shoulder.publish(float_to_int16_msg(stick_l.data)) + pub_shoulder.publish(float_to_int16_msg(stick_l.data, shoulder_max_speed)) if stick_r.data is not None: - pub_elbow.publish(float_to_int16_msg(stick_r.data)) + pub_elbow.publish(float_to_int16_msg(stick_r.data, elbow_max_speed)) + # Bumpers used for cam mast control if bumper_l.data is not None and bumper_r.data is not None: if bumper_l.data: if bumper_r.data: pub_forearm.publish(Int16(0)) - pub_forearm.publish(Int16(32767)) + pub_forearm.publish(Int16(-FOREARM_SPEED)) elif bumper_r.data: - pub_forearm.publish(Int16(-32768)) + pub_forearm.publish(Int16(FOREARM_SPEED)) else: pub_forearm.publish(Int16(0)) @@ -92,19 +111,21 @@ def main(): wrist_spd_a = 0 wrist_spd_b = 0 if pov_x.data > 0: - wrist_spd_a = 1 - wrist_spd_b = 1 - elif pov_x.data < 0: wrist_spd_a = -1 wrist_spd_b = -1 + elif pov_x.data < 0: + wrist_spd_a = 1 + wrist_spd_b = 1 elif pov_y.data > 0: wrist_spd_a = 1 wrist_spd_b = -1 elif pov_y.data < 0: wrist_spd_a = -1 wrist_spd_b = 1 - pub_wrist_a.publish(Int16(24576 * wrist_spd_a)) - pub_wrist_b.publish(Int16(-24576 * wrist_spd_b)) + pub_wrist_a.publish(Int16(WRIST_SPEED * wrist_spd_a)) + pub_wrist_b.publish(Int16(-WRIST_SPEED * wrist_spd_b)) + + # End effector is handled by end_effector_controller.py # if btn_a.data is not None and btn_b.data is not None: # if btn_a.data: diff --git 
a/src/wr_logic_teleop_arm/src/end_effector_controller.py b/src/wr_logic_teleop_arm/nodes/end_effector_controller.py similarity index 57% rename from src/wr_logic_teleop_arm/src/end_effector_controller.py rename to src/wr_logic_teleop_arm/nodes/end_effector_controller.py index 5d5b22fc..31e27630 100755 --- a/src/wr_logic_teleop_arm/src/end_effector_controller.py +++ b/src/wr_logic_teleop_arm/nodes/end_effector_controller.py @@ -3,6 +3,7 @@ import std_msgs.msg as std_msgs import time + class ClawController: OPEN_SPEED = 32767 CLOSE_SPEED = -32768 @@ -21,22 +22,55 @@ def close_claw(self, close): self._publish_claw_speed() def _publish_claw_speed(self): - speed = std_msgs.Int16(ClawController.OPEN_SPEED if self._open_pressed else (ClawController.CLOSE_SPEED if self._close_pressed else 0)) + speed = std_msgs.Int16( + ClawController.OPEN_SPEED + if self._open_pressed + else (ClawController.CLOSE_SPEED if self._close_pressed else 0) + ) self._publisher.publish(speed) + +class SolenoidController: + OFF = 0 + ON = 32767 + + def __init__(self, topicName: str): + self._publisher = rospy.Publisher(topicName, std_msgs.Int16, queue_size=1) + + def actuate_solenoid(self, open): + speed = SolenoidController.ON if open else SolenoidController.OFF + self._publisher.publish(speed) + + +""" def actuateSolenoid(msg: std_msgs.Bool): with open("/sys/class/gpio/gpio6/value", "w") as gpioFile: gpioFile.write("1" if msg.data else "0") gpioFile.flush() +""" if __name__ == "__main__": rospy.init_node("end_effector_controller") claw = ClawController("/hsi/roboclaw/aux3/cmd/left") - openSubscriber = rospy.Subscriber("/hci/arm/gamepad/button/a", std_msgs.Bool, lambda msg: claw.open_claw(msg.data)) - closeSubscriber = rospy.Subscriber("/hci/arm/gamepad/button/b", std_msgs.Bool, lambda msg: claw.close_claw(msg.data)) + openSubscriber = rospy.Subscriber( + "/hci/arm/gamepad/button/a", std_msgs.Bool, lambda msg: claw.open_claw(msg.data) + ) + closeSubscriber = rospy.Subscriber( + "/hci/arm/gamepad/button/b", + std_msgs.Bool, + lambda msg: claw.close_claw(msg.data), + ) + solenoid = SolenoidController("/hsi/roboclaw/aux3/cmd/right") + solenoidSubscriber = rospy.Subscriber( + "/hci/arm/gamepad/button/y", + std_msgs.Bool, + lambda msg: solenoid.actuate_solenoid(msg.data), + ) + + """ rospy.loginfo("Starting GPIO setup...") with open("/sys/class/gpio/export", "w") as exportFile: @@ -49,8 +83,11 @@ def actuateSolenoid(msg: std_msgs.Bool): rospy.loginfo("GPIO setup complete!") solenoidSubscriber = rospy.Subscriber("/hci/arm/gamepad/button/y", std_msgs.Bool, actuateSolenoid, queue_size=1) + """ rospy.spin() - + + """ with open("/sys/class/gpio/unexport", "w") as unexportFile: - unexportFile.write("6") \ No newline at end of file + unexportFile.write("6") + """ diff --git a/src/wr_logic_teleop_science/CMakeLists.txt b/src/wr_logic_teleop_science/CMakeLists.txt index 856b2cbd..3d87c0a9 100644 --- a/src/wr_logic_teleop_science/CMakeLists.txt +++ b/src/wr_logic_teleop_science/CMakeLists.txt @@ -10,7 +10,7 @@ project(wr_logic_teleop_science) find_package(catkin REQUIRED COMPONENTS roscpp rospy - wresponsecontrol + #wresponsecontrol ) ## System dependencies are found with CMake's conventions diff --git a/src/wrevolution b/src/wrevolution index 1157b9c8..482403d5 160000 --- a/src/wrevolution +++ b/src/wrevolution @@ -1 +1 @@ -Subproject commit 1157b9c85b4004aa3a521b364b9562890aa70b6b +Subproject commit 482403d5c064c4c191c55cadfd2322c3db5e0dfb
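As a closing illustration of the float_to_int16_msg() change in bad_arm_driver.py above: the helper clamps the speed factor and rescales the joystick value into the Int16 range the Roboclaw driver expects. A standalone check is sketched below; it mirrors the helper rather than importing it, requires the ROS std_msgs package, and uses hypothetical input values with the factor constants added in this diff.

#!/usr/bin/env python3
# Standalone check of the speed-factor scaling added to bad_arm_driver.py (hypothetical inputs).
from std_msgs.msg import Int16

TURNTABLE_SPEED_FACTOR = 0.2
SHOULDER_SPEED_FACTOR = 0.4

def float_to_int16_msg(value: float, factor: float = 1.0) -> Int16:
    # Mirrors the helper in bad_arm_driver.py: clamp the factor, then scale into the Int16 range.
    factor = min(abs(factor), 1.0)
    return Int16(round(factor * value * 32767))

print(float_to_int16_msg(0.5, SHOULDER_SPEED_FACTOR).data)   # 6553: half stick at the 40% shoulder cap
print(float_to_int16_msg(1.0, TURNTABLE_SPEED_FACTOR).data)  # 6553: full trigger at the 20% turntable cap
print(float_to_int16_msg(-1.0, SHOULDER_SPEED_FACTOR).data)  # -13107: sign (direction) is preserved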