From 3187a5a70253797b00ab616e3c831caa9af898a4 Mon Sep 17 00:00:00 2001 From: Ahmed Mahfouz Date: Fri, 11 Oct 2024 11:38:42 +0200 Subject: [PATCH 1/9] refactor remove docker compose Will move it inside the nano folder --- docker-compose.yml | 161 --------------------------------------------- 1 file changed, 161 deletions(-) delete mode 100644 docker-compose.yml diff --git a/docker-compose.yml b/docker-compose.yml deleted file mode 100644 index 0377c775..00000000 --- a/docker-compose.yml +++ /dev/null @@ -1,161 +0,0 @@ -version: '2' -volumes: - volume_zerotier_config: - volume_wireguard_config: - volume_ftpd_config: - volume_mongodb_config: - volume_mongodb_data: - volume_byodr_config: - volume_byodr_sockets: - volume_byodr_sessions: -services: - zerotier: - cpuset: '0' - image: zyclonite/zerotier:1.6.6 - restart: always - network_mode: host - devices: - - '/dev/net/tun' - cap_add: - - SYS_ADMIN - - NET_ADMIN - - CAP_SYS_RAWIO - volumes: - - volume_zerotier_config:/var/lib/zerotier-one:rw - wireguard: - cpuset: '0' - image: masipcat/wireguard-go - container_name: wireguard - restart: always - network_mode: host - devices: - - '/dev/net/tun' - cap_add: - - SYS_ADMIN - - NET_ADMIN - - CAP_SYS_RAWIO - volumes: - - volume_wireguard_config:/etc/wireguard:rw - httpd: - cpuset: '0' - build: - context: . - dockerfile: httpd/Dockerfile - restart: always - network_mode: host - command: ['python', 'wrap.py'] - stop_signal: SIGKILL - volumes: - - volume_byodr_config:/config:rw - ftpd: - cpuset: '0' - build: - context: . - dockerfile: ftpd/Dockerfile - restart: always - command: ['python3', 'wrap.py'] - stop_signal: SIGKILL - ports: - - '21:21' - - '30000-30009:30000-30009' - volumes: - - volume_ftpd_config:/etc/pureftpd:rw - - volume_byodr_sessions:/home/ftpuser:rw - rosnode: - cpuset: '0' - build: - context: . 
- dockerfile: rosnode/Dockerfile - restart: always - command: ['python3', 'app.py', '--name', 'rosnode'] - network_mode: host - stop_signal: SIGKILL - volumes: - - volume_byodr_sockets:/byodr:rw - - volume_byodr_config:/config:ro - mongodb: - cpuset: '0' - build: - context: . - dockerfile: mongodb/Dockerfile - restart: always - command: ['python3', 'wrap.py'] - network_mode: host - stop_signal: SIGKILL - environment: - MONGO_INITDB_ROOT_USERNAME: admin - MONGO_INITDB_ROOT_PASSWORD: robot - volumes: - - volume_mongodb_config:/config:rw - - volume_mongodb_data:/data/db:rw - teleop: - cpuset: '0' - build: - context: . - dockerfile: teleop/Dockerfile - restart: always - command: ['sh', '-c', 'python3 -m teleop.app --name teleop --routes /sessions/routes'] - network_mode: host - depends_on: - - mongodb - environment: - LD_PRELOAD: libgomp.so.1 - volumes: - - volume_byodr_sockets:/byodr:rw - - volume_byodr_config:/config:rw - - volume_byodr_sessions:/sessions:rw - vehicle: - cpuset: '1' - build: - context: . - dockerfile: vehicles/rover/Dockerfile - restart: always - privileged: true # NvMedia device creation for omx decoder. - network_mode: host - environment: - LD_PRELOAD: libgomp.so.1 - volumes: - - volume_byodr_sockets:/byodr:rw - - volume_byodr_config:/config:rw - pilot: - cpuset: '2' - build: - context: . - dockerfile: pilot/Dockerfile - restart: always - privileged: true # Access to usb devices without udev rules. - command: ['python3', '-m', 'pilot.app', '--name', 'pilot', '--routes', '/sessions/routes'] - ports: - - '8082:8082' - environment: - LD_PRELOAD: libgomp.so.1 - volumes: - - volume_byodr_sockets:/byodr:rw - - volume_byodr_config:/config:ro - - volume_byodr_sessions:/sessions:ro - inference: - # cpuset: "3" # - not compatible with onnx runtime - use env omp_places. - build: - context: . 
- dockerfile: inference/runtime-cp36-jp441.dockerfile - restart: always - privileged: true - command: ['python3', '-m', 'inference.app', '--user', '/sessions/models', '--routes', '/sessions/routes'] - environment: - LD_PRELOAD: libgomp.so.1 - OMP_PLACES: '{3}' - volumes: - - volume_byodr_sockets:/byodr:rw - - volume_byodr_config:/config:ro - following: - cpuset: '3,2' - build: - context: . - dockerfile: following/Dockerfile - restart: always - privileged: true - network_mode: host - stop_signal: SIGKILL - volumes: - - volume_byodr_sockets:/byodr:rw - - volume_byodr_config:/config:rw From fb1eceb83d6804ca146eb4b0f60c3fabaf907edd Mon Sep 17 00:00:00 2001 From: Ahmed Mahfouz Date: Fri, 11 Oct 2024 13:03:01 +0200 Subject: [PATCH 2/9] feat: to handle common folder I explained what the file does in the pr --- cli_wrapper.py | 131 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 131 insertions(+) create mode 100644 cli_wrapper.py diff --git a/cli_wrapper.py b/cli_wrapper.py new file mode 100644 index 00000000..2ed2d133 --- /dev/null +++ b/cli_wrapper.py @@ -0,0 +1,131 @@ +import os +import shutil +import sys +import subprocess +import filecmp + +# Optional: For colored output in the terminal +try: + from colorama import Fore, Style + + colorama_installed = True +except ImportError: + colorama_installed = False + +common_folder_name = "BYODR_utils" + + +def compare_common_dirs(dir1, dir2): + differences = [] + dirs_cmp = filecmp.dircmp(dir1, dir2) + # Files that differ + for file_name in dirs_cmp.diff_files: + differences.append(f"Files differ: {os.path.join(dir1, file_name)} and {os.path.join(dir2, file_name)}") + # Files only in dir1 + for file_name in dirs_cmp.left_only: + differences.append(f"File only in original {common_folder_name} folder: {os.path.join(dir1, file_name)}") + # Files only in dir2 + for file_name in dirs_cmp.right_only: + differences.append(f"File only in '{dir2}' folder: {os.path.join(dir2, file_name)}") + # Recursively compare 
subdirectories + for sub_dir in dirs_cmp.common_dirs: + sub_differences = compare_common_dirs(os.path.join(dir1, sub_dir), os.path.join(dir2, sub_dir)) + differences.extend(sub_differences) + return differences + + +def compare_common_dirs_main(common_dir, dst_common_dir): + differences = compare_common_dirs(common_dir, dst_common_dir) + if differences: + if colorama_installed: + print(Fore.RED + f"Error: {common_folder_name} folder already exists in '{os.path.dirname(dst_common_dir)}', and there are differences between the original and the copied folder." + Style.RESET_ALL) + else: + print(f"Error: {common_folder_name} folder already exists in '{os.path.dirname(dst_common_dir)}', and there are differences between the original and the copied folder.") + print("Differences found:") + for diff in differences: + print(diff) + return False # Do not proceed + else: + print(f"Warning: {common_folder_name} folder already exists in '{os.path.dirname(dst_common_dir)}', but the files are the same. Proceeding.") + return True # Proceed + + +def compare_and_handle_on_exit(common_dir, dst_common_dir): + differences = compare_common_dirs(common_dir, dst_common_dir) + if differences: + # Differences found; do not delete the copied 'common' folder + if colorama_installed: + print(Fore.RED + f"Warning: Changes were made to the {common_folder_name} folder in '{os.path.dirname(dst_common_dir)}'. It will not be deleted." + Style.RESET_ALL) + else: + print(f"Warning: Changes were made to the {common_folder_name} folder in '{os.path.dirname(dst_common_dir)}'. 
It will not be deleted.") + print("Differences found:") + for diff in differences: + print(diff) + else: + # No differences; safe to delete the copied 'common' folder + if os.path.exists(dst_common_dir): + shutil.rmtree(dst_common_dir) + print(f"Removed {common_folder_name} folder from '{os.path.basename(dst_common_dir)}'.") + else: + print(f"No {common_folder_name} folder to remove from '{os.path.basename(dst_common_dir)}'.") + + +def main(): + # Check if at least two arguments are provided (script name, target directory) + if len(sys.argv) < 2: + print("Usage: python cli_wrapper.py [balena_command] [balena_args]") + sys.exit(1) + + # Get the target directory from the first argument + target_dir_name = sys.argv[1] + target_dir = os.path.join(os.getcwd(), target_dir_name) + # Ensure the target directory exists + if not os.path.isdir(target_dir): + print(f"Error: Target directory '{target_dir_name}' does not exist.") + sys.exit(1) + + # Define paths + common_dir = os.path.join(os.getcwd(), common_folder_name) + dst_common_dir = os.path.join(target_dir, common_folder_name) + + # Extract the Balena CLI command and arguments + balena_command_args = sys.argv[2:] + if not balena_command_args: + print("Error: No Balena CLI command provided.") + print("Usage: python cli_wrapper.py [balena_command] [balena_args]") + sys.exit(1) + + # Step 1: Check if 'common' directory exists in target directory + if os.path.exists(dst_common_dir): + proceed = compare_common_dirs_main(common_dir, dst_common_dir) + if not proceed: + # Exit the script due to differences + sys.exit(1) + else: + # No existing 'common_folder_name' folder in the target directory + pass + + try: + # Step 2: Copy the 'common_folder_name' directory into the target directory + # Remove the existing 'common' folder in target directory if it exists + if os.path.exists(dst_common_dir): + shutil.rmtree(dst_common_dir) + # Copy the 'common' folder + shutil.copytree(common_dir, dst_common_dir) + + # Step 3: Run the Balena 
CLI command + # Build the Balena CLI command + balena_command = ["balena"] + balena_command_args + + # Set the working directory to the target directory + subprocess.run(balena_command, cwd=target_dir) + except KeyboardInterrupt: + # Handle graceful shutdown (Ctrl+C) + print("\nScript interrupted by user.") + finally: + # Compare and handle the 'common' folder upon exit + compare_and_handle_on_exit(common_dir, dst_common_dir) + + +if __name__ == "__main__": + main() From 38ffdfe497e2292e70fc7e19e6448987e1a7f734 Mon Sep 17 00:00:00 2001 From: Ahmed Mahfouz Date: Fri, 11 Oct 2024 13:12:10 +0200 Subject: [PATCH 3/9] Change directory and add readme for each runtime folder --- .../JETSON_specific}/gpio_relay.py | 0 .../JETSON_specific}/utilities.py | 0 .../PI_specific/gpio_relay.py | 0 .../PI_specific/utilities.py | 0 .../common/__init__.py | 0 .../BYODR_utils => BYODR_utils}/common/ipc.py | 0 .../utils => BYODR_utils/common}/location.py | 0 .../common/navigate.py | 0 .../utils => BYODR_utils/common}/option.py | 0 .../common/protocol.py | 0 .../BYODR_utils => BYODR_utils}/common/ssh.py | 0 .../utils => BYODR_utils/common}/testing.py | 0 .../common/usbrelay.py | 0 .../utils => BYODR_utils/common}/video.py | 0 .../utils => BYODR_utils/common}/websocket.py | 0 README.md | 5 + .../docker}/bionic-cp39-gstreamer.dockerfile | 0 .../bullseye-cp39-gstreamer.dockerfile | 0 .../docker}/jp42-nano-cp36-base.dockerfile | 0 .../jp42-nano-cp36-tensorrt.dockerfile | 0 .../jp42-nano-cp36-tf115-opencv440.dockerfile | 0 .../docker}/jp42-nano-cp36-tf115.dockerfile | 0 .../docker}/pi-cp27-gpio.dockerfile | 0 .../docker}/pi-cp37-odrive.dockerfile | 0 .../docker}/pure-ftpd.dockerfile | 0 .../docker}/tf113-jp43.dockerfile | 0 .../docker}/zerotier.dockerfile | 0 cli_wrapper.py | 272 ++++++++----- common/byodr/utils/__init__.py | 200 ---------- common/byodr/utils/gpio_relay.py | 43 -- common/byodr/utils/ipc.py | 309 --------------- common/byodr/utils/navigate.py | 373 ------------------ 
common/byodr/utils/protocol.py | 58 --- common/byodr/utils/ssh.py | 146 ------- common/byodr/utils/usbrelay.py | 252 ------------ docker-compose.carla.yml | 64 --- docker-compose.override.yml | 5 - docker-compose.test.yml | 53 --- docs/Services_Documentation.txt | 222 ----------- jetson_runtime/README.md | 106 +++++ jetson_runtime/docker-compose.yml | 161 ++++++++ .../following}/Dockerfile | 0 .../following}/app.py | 0 .../following}/botsort.yaml | 0 .../following}/fol_utils.py | 0 .../following}/models/480_20k.pt | Bin ...v8_20240717_coco(imgsz480x640_FP16).engine | Bin .../models/yolov8n(256*320_FB16).engine | Bin .../following}/models/yolov8n.engine | Bin .../yolov8n_d_20240722(256*320_FB16).engine | Bin .../yolov8n_d_20240722(480*640_FB16).engine | Bin .../following}/models/yolov8n_d_20240722.yaml | 0 {ftpd => jetson_runtime/ftpd}/Dockerfile | 0 {ftpd => jetson_runtime/ftpd}/create_user.sh | 0 {ftpd => jetson_runtime/ftpd}/wrap.py | 0 {httpd => jetson_runtime/httpd}/Dockerfile | 0 .../httpd}/certs/lan.conf | 0 .../httpd}/certs/readme.txt | 0 .../httpd}/haproxy.template | 0 .../httpd}/haproxy_ssl.template | 0 {httpd => jetson_runtime/httpd}/wrap.py | 0 .../archive/tf113-cp27-jp43.dockerfile | 0 .../archive/tf113-cp27-x86.dockerfile | 0 .../archive/tf115-cp36-jp42.dockerfile | 0 .../archive/tf115-cp36-x86.dockerfile | 0 .../inference/inference}/__init__.py | 0 .../inference}/inference/app.py | 0 .../inference}/inference/image.py | 0 .../inference}/inference/tests.py | 0 .../inference}/inference/torched.py | 0 .../inference}/pytest.ini | 0 .../inference}/runtime-cp36-jp441.dockerfile | 0 .../inference}/runtime-cp36-x86.dockerfile | 0 .../mongodb}/Dockerfile | 0 {mongodb => jetson_runtime/mongodb}/wrap.py | 0 {pilot => jetson_runtime/pilot}/Dockerfile | 0 .../pilot/pilot}/__init__.py | 0 {pilot => jetson_runtime/pilot}/pilot/app.py | 0 {pilot => jetson_runtime/pilot}/pilot/core.py | 0 .../pilot}/pilot/relay.py | 0 .../pilot}/pilot/tests.py | 0 
.../pilot}/pilot/tests_relay.py | 0 {pilot => jetson_runtime/pilot}/pilot/web.py | 0 {pilot => jetson_runtime/pilot}/pytest.ini | 0 .../rosnode}/Dockerfile | 0 {rosnode => jetson_runtime/rosnode}/app.py | 0 {teleop => jetson_runtime/teleop}/Dockerfile | 0 .../teleop}/htm/jmuxer/jmuxer-21.07.min.js | 0 .../teleop}/htm/jmuxer/z_index_video_mux.js | 0 .../teleop}/htm/static/CSS/menu_controls.css | 0 .../teleop}/htm/static/CSS/menu_logbox.css | 0 .../teleop}/htm/static/CSS/menu_settings.css | 0 .../htm/static/CSS/mobileController.css | 0 .../teleop}/htm/static/CSS/style.css | 0 .../teleop}/htm/static/CSS/theme_mode.css | 0 .../teleop}/htm/static/CSS/user_menu.css | 0 .../htm/static/JS/Index/index_a_utils.js | 0 .../htm/static/JS/Index/index_b_gamepad.js | 0 .../htm/static/JS/Index/index_c_screen.js | 0 .../htm/static/JS/Index/index_d_navigator.js | 0 .../htm/static/JS/Index/index_e_teleop.js | 0 .../htm/static/JS/Index/index_video_hlp.js | 0 .../htm/static/JS/Index/index_video_mjpeg.js | 0 .../teleop}/htm/static/JS/index.js | 0 .../mobileController_f_auto_navigation.js | 0 .../feature/mobileController_f_confidence.js | 0 .../feature/mobileController_f_following.js | 0 .../mobileController_f_maneuver_training.js | 0 .../mobileController_a_app.js | 0 .../mobileController_b_shape_square.js | 0 .../mobileController_c_logic.js | 0 .../mobileController_z_state.js | 0 .../htm/static/JS/performance-polyfill.js | 0 .../teleop}/htm/static/JS/router.js | 0 .../htm/static/JS/userMenu/menu_controls.js | 0 .../htm/static/JS/userMenu/menu_logbox.js | 0 .../htm/static/JS/userMenu/menu_settings.js | 0 .../static/assets/pictures/VOR_Logo_dark.png | Bin .../static/assets/pictures/VOR_Logo_light.png | Bin .../assets/pictures/VR - pattern tree 1.png | Bin .../assets/pictures/VR-pattern_tree_1.png | Bin .../assets/pictures/ai_training_mode.png | Bin .../static/assets/pictures/autopilot_mode.png | Bin .../htm/static/assets/pictures/caret.png | Bin .../assets/pictures/expand_collapse.png | 
Bin .../static/assets/pictures/follow_mode.png | Bin .../assets/pictures/im_no_image_available.png | Bin .../static/assets/pictures/manual_mode.png | Bin .../assets/pictures/manual_mode_nav.png | Bin .../assets/pictures/map_recognition.png | Bin .../assets/pictures/nav_advanved_settings.png | Bin .../static/assets/pictures/nav_background.png | Bin .../assets/pictures/nav_control_by_phone.png | Bin .../assets/pictures/nav_controls_menu.png | Bin .../assets/pictures/nav_training_events.png | Bin .../static/assets/pictures/ps4_mapping.jpg | Bin .../static/assets/static_sora/Sora-Bold.ttf | Bin .../assets/static_sora/Sora-ExtraBold.ttf | Bin .../assets/static_sora/Sora-ExtraLight.ttf | Bin .../static/assets/static_sora/Sora-Light.ttf | Bin .../static/assets/static_sora/Sora-Medium.ttf | Bin .../assets/static_sora/Sora-Regular.ttf | Bin .../assets/static_sora/Sora-SemiBold.ttf | Bin .../static/assets/static_sora/Sora-Thin.ttf | Bin .../static_sora/Sora-VariableFont_wght.ttf | Bin .../htm/static/assets/svg/MC_VOR_logo.svg | 0 .../htm/static/assets/svg/NC_VOR_logo.svg | 0 .../assets/svg/advanced_settings_menu.svg | 0 .../static/assets/svg/ai_training_mode.svg | 0 .../htm/static/assets/svg/autopilot_mode.svg | 0 .../static/assets/svg/control_by_phone.svg | 0 .../assets/svg/controls_settings_menu.svg | 0 .../htm/static/assets/svg/follow_mode.svg | 0 .../htm/static/assets/svg/manual_mode.svg | 0 .../htm/static/assets/svg/map_recognition.svg | 0 .../assets/svg/training_settings_menu.svg | 0 .../static/external/bootstrap-theme.min.css | 0 .../htm/static/external/bootstrap.min.css | 0 .../htm/static/external/bootstrap.min.js | 0 .../htm/static/external/font-awesome.min.css | 0 .../htm/static/external/http-live-player.js | 0 .../htm/static/external/jquery-1.12.4.min.js | 0 .../htm/static/external/jquery-3.4.1.min.js | 0 .../htm/static/external/jquery-3.7.1.min.js | 0 .../static/external/jquery-ui-1.12.1.min.css | 0 .../static/external/jquery-ui-1.12.1.min.js | 0 
.../jquery.radioslider-1.0.0_b1.min.js | 0 .../external/leaflet.awesome-markers.css | 0 .../external/leaflet.awesome-markers.js | 0 .../external/leaflet.awesome.rotate.min.css | 0 .../teleop}/htm/static/external/leaflet.css | 0 .../teleop}/htm/static/external/leaflet.js | 0 .../static/external/performance-polyfill.js | 0 .../external/radioslider-1.0.0_b1.min.css | 0 .../teleop}/htm/templates/index.html | 0 .../htm/templates/mobile_controller_ui.html | 0 .../teleop}/htm/templates/normal_ui.html | 0 .../htm/templates/userMenu/menu_controls.html | 0 .../htm/templates/userMenu/menu_logbox.html | 0 .../htm/templates/userMenu/menu_settings.html | 0 .../teleop}/logbox/__init__.py | 0 .../teleop}/logbox/app.py | 0 .../teleop}/logbox/core.py | 0 .../teleop}/logbox/store.py | 0 .../teleop}/logbox/web.py | 0 {teleop => jetson_runtime/teleop}/pytest.ini | 0 .../teleop/teleop}/__init__.py | 0 .../teleop}/teleop/app.py | 0 .../teleop}/teleop/server.py | 0 .../teleop}/teleop/tel_utils.py | 0 .../teleop}/teleop/tests.py | 0 .../vehicles}/carla09/Dockerfile | 0 .../vehicles}/carla09/app.py | 0 .../vehicles}/carla09/config.template | 0 .../vehicles}/carla09/vehicle.py | 0 .../vehicles}/carla09/video.py | 0 .../vehicles}/rover/Dockerfile | 0 .../vehicles}/rover/app.py | 0 .../vehicles}/rover/config.template | 0 .../vehicles}/rover/core.py | 0 .../vehicles}/rover/pytest.ini | 0 .../vehicles}/rover/tests_rover.py | 0 pi4_runtime/README.md | 36 ++ {raspi => pi4_runtime}/docker-compose.yml | 15 +- {raspi/ras => pi4_runtime/servos}/Dockerfile | 0 {raspi/ras => pi4_runtime/servos}/__init__.py | 0 .../servos.py => pi4_runtime/servos/app.py | 2 +- {raspi/ras => pi4_runtime/servos}/core.py | 0 .../servos}/driver.template | 0 .../stream/Dockerfile | 0 {raspi => pi4_runtime}/stream/__init__.py | 0 {raspi => pi4_runtime}/stream/camera.py | 0 {raspi => pi4_runtime}/stream/camera.template | 0 raspi/BYODR_utils/common/location.py | 106 ----- raspi/BYODR_utils/common/option.py | 63 --- 
raspi/BYODR_utils/common/testing.py | 140 ------- raspi/BYODR_utils/common/video.py | 121 ------ raspi/BYODR_utils/common/websocket.py | 99 ----- teleop/teleop/__init__.py | 0 219 files changed, 476 insertions(+), 2375 deletions(-) rename {raspi/BYODR_utils/NANO_specific => BYODR_utils/JETSON_specific}/gpio_relay.py (100%) rename {raspi/BYODR_utils/NANO_specific => BYODR_utils/JETSON_specific}/utilities.py (100%) rename {raspi/BYODR_utils => BYODR_utils}/PI_specific/gpio_relay.py (100%) rename {raspi/BYODR_utils => BYODR_utils}/PI_specific/utilities.py (100%) rename {raspi/BYODR_utils => BYODR_utils}/common/__init__.py (100%) rename {raspi/BYODR_utils => BYODR_utils}/common/ipc.py (100%) rename {common/byodr/utils => BYODR_utils/common}/location.py (100%) rename {raspi/BYODR_utils => BYODR_utils}/common/navigate.py (100%) rename {common/byodr/utils => BYODR_utils/common}/option.py (100%) rename {raspi/BYODR_utils => BYODR_utils}/common/protocol.py (100%) rename {raspi/BYODR_utils => BYODR_utils}/common/ssh.py (100%) rename {common/byodr/utils => BYODR_utils/common}/testing.py (100%) rename {raspi/BYODR_utils => BYODR_utils}/common/usbrelay.py (100%) rename {common/byodr/utils => BYODR_utils/common}/video.py (100%) rename {common/byodr/utils => BYODR_utils/common}/websocket.py (100%) rename {docker/archive => archived/docker}/bionic-cp39-gstreamer.dockerfile (100%) rename {docker/archive => archived/docker}/bullseye-cp39-gstreamer.dockerfile (100%) rename {docker/archive => archived/docker}/jp42-nano-cp36-base.dockerfile (100%) rename {docker/archive => archived/docker}/jp42-nano-cp36-tensorrt.dockerfile (100%) rename {docker/archive => archived/docker}/jp42-nano-cp36-tf115-opencv440.dockerfile (100%) rename {docker/archive => archived/docker}/jp42-nano-cp36-tf115.dockerfile (100%) rename {docker/archive => archived/docker}/pi-cp27-gpio.dockerfile (100%) rename {docker/archive => archived/docker}/pi-cp37-odrive.dockerfile (100%) rename {docker/archive => 
archived/docker}/pure-ftpd.dockerfile (100%) rename {docker/archive => archived/docker}/tf113-jp43.dockerfile (100%) rename {docker/archive => archived/docker}/zerotier.dockerfile (100%) delete mode 100644 common/byodr/utils/__init__.py delete mode 100644 common/byodr/utils/gpio_relay.py delete mode 100644 common/byodr/utils/ipc.py delete mode 100644 common/byodr/utils/navigate.py delete mode 100644 common/byodr/utils/protocol.py delete mode 100644 common/byodr/utils/ssh.py delete mode 100644 common/byodr/utils/usbrelay.py delete mode 100644 docker-compose.carla.yml delete mode 100644 docker-compose.override.yml delete mode 100644 docker-compose.test.yml delete mode 100644 docs/Services_Documentation.txt create mode 100644 jetson_runtime/README.md create mode 100644 jetson_runtime/docker-compose.yml rename {following => jetson_runtime/following}/Dockerfile (100%) rename {following => jetson_runtime/following}/app.py (100%) rename {following => jetson_runtime/following}/botsort.yaml (100%) rename {following => jetson_runtime/following}/fol_utils.py (100%) rename {following => jetson_runtime/following}/models/480_20k.pt (100%) rename {following => jetson_runtime/following}/models/yolov8_20240717_coco(imgsz480x640_FP16).engine (100%) rename {following => jetson_runtime/following}/models/yolov8n(256*320_FB16).engine (100%) rename {following => jetson_runtime/following}/models/yolov8n.engine (100%) rename {following => jetson_runtime/following}/models/yolov8n_d_20240722(256*320_FB16).engine (100%) rename {following => jetson_runtime/following}/models/yolov8n_d_20240722(480*640_FB16).engine (100%) rename {following => jetson_runtime/following}/models/yolov8n_d_20240722.yaml (100%) rename {ftpd => jetson_runtime/ftpd}/Dockerfile (100%) rename {ftpd => jetson_runtime/ftpd}/create_user.sh (100%) mode change 100755 => 100644 rename {ftpd => jetson_runtime/ftpd}/wrap.py (100%) rename {httpd => jetson_runtime/httpd}/Dockerfile (100%) rename {httpd => 
jetson_runtime/httpd}/certs/lan.conf (100%) rename {httpd => jetson_runtime/httpd}/certs/readme.txt (100%) rename {httpd => jetson_runtime/httpd}/haproxy.template (100%) rename {httpd => jetson_runtime/httpd}/haproxy_ssl.template (100%) rename {httpd => jetson_runtime/httpd}/wrap.py (100%) rename {inference => jetson_runtime/inference}/archive/tf113-cp27-jp43.dockerfile (100%) rename {inference => jetson_runtime/inference}/archive/tf113-cp27-x86.dockerfile (100%) rename {inference => jetson_runtime/inference}/archive/tf115-cp36-jp42.dockerfile (100%) rename {inference => jetson_runtime/inference}/archive/tf115-cp36-x86.dockerfile (100%) rename {common/byodr => jetson_runtime/inference/inference}/__init__.py (100%) rename {inference => jetson_runtime/inference}/inference/app.py (100%) rename {inference => jetson_runtime/inference}/inference/image.py (100%) rename {inference => jetson_runtime/inference}/inference/tests.py (100%) rename {inference => jetson_runtime/inference}/inference/torched.py (100%) rename {inference => jetson_runtime/inference}/pytest.ini (100%) rename {inference => jetson_runtime/inference}/runtime-cp36-jp441.dockerfile (100%) rename {inference => jetson_runtime/inference}/runtime-cp36-x86.dockerfile (100%) rename {mongodb => jetson_runtime/mongodb}/Dockerfile (100%) rename {mongodb => jetson_runtime/mongodb}/wrap.py (100%) rename {pilot => jetson_runtime/pilot}/Dockerfile (100%) rename {inference/inference => jetson_runtime/pilot/pilot}/__init__.py (100%) rename {pilot => jetson_runtime/pilot}/pilot/app.py (100%) rename {pilot => jetson_runtime/pilot}/pilot/core.py (100%) rename {pilot => jetson_runtime/pilot}/pilot/relay.py (100%) rename {pilot => jetson_runtime/pilot}/pilot/tests.py (100%) rename {pilot => jetson_runtime/pilot}/pilot/tests_relay.py (100%) rename {pilot => jetson_runtime/pilot}/pilot/web.py (100%) rename {pilot => jetson_runtime/pilot}/pytest.ini (100%) rename {rosnode => jetson_runtime/rosnode}/Dockerfile (100%) rename 
{rosnode => jetson_runtime/rosnode}/app.py (100%) rename {teleop => jetson_runtime/teleop}/Dockerfile (100%) rename {teleop => jetson_runtime/teleop}/htm/jmuxer/jmuxer-21.07.min.js (100%) rename {teleop => jetson_runtime/teleop}/htm/jmuxer/z_index_video_mux.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/CSS/menu_controls.css (100%) rename {teleop => jetson_runtime/teleop}/htm/static/CSS/menu_logbox.css (100%) rename {teleop => jetson_runtime/teleop}/htm/static/CSS/menu_settings.css (100%) rename {teleop => jetson_runtime/teleop}/htm/static/CSS/mobileController.css (100%) rename {teleop => jetson_runtime/teleop}/htm/static/CSS/style.css (100%) rename {teleop => jetson_runtime/teleop}/htm/static/CSS/theme_mode.css (100%) rename {teleop => jetson_runtime/teleop}/htm/static/CSS/user_menu.css (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/Index/index_a_utils.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/Index/index_b_gamepad.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/Index/index_c_screen.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/Index/index_d_navigator.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/Index/index_e_teleop.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/Index/index_video_hlp.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/Index/index_video_mjpeg.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/index.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/mobileController/feature/mobileController_f_auto_navigation.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/mobileController/feature/mobileController_f_confidence.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/mobileController/feature/mobileController_f_following.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/mobileController/feature/mobileController_f_maneuver_training.js (100%) rename 
{teleop => jetson_runtime/teleop}/htm/static/JS/mobileController/mobileController_a_app.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/mobileController/mobileController_b_shape_square.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/mobileController/mobileController_c_logic.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/mobileController/mobileController_z_state.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/performance-polyfill.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/router.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/userMenu/menu_controls.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/userMenu/menu_logbox.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/JS/userMenu/menu_settings.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/VOR_Logo_dark.png (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/VOR_Logo_light.png (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/VR - pattern tree 1.png (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/VR-pattern_tree_1.png (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/ai_training_mode.png (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/autopilot_mode.png (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/caret.png (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/expand_collapse.png (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/follow_mode.png (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/im_no_image_available.png (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/manual_mode.png (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/manual_mode_nav.png (100%) rename {teleop => 
jetson_runtime/teleop}/htm/static/assets/pictures/map_recognition.png (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/nav_advanved_settings.png (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/nav_background.png (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/nav_control_by_phone.png (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/nav_controls_menu.png (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/nav_training_events.png (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/pictures/ps4_mapping.jpg (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/static_sora/Sora-Bold.ttf (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/static_sora/Sora-ExtraBold.ttf (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/static_sora/Sora-ExtraLight.ttf (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/static_sora/Sora-Light.ttf (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/static_sora/Sora-Medium.ttf (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/static_sora/Sora-Regular.ttf (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/static_sora/Sora-SemiBold.ttf (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/static_sora/Sora-Thin.ttf (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/static_sora/Sora-VariableFont_wght.ttf (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/svg/MC_VOR_logo.svg (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/svg/NC_VOR_logo.svg (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/svg/advanced_settings_menu.svg (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/svg/ai_training_mode.svg (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/svg/autopilot_mode.svg (100%) rename {teleop => 
jetson_runtime/teleop}/htm/static/assets/svg/control_by_phone.svg (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/svg/controls_settings_menu.svg (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/svg/follow_mode.svg (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/svg/manual_mode.svg (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/svg/map_recognition.svg (100%) rename {teleop => jetson_runtime/teleop}/htm/static/assets/svg/training_settings_menu.svg (100%) rename {teleop => jetson_runtime/teleop}/htm/static/external/bootstrap-theme.min.css (100%) rename {teleop => jetson_runtime/teleop}/htm/static/external/bootstrap.min.css (100%) rename {teleop => jetson_runtime/teleop}/htm/static/external/bootstrap.min.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/external/font-awesome.min.css (100%) rename {teleop => jetson_runtime/teleop}/htm/static/external/http-live-player.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/external/jquery-1.12.4.min.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/external/jquery-3.4.1.min.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/external/jquery-3.7.1.min.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/external/jquery-ui-1.12.1.min.css (100%) rename {teleop => jetson_runtime/teleop}/htm/static/external/jquery-ui-1.12.1.min.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/external/jquery.radioslider-1.0.0_b1.min.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/external/leaflet.awesome-markers.css (100%) rename {teleop => jetson_runtime/teleop}/htm/static/external/leaflet.awesome-markers.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/external/leaflet.awesome.rotate.min.css (100%) rename {teleop => jetson_runtime/teleop}/htm/static/external/leaflet.css (100%) rename {teleop => jetson_runtime/teleop}/htm/static/external/leaflet.js (100%) rename {teleop => 
jetson_runtime/teleop}/htm/static/external/performance-polyfill.js (100%) rename {teleop => jetson_runtime/teleop}/htm/static/external/radioslider-1.0.0_b1.min.css (100%) rename {teleop => jetson_runtime/teleop}/htm/templates/index.html (100%) rename {teleop => jetson_runtime/teleop}/htm/templates/mobile_controller_ui.html (100%) rename {teleop => jetson_runtime/teleop}/htm/templates/normal_ui.html (100%) rename {teleop => jetson_runtime/teleop}/htm/templates/userMenu/menu_controls.html (100%) rename {teleop => jetson_runtime/teleop}/htm/templates/userMenu/menu_logbox.html (100%) rename {teleop => jetson_runtime/teleop}/htm/templates/userMenu/menu_settings.html (100%) rename {teleop => jetson_runtime/teleop}/logbox/__init__.py (100%) rename {teleop => jetson_runtime/teleop}/logbox/app.py (100%) rename {teleop => jetson_runtime/teleop}/logbox/core.py (100%) rename {teleop => jetson_runtime/teleop}/logbox/store.py (100%) rename {teleop => jetson_runtime/teleop}/logbox/web.py (100%) rename {teleop => jetson_runtime/teleop}/pytest.ini (100%) rename {pilot/pilot => jetson_runtime/teleop/teleop}/__init__.py (100%) rename {teleop => jetson_runtime/teleop}/teleop/app.py (100%) rename {teleop => jetson_runtime/teleop}/teleop/server.py (100%) rename {teleop => jetson_runtime/teleop}/teleop/tel_utils.py (100%) rename {teleop => jetson_runtime/teleop}/teleop/tests.py (100%) rename {vehicles => jetson_runtime/vehicles}/carla09/Dockerfile (100%) rename {vehicles => jetson_runtime/vehicles}/carla09/app.py (100%) rename {vehicles => jetson_runtime/vehicles}/carla09/config.template (100%) rename {vehicles => jetson_runtime/vehicles}/carla09/vehicle.py (100%) rename {vehicles => jetson_runtime/vehicles}/carla09/video.py (100%) rename {vehicles => jetson_runtime/vehicles}/rover/Dockerfile (100%) rename {vehicles => jetson_runtime/vehicles}/rover/app.py (100%) rename {vehicles => jetson_runtime/vehicles}/rover/config.template (100%) rename {vehicles => 
jetson_runtime/vehicles}/rover/core.py (100%) rename {vehicles => jetson_runtime/vehicles}/rover/pytest.ini (100%) rename {vehicles => jetson_runtime/vehicles}/rover/tests_rover.py (100%) create mode 100644 pi4_runtime/README.md rename {raspi => pi4_runtime}/docker-compose.yml (73%) rename {raspi/ras => pi4_runtime/servos}/Dockerfile (100%) rename {raspi/ras => pi4_runtime/servos}/__init__.py (100%) rename raspi/ras/servos.py => pi4_runtime/servos/app.py (99%) rename {raspi/ras => pi4_runtime/servos}/core.py (100%) rename {raspi/ras => pi4_runtime/servos}/driver.template (100%) rename raspi/pi_gstreamer.dockerfile => pi4_runtime/stream/Dockerfile (100%) rename {raspi => pi4_runtime}/stream/__init__.py (100%) rename {raspi => pi4_runtime}/stream/camera.py (100%) rename {raspi => pi4_runtime}/stream/camera.template (100%) delete mode 100644 raspi/BYODR_utils/common/location.py delete mode 100644 raspi/BYODR_utils/common/option.py delete mode 100644 raspi/BYODR_utils/common/testing.py delete mode 100644 raspi/BYODR_utils/common/video.py delete mode 100644 raspi/BYODR_utils/common/websocket.py delete mode 100644 teleop/teleop/__init__.py diff --git a/raspi/BYODR_utils/NANO_specific/gpio_relay.py b/BYODR_utils/JETSON_specific/gpio_relay.py similarity index 100% rename from raspi/BYODR_utils/NANO_specific/gpio_relay.py rename to BYODR_utils/JETSON_specific/gpio_relay.py diff --git a/raspi/BYODR_utils/NANO_specific/utilities.py b/BYODR_utils/JETSON_specific/utilities.py similarity index 100% rename from raspi/BYODR_utils/NANO_specific/utilities.py rename to BYODR_utils/JETSON_specific/utilities.py diff --git a/raspi/BYODR_utils/PI_specific/gpio_relay.py b/BYODR_utils/PI_specific/gpio_relay.py similarity index 100% rename from raspi/BYODR_utils/PI_specific/gpio_relay.py rename to BYODR_utils/PI_specific/gpio_relay.py diff --git a/raspi/BYODR_utils/PI_specific/utilities.py b/BYODR_utils/PI_specific/utilities.py similarity index 100% rename from 
raspi/BYODR_utils/PI_specific/utilities.py rename to BYODR_utils/PI_specific/utilities.py diff --git a/raspi/BYODR_utils/common/__init__.py b/BYODR_utils/common/__init__.py similarity index 100% rename from raspi/BYODR_utils/common/__init__.py rename to BYODR_utils/common/__init__.py diff --git a/raspi/BYODR_utils/common/ipc.py b/BYODR_utils/common/ipc.py similarity index 100% rename from raspi/BYODR_utils/common/ipc.py rename to BYODR_utils/common/ipc.py diff --git a/common/byodr/utils/location.py b/BYODR_utils/common/location.py similarity index 100% rename from common/byodr/utils/location.py rename to BYODR_utils/common/location.py diff --git a/raspi/BYODR_utils/common/navigate.py b/BYODR_utils/common/navigate.py similarity index 100% rename from raspi/BYODR_utils/common/navigate.py rename to BYODR_utils/common/navigate.py diff --git a/common/byodr/utils/option.py b/BYODR_utils/common/option.py similarity index 100% rename from common/byodr/utils/option.py rename to BYODR_utils/common/option.py diff --git a/raspi/BYODR_utils/common/protocol.py b/BYODR_utils/common/protocol.py similarity index 100% rename from raspi/BYODR_utils/common/protocol.py rename to BYODR_utils/common/protocol.py diff --git a/raspi/BYODR_utils/common/ssh.py b/BYODR_utils/common/ssh.py similarity index 100% rename from raspi/BYODR_utils/common/ssh.py rename to BYODR_utils/common/ssh.py diff --git a/common/byodr/utils/testing.py b/BYODR_utils/common/testing.py similarity index 100% rename from common/byodr/utils/testing.py rename to BYODR_utils/common/testing.py diff --git a/raspi/BYODR_utils/common/usbrelay.py b/BYODR_utils/common/usbrelay.py similarity index 100% rename from raspi/BYODR_utils/common/usbrelay.py rename to BYODR_utils/common/usbrelay.py diff --git a/common/byodr/utils/video.py b/BYODR_utils/common/video.py similarity index 100% rename from common/byodr/utils/video.py rename to BYODR_utils/common/video.py diff --git a/common/byodr/utils/websocket.py 
b/BYODR_utils/common/websocket.py similarity index 100% rename from common/byodr/utils/websocket.py rename to BYODR_utils/common/websocket.py diff --git a/README.md b/README.md index 453fd788..550a807f 100644 --- a/README.md +++ b/README.md @@ -21,3 +21,8 @@ We went ahead and designed a rover that can be assembled from **generally availa * Community driven self-driving models * Dockerized * Free for personal use +* Ability to work with modular hardware + +## License + +This project is licensed under the MIT License. See the [LICENSE](./LICENSE) file for more information. diff --git a/docker/archive/bionic-cp39-gstreamer.dockerfile b/archived/docker/bionic-cp39-gstreamer.dockerfile similarity index 100% rename from docker/archive/bionic-cp39-gstreamer.dockerfile rename to archived/docker/bionic-cp39-gstreamer.dockerfile diff --git a/docker/archive/bullseye-cp39-gstreamer.dockerfile b/archived/docker/bullseye-cp39-gstreamer.dockerfile similarity index 100% rename from docker/archive/bullseye-cp39-gstreamer.dockerfile rename to archived/docker/bullseye-cp39-gstreamer.dockerfile diff --git a/docker/archive/jp42-nano-cp36-base.dockerfile b/archived/docker/jp42-nano-cp36-base.dockerfile similarity index 100% rename from docker/archive/jp42-nano-cp36-base.dockerfile rename to archived/docker/jp42-nano-cp36-base.dockerfile diff --git a/docker/archive/jp42-nano-cp36-tensorrt.dockerfile b/archived/docker/jp42-nano-cp36-tensorrt.dockerfile similarity index 100% rename from docker/archive/jp42-nano-cp36-tensorrt.dockerfile rename to archived/docker/jp42-nano-cp36-tensorrt.dockerfile diff --git a/docker/archive/jp42-nano-cp36-tf115-opencv440.dockerfile b/archived/docker/jp42-nano-cp36-tf115-opencv440.dockerfile similarity index 100% rename from docker/archive/jp42-nano-cp36-tf115-opencv440.dockerfile rename to archived/docker/jp42-nano-cp36-tf115-opencv440.dockerfile diff --git a/docker/archive/jp42-nano-cp36-tf115.dockerfile b/archived/docker/jp42-nano-cp36-tf115.dockerfile 
similarity index 100% rename from docker/archive/jp42-nano-cp36-tf115.dockerfile rename to archived/docker/jp42-nano-cp36-tf115.dockerfile diff --git a/docker/archive/pi-cp27-gpio.dockerfile b/archived/docker/pi-cp27-gpio.dockerfile similarity index 100% rename from docker/archive/pi-cp27-gpio.dockerfile rename to archived/docker/pi-cp27-gpio.dockerfile diff --git a/docker/archive/pi-cp37-odrive.dockerfile b/archived/docker/pi-cp37-odrive.dockerfile similarity index 100% rename from docker/archive/pi-cp37-odrive.dockerfile rename to archived/docker/pi-cp37-odrive.dockerfile diff --git a/docker/archive/pure-ftpd.dockerfile b/archived/docker/pure-ftpd.dockerfile similarity index 100% rename from docker/archive/pure-ftpd.dockerfile rename to archived/docker/pure-ftpd.dockerfile diff --git a/docker/archive/tf113-jp43.dockerfile b/archived/docker/tf113-jp43.dockerfile similarity index 100% rename from docker/archive/tf113-jp43.dockerfile rename to archived/docker/tf113-jp43.dockerfile diff --git a/docker/archive/zerotier.dockerfile b/archived/docker/zerotier.dockerfile similarity index 100% rename from docker/archive/zerotier.dockerfile rename to archived/docker/zerotier.dockerfile diff --git a/cli_wrapper.py b/cli_wrapper.py index 2ed2d133..72cd6984 100644 --- a/cli_wrapper.py +++ b/cli_wrapper.py @@ -4,128 +4,184 @@ import subprocess import filecmp -# Optional: For colored output in the terminal -try: - from colorama import Fore, Style - - colorama_installed = True -except ImportError: - colorama_installed = False - -common_folder_name = "BYODR_utils" - - -def compare_common_dirs(dir1, dir2): - differences = [] - dirs_cmp = filecmp.dircmp(dir1, dir2) - # Files that differ - for file_name in dirs_cmp.diff_files: - differences.append(f"Files differ: {os.path.join(dir1, file_name)} and {os.path.join(dir2, file_name)}") - # Files only in dir1 - for file_name in dirs_cmp.left_only: - differences.append(f"File only in original {common_folder_name} folder: 
{os.path.join(dir1, file_name)}") - # Files only in dir2 - for file_name in dirs_cmp.right_only: - differences.append(f"File only in '{dir2}' folder: {os.path.join(dir2, file_name)}") - # Recursively compare subdirectories - for sub_dir in dirs_cmp.common_dirs: - sub_differences = compare_common_dirs(os.path.join(dir1, sub_dir), os.path.join(dir2, sub_dir)) - differences.extend(sub_differences) - return differences - - -def compare_common_dirs_main(common_dir, dst_common_dir): - differences = compare_common_dirs(common_dir, dst_common_dir) - if differences: - if colorama_installed: - print(Fore.RED + f"Error: {common_folder_name} folder already exists in '{os.path.dirname(dst_common_dir)}', and there are differences between the original and the copied folder." + Style.RESET_ALL) + +class CLIWrapper: + def __init__(self, common_folder_name, target_dir_name, balena_command_args): + """ + Initialize the CLIWrapper with the target directory and Balena command arguments. + + Args: + target_dir_name (str): Name of the target directory. + balena_command_args (list): List of arguments for the Balena CLI command. + """ + self.common_folder_name = common_folder_name + self.target_dir_name = target_dir_name + self.balena_command_args = balena_command_args + + self.current_dir = os.getcwd() + self.target_dir = os.path.join(self.current_dir, self.target_dir_name) + self.common_dir = os.path.join(self.current_dir, self.common_folder_name) + self.dst_common_dir = os.path.join(self.target_dir, self.common_folder_name) + + # Optional colorama for colored output + try: + from colorama import Fore, Style + + self.colorama_installed = True + self.Fore = Fore + self.Style = Style + except ImportError: + self.colorama_installed = False + self.Fore = None + self.Style = None + + def compare_common_dirs(self, dir1, dir2): + """ + Recursively compare two directories and return a list of differences. + + Args: + dir1 (str): Path to the first directory. 
+ dir2 (str): Path to the second directory. + + Returns: + list: A list of differences between the directories. + """ + differences = [] + dirs_cmp = filecmp.dircmp(dir1, dir2) + # Files that differ + for file_name in dirs_cmp.diff_files: + differences.append(f"Files differ: {os.path.join(dir1, file_name)} and {os.path.join(dir2, file_name)}") + # Files only in dir1 + for file_name in dirs_cmp.left_only: + differences.append(f"File only in original {self.common_folder_name} folder: {os.path.join(dir1, file_name)}") + # Files only in dir2 + for file_name in dirs_cmp.right_only: + differences.append(f"File only in '{dir2}' folder: {os.path.join(dir2, file_name)}") + # Recursively compare subdirectories + for sub_dir in dirs_cmp.common_dirs: + sub_differences = self.compare_common_dirs(os.path.join(dir1, sub_dir), os.path.join(dir2, sub_dir)) + differences.extend(sub_differences) + return differences + + def compare_common_dirs_main(self): + """ + Compare the original and destination common directories and decide whether to proceed. + + Returns: + bool: True if it's safe to proceed, False otherwise. + """ + differences = self.compare_common_dirs(self.common_dir, self.dst_common_dir) + if differences: + if self.colorama_installed: + print( + self.Fore.RED + + f"Error: {self.common_folder_name} folder already exists in '{os.path.dirname(self.dst_common_dir)}', and there are differences between the original and the copied folder." 
+ + self.Style.RESET_ALL + ) + else: + print(f"Error: {self.common_folder_name} folder already exists in '{os.path.dirname(self.dst_common_dir)}', and there are differences between the original and the copied folder.") + print("Differences found:") + for diff in differences: + print(diff) + return False # Do not proceed else: - print(f"Error: {common_folder_name} folder already exists in '{os.path.dirname(dst_common_dir)}', and there are differences between the original and the copied folder.") - print("Differences found:") - for diff in differences: - print(diff) - return False # Do not proceed - else: - print(f"Warning: {common_folder_name} folder already exists in '{os.path.dirname(dst_common_dir)}', but the files are the same. Proceeding.") - return True # Proceed - - -def compare_and_handle_on_exit(common_dir, dst_common_dir): - differences = compare_common_dirs(common_dir, dst_common_dir) - if differences: - # Differences found; do not delete the copied 'common' folder - if colorama_installed: - print(Fore.RED + f"Warning: Changes were made to the {common_folder_name} folder in '{os.path.dirname(dst_common_dir)}'. It will not be deleted." + Style.RESET_ALL) + print(f"Warning: {self.common_folder_name} folder already exists in '{os.path.dirname(self.dst_common_dir)}', but the files are the same. Proceeding.") + return True # Proceed + + def compare_and_handle_on_exit(self): + """ + Compare the common directories upon exit and handle the copied folder accordingly. + """ + differences = self.compare_common_dirs(self.common_dir, self.dst_common_dir) + if differences: + # Differences found; do not delete the copied 'common' folder + if self.colorama_installed: + print(self.Fore.RED + f"Warning: Changes were made to the {self.common_folder_name} folder in '{os.path.dirname(self.dst_common_dir)}'. It will not be deleted." 
+ self.Style.RESET_ALL) + else: + print(f"Warning: Changes were made to the {self.common_folder_name} folder in '{os.path.dirname(self.dst_common_dir)}'. It will not be deleted.") + print("Differences found:") + for diff in differences: + print(diff) else: - print(f"Warning: Changes were made to the {common_folder_name} folder in '{os.path.dirname(dst_common_dir)}'. It will not be deleted.") - print("Differences found:") - for diff in differences: - print(diff) - else: - # No differences; safe to delete the copied 'common' folder - if os.path.exists(dst_common_dir): - shutil.rmtree(dst_common_dir) - print(f"Removed {common_folder_name} folder from '{os.path.basename(dst_common_dir)}'.") + # No differences; safe to delete the copied 'common' folder + if os.path.exists(self.dst_common_dir): + shutil.rmtree(self.dst_common_dir) + print(f"Removed {self.common_folder_name} folder from '{os.path.basename(self.dst_common_dir)}'.") + else: + print(f"No {self.common_folder_name} folder to remove from '{os.path.basename(self.dst_common_dir)}'.") + + def copy_common_dir(self): + """ + Copy the common directory to the target directory. + """ + # Remove the existing 'common' folder in target directory if it exists + if os.path.exists(self.dst_common_dir): + shutil.rmtree(self.dst_common_dir) + # Copy the 'common' folder + shutil.copytree(self.common_dir, self.dst_common_dir) + + def run_balena_command(self): + """ + Run the Balena CLI command in the target directory. + """ + # Build the Balena CLI command + balena_command = ["balena"] + self.balena_command_args + # Set the working directory to the target directory + subprocess.run(balena_command, cwd=self.target_dir) + + def execute(self): + """ + Execute the main logic of copying the common directory, running the Balena command, + and handling the common directory upon exit. 
+ """ + # Check if target directory exists + if not os.path.isdir(self.target_dir): + print(f"Error: Target directory '{self.target_dir_name}' does not exist.") + sys.exit(1) + + # Step 1: Check if 'common' directory exists in target directory + if os.path.exists(self.dst_common_dir): + proceed = self.compare_common_dirs_main() + if not proceed: + # Exit the script due to differences + sys.exit(1) else: - print(f"No {common_folder_name} folder to remove from '{os.path.basename(dst_common_dir)}'.") + # No existing 'common_folder_name' folder in the target directory + pass + + try: + # Step 2: Copy the 'common_folder_name' directory into the target directory + self.copy_common_dir() + + # Step 3: Run the Balena CLI command + self.run_balena_command() + except KeyboardInterrupt: + # Handle graceful shutdown (Ctrl+C) + print("\nScript interrupted by user.") + finally: + # Compare and handle the 'common' folder upon exit + self.compare_and_handle_on_exit() -def main(): - # Check if at least two arguments are provided (script name, target directory) +if __name__ == "__main__": + # How to run: python cli_wrapper.py jetson_runtime push 192.168.1.100 --debug + + # Command-line parsing if len(sys.argv) < 2: print("Usage: python cli_wrapper.py [balena_command] [balena_args]") sys.exit(1) - # Get the target directory from the first argument target_dir_name = sys.argv[1] - target_dir = os.path.join(os.getcwd(), target_dir_name) - # Ensure the target directory exists - if not os.path.isdir(target_dir): - print(f"Error: Target directory '{target_dir_name}' does not exist.") - sys.exit(1) - - # Define paths - common_dir = os.path.join(os.getcwd(), common_folder_name) - dst_common_dir = os.path.join(target_dir, common_folder_name) - - # Extract the Balena CLI command and arguments balena_command_args = sys.argv[2:] + common_folder_name = "BYODR_utils" + if not balena_command_args: print("Error: No Balena CLI command provided.") print("Usage: python cli_wrapper.py [balena_command] 
[balena_args]") sys.exit(1) - # Step 1: Check if 'common' directory exists in target directory - if os.path.exists(dst_common_dir): - proceed = compare_common_dirs_main(common_dir, dst_common_dir) - if not proceed: - # Exit the script due to differences - sys.exit(1) - else: - # No existing 'common_folder_name' folder in the target directory - pass - - try: - # Step 2: Copy the 'common_folder_name' directory into the target directory - # Remove the existing 'common' folder in target directory if it exists - if os.path.exists(dst_common_dir): - shutil.rmtree(dst_common_dir) - # Copy the 'common' folder - shutil.copytree(common_dir, dst_common_dir) + # Create an instance of CLIWrapper + cli_wrapper = CLIWrapper(common_folder_name, target_dir_name, balena_command_args) - # Step 3: Run the Balena CLI command - # Build the Balena CLI command - balena_command = ["balena"] + balena_command_args - - # Set the working directory to the target directory - subprocess.run(balena_command, cwd=target_dir) - except KeyboardInterrupt: - # Handle graceful shutdown (Ctrl+C) - print("\nScript interrupted by user.") - finally: - # Compare and handle the 'common' folder upon exit - compare_and_handle_on_exit(common_dir, dst_common_dir) - - -if __name__ == "__main__": - main() + # Execute the main logic + cli_wrapper.execute() diff --git a/common/byodr/utils/__init__.py b/common/byodr/utils/__init__.py deleted file mode 100644 index 9fba1429..00000000 --- a/common/byodr/utils/__init__.py +++ /dev/null @@ -1,200 +0,0 @@ -from __future__ import absolute_import - -import collections -import logging -import multiprocessing -import signal -import time -import traceback -from abc import ABCMeta, abstractmethod -from cProfile import Profile -from contextlib import contextmanager - -import numpy as np -import six - -from byodr.utils.option import hash_dict - -logger = logging.getLogger(__name__) - - -def timestamp(value=None): - """ - Timestamp as integer to retain precision e.g. 
when serializing to string. - """ - ts = time.time() if value is None else value - return int(ts * 1e6) - - -def entropy(x, eps=1e-20): - return abs(-np.sum(x * np.log(np.clip(x, eps, 1.0)))) - - -class Profiler(Profile): - """ - Custom Profile class with a __call__() context manager method to enable profiling. - Use: - profiler = Profiler() - with profiler(): - - profiler.dump_stats('prof.stats') - -- - python -c "import pstats; p = pstats.Stats('prof.stats'); p.sort_stats('time').print_stats(50)" - python -c "import pstats; p = pstats.Stats('prof.stats'); p.sort_stats('cumulative').print_stats(50)" - """ - - def __init__(self, *args, **kwargs): - super(Profile, self).__init__(*args, **kwargs) - self.disable() # Profiling initially off. - - @contextmanager - def __call__(self): - self.enable() - yield # Execute code to be profiled. - self.disable() - - -class Configurable(six.with_metaclass(ABCMeta, object)): - def __init__(self): - self._lock = multiprocessing.Lock() - self._errors = [] - self._hash = -1 - self._num_starts = 0 - - # noinspection PyUnusedLocal - @abstractmethod - def internal_start(self, **kwargs): - return [] - - @abstractmethod - def internal_quit(self, restarting=False): - pass - - def get_errors(self): - return self._errors - - def get_num_starts(self): - return self._num_starts - - def is_reconfigured(self, **kwargs): - return self._hash != hash_dict(**kwargs) - - def start(self, **kwargs): - with self._lock: - self._errors = self.internal_start(**kwargs) - self._hash = hash_dict(**kwargs) - self._num_starts += 1 - - def quit(self, restarting=False): - with self._lock: - self.internal_quit(restarting) - - def join(self): - self.quit() - - def restart(self, **kwargs): - _reconfigured = self.is_reconfigured(**kwargs) - if _reconfigured: - if self._num_starts > 0: - self.quit(restarting=True) - self.start(**kwargs) - return _reconfigured - - -class Application(object): - def __init__(self, run_hz=10, quit_event=None): - self.logger = 
logging.getLogger(__name__) - self._hz = run_hz - self._sleep = 0.100 - self.set_hz(run_hz) - if quit_event is None: - self.quit_event = multiprocessing.Event() - signal.signal(signal.SIGINT, lambda sig, frame: self._interrupt()) - signal.signal(signal.SIGTERM, lambda sig, frame: self._interrupt()) - else: - self.quit_event = quit_event - # Recent window to calculate the actual processing frequency. - self._rt_queue = collections.deque(maxlen=50) - - def _interrupt(self): - self.logger.info("Received interrupt, quitting.") - self.quit() - - @staticmethod - def _latest_or_none(receiver, patience): - candidate = receiver() - _time = candidate.get("time", 0) if candidate is not None else 0 - _on_time = (timestamp() - _time) < patience - return candidate if _on_time else None - - def get_hz(self): - return self._hz - - def get_actual_hz(self): - return (1.0 / np.mean(self._rt_queue)) if self._rt_queue else 0 - - def set_hz(self, hz): - self._hz = hz - self._sleep = 1.0 / hz - - def active(self): - return not self.quit_event.is_set() - - def quit(self): - self.quit_event.set() - - def setup(self): - pass - - def step(self): - pass - - def finish(self): - pass - - def run(self): - try: - self.setup() - while self.active(): - _start = time.time() - self.step() - _duration = time.time() - _start - time.sleep(max(0.0, self._sleep - _duration)) - # Report the actual clock frequency which includes the user specified wait time. - self._rt_queue.append(time.time() - _start) - except Exception as e: - # Quit first to be sure - the traceback may in some cases raise another exception. 
- self.quit() - self.logger.error(e) - self.logger.error(traceback.format_exc()) - except KeyboardInterrupt: - self.quit() - finally: - self.finish() - - -class ApplicationExit(object): - def __init__(self, event, cb): - self._event = event - self._cb = cb - - def __call__(self, *args, **kwargs): - if self._event.is_set(): - try: - self._cb() - except Exception as e: - logger.info(e) - logger.info(traceback.format_exc()) - - -class PeriodicCallTrace(object): - def __init__(self, seconds=1.0): - self._seconds_micro = seconds * 1e6 - self._last = timestamp() - - def __call__(self, *args, **kwargs): - _callback = args[0] - _now = timestamp() - if _now - self._last > self._seconds_micro: - _callback() - self._last = _now diff --git a/common/byodr/utils/gpio_relay.py b/common/byodr/utils/gpio_relay.py deleted file mode 100644 index 73689f13..00000000 --- a/common/byodr/utils/gpio_relay.py +++ /dev/null @@ -1,43 +0,0 @@ -from __future__ import absolute_import - -import threading - -import Jetson.GPIO as GPIO - - -class ThreadSafeGpioRelay(object): - """ - Thread-safe class for managing a GPIO relay on a Jetson Nano. 
- """ - - def __init__(self, pin=15): - self.pin = pin - self.state = False # False for OFF, True for ON - self.lock = threading.Lock() - GPIO.setmode(GPIO.BOARD) # Set the pin numbering system to BOARD - GPIO.setup(self.pin, GPIO.OUT, initial=GPIO.LOW) - - def open(self): - """Turns the relay ON (sets the GPIO pin LOW).""" - with self.lock: - GPIO.output(self.pin, GPIO.LOW) - print("opened the relay") - self.state = False - - def close(self): - """Turns the relay OFF (sets the GPIO pin HIGH).""" - with self.lock: - GPIO.output(self.pin, GPIO.HIGH) - print("closed the relay") - self.state = True - - def toggle(self): - """Toggles the relay state.""" - with self.lock: - self.state = not self.state - GPIO.output(self.pin, GPIO.LOW if self.state else GPIO.HIGH) - - def states(self): - """Returns the current state of the relay.""" - with self.lock: - return self.state diff --git a/common/byodr/utils/ipc.py b/common/byodr/utils/ipc.py deleted file mode 100644 index 9812bbe1..00000000 --- a/common/byodr/utils/ipc.py +++ /dev/null @@ -1,309 +0,0 @@ -from __future__ import absolute_import - -import collections -import datetime -import json -import logging -import multiprocessing -import os -import sys -import threading -import time - -import numpy as np -import zmq - -from byodr.utils import timestamp - -if sys.version_info > (3,): - # noinspection PyShadowingBuiltins - buffer = memoryview - - def receive_string(subscriber): - return subscriber.recv_string() - - def send_string(sender, val, flags=0): - return sender.send_string(val, flags) - -else: - - def receive_string(subscriber): - return subscriber.recv() - - def send_string(sender, val, flags=0): - return sender.send(val, flags) - - -logger = logging.getLogger(__name__) - - -class JSONPublisher(object): - def __init__(self, url, topic="", hwm=1, clean_start=True): - if clean_start and url.startswith("ipc://") and os.path.exists(url[6:]): - os.remove(url[6:]) - publisher = zmq.Context().socket(zmq.PUB) - 
publisher.set_hwm(hwm) - publisher.bind(url) - self._publisher = publisher - self._topic = topic - - def publish(self, data, topic=None): - _topic = self._topic if topic is None else topic - if data is not None: - data = dict((k, v) for k, v in data.items() if v is not None) - send_string(self._publisher, "{}:{}".format(_topic, json.dumps(data)), zmq.NOBLOCK) - - -class ImagePublisher(object): - def __init__(self, url, topic="", hwm=1, clean_start=True): - if clean_start and url.startswith("ipc://") and os.path.exists(url[6:]): - os.remove(url[6:]) - publisher = zmq.Context().socket(zmq.PUB) - publisher.set_hwm(hwm) - publisher.bind(url) - self._publisher = publisher - self._topic = topic.encode("utf-8") # Encode the topic to bytes at initialization - - def publish(self, _img, topic=None): - _topic = self._topic if topic is None else topic.encode("utf-8") - # json.dumps(...) returns a string, it needs to be encoded into bytes. - self._publisher.send_multipart( - [ - _topic, - json.dumps(dict(time=timestamp(), shape=_img.shape)).encode("utf-8"), - np.ascontiguousarray(_img, dtype=np.uint8), - ], - flags=zmq.NOBLOCK, - ) - - -class JSONReceiver(object): - def __init__(self, url, topic=b"", hwm=1, receive_timeout_ms=2, pop=False): - subscriber = zmq.Context().socket(zmq.SUB) - subscriber.set_hwm(hwm) - subscriber.setsockopt(zmq.RCVTIMEO, receive_timeout_ms) - subscriber.setsockopt(zmq.LINGER, 0) - subscriber.connect(url) - subscriber.setsockopt(zmq.SUBSCRIBE, topic) - self._pop = pop - self._unpack = hwm == 1 - self._subscriber = subscriber - self._lock = threading.Lock() - self._queue = collections.deque(maxlen=hwm) - - def consume(self): - with self._lock: - try: - # Does not replace local queue messages when none are available. 
- self._queue.appendleft(json.loads(receive_string(self._subscriber).split(":", 1)[1])) - except zmq.Again: - pass - - def get(self): - _view = self._queue[0] if (self._queue and self._unpack) else list(self._queue) if self._queue else None - if self._pop: - self._queue.clear() - return _view - - def peek(self): - return self._queue[0] if self._queue else None - - -class CollectorThread(threading.Thread): - def __init__(self, receivers, event=None, hz=1000): - super(CollectorThread, self).__init__() - _list = isinstance(receivers, tuple) or isinstance(receivers, list) - self._receivers = receivers if _list else [receivers] - self._quit_event = multiprocessing.Event() if event is None else event - self._sleep = 1.0 / hz - - def get(self, index=0): - # Get the latest message without blocking. - # _receiver.consume() -- blocks; perform at thread.run() - return self._receivers[index].get() - - def peek(self, index=0): - return self._receivers[index].peek() - - def quit(self): - self._quit_event.set() - - def run(self): - while not self._quit_event.is_set(): - # Empty the receiver queues to not block upstream senders. 
- list(map(lambda receiver: receiver.consume(), self._receivers)) - time.sleep(self._sleep) - - -def json_collector(url, topic, event, receive_timeout_ms=1000, hwm=1, pop=False): - return CollectorThread(JSONReceiver(url, topic, hwm=hwm, receive_timeout_ms=receive_timeout_ms, pop=pop), event=event) - - -class ReceiverThread(threading.Thread): - def __init__(self, url, event=None, topic=b"", hwm=1, receive_timeout_ms=1): - super(ReceiverThread, self).__init__() - subscriber = zmq.Context().socket(zmq.SUB) - subscriber.set_hwm(hwm) - subscriber.setsockopt(zmq.RCVTIMEO, receive_timeout_ms) - subscriber.setsockopt(zmq.LINGER, 0) - subscriber.connect(url) - subscriber.setsockopt(zmq.SUBSCRIBE, topic) - self._subscriber = subscriber - self._quit_event = multiprocessing.Event() if event is None else event - self._queue = collections.deque(maxlen=1) - self._listeners = [] - - def add_listener(self, c): - self._listeners.append(c) - - def get_latest(self): - return self._queue[0] if bool(self._queue) else None - - def pop_latest(self): - return self._queue.popleft() if bool(self._queue) else None - - def quit(self): - self._quit_event.set() - - def run(self): - while not self._quit_event.is_set(): - try: - _latest = json.loads(receive_string(self._subscriber).split(":", 1)[1]) - self._queue.appendleft(_latest) - list(map(lambda x: x(_latest), self._listeners)) - except zmq.Again: - pass - - -class CameraThread(threading.Thread): - def __init__(self, url, event, topic=b"", hwm=1, receive_timeout_ms=25): - super(CameraThread, self).__init__() - subscriber = zmq.Context().socket(zmq.SUB) - subscriber.set_hwm(hwm) - subscriber.setsockopt(zmq.RCVTIMEO, receive_timeout_ms) - subscriber.setsockopt(zmq.LINGER, 0) - subscriber.connect(url) - subscriber.setsockopt(zmq.SUBSCRIBE, topic) - self._subscriber = subscriber - self._quit_event = event - self._images = collections.deque(maxlen=1) - - def capture(self): - return self._images[0] if bool(self._images) else (None, None) - - def 
run(self): - while not self._quit_event.is_set(): - try: - [_, md, data] = self._subscriber.recv_multipart() - md = json.loads(md) - height, width, channels = md["shape"] - img = np.frombuffer(buffer(data), dtype=np.uint8) - img = img.reshape((height, width, channels)) - self._images.appendleft((md, img)) - except ValueError as e: - logger.warning(e) - except zmq.Again: - pass - - -class JSONServerThread(threading.Thread): - def __init__(self, url, event, hwm=1, receive_timeout_ms=50): - super(JSONServerThread, self).__init__() - server = zmq.Context().socket(zmq.REP) - server.set_hwm(hwm) - server.setsockopt(zmq.RCVTIMEO, receive_timeout_ms) - server.setsockopt(zmq.LINGER, 0) - server.bind(url) - self._server = server - self._quit_event = event - self._queue = collections.deque(maxlen=1) - self._listeners = [] - - def add_listener(self, c): - self._listeners.append(c) - - def on_message(self, message): - self._queue.appendleft(message) - list(map(lambda x: x(message), self._listeners)) - - def get_latest(self): - return self._queue[0] if bool(self._queue) else None - - def pop_latest(self): - return self._queue.popleft() if bool(self._queue) else None - - def serve(self, request): - return {} - - def run(self): - while not self._quit_event.is_set(): - try: - message = json.loads(receive_string(self._server)) - self.on_message(message) - send_string(self._server, json.dumps(self.serve(message))) - except zmq.Again: - pass - - -class LocalIPCServer(JSONServerThread): - def __init__(self, name, url, event, receive_timeout_ms=50): - super(LocalIPCServer, self).__init__(url, event, receive_timeout_ms) - self._name = name - self._m_startup = collections.deque(maxlen=1) - self._m_capabilities = collections.deque(maxlen=1) - - def register_start(self, errors, capabilities=None): - capabilities = {} if capabilities is None else capabilities - self._m_startup.append((datetime.datetime.utcnow().strftime("%b %d %H:%M:%S.%s UTC"), errors)) - 
self._m_capabilities.append(capabilities) - - def serve(self, message): - try: - if message.get("request") == "system/startup/list" and self._m_startup: - ts, errors = self._m_startup[-1] - messages = ["No errors"] - if errors: - d_errors = dict() # Merge to obtain distinct keys. - [d_errors.update({error.key: error.message}) for error in errors] - messages = ["{} - {}".format(k, d_errors[k]) for k in d_errors.keys()] - return {self._name: {ts: messages}} - elif message.get("request") == "system/service/capabilities" and self._m_capabilities: - return {self._name: self._m_capabilities[-1]} - except IndexError: - pass - return {} - - -class JSONZmqClient(object): - def __init__(self, urls, hwm=1, receive_timeout_ms=200): - self._urls = urls if isinstance(urls, list) else [urls] - self._receive_timeout = receive_timeout_ms - self._context = None - self._socket = None - self._hwm = hwm - self._create(self._urls) - - def _create(self, locations): - context = zmq.Context() - socket = context.socket(zmq.REQ) - socket.set_hwm(self._hwm) - socket.setsockopt(zmq.RCVTIMEO, self._receive_timeout) - socket.setsockopt(zmq.LINGER, 0) - [socket.connect(location) for location in locations] - self._context = context - self._socket = socket - - def quit(self): - if self._context is not None: - self._context.destroy() - - def call(self, message): - ret = {} - for i in range(len(self._urls)): - try: - send_string(self._socket, json.dumps(message), zmq.NOBLOCK) - ret.update(json.loads(receive_string(self._socket))) - except zmq.ZMQError: - j = i + 1 - self._create(self._urls[j:] + self._urls[:j]) - return ret diff --git a/common/byodr/utils/navigate.py b/common/byodr/utils/navigate.py deleted file mode 100644 index 96e40786..00000000 --- a/common/byodr/utils/navigate.py +++ /dev/null @@ -1,373 +0,0 @@ -from __future__ import absolute_import - -import glob -import json -import logging -import multiprocessing -import os -import threading -from abc import ABCMeta, abstractmethod - -from 
byodr.utils import timestamp - -logger = logging.getLogger(__name__) - - -def _translate_navigation_direction(value): - if value is not None: - value = value.lower() - if value == "left": - return NavigationCommand.LEFT - elif value == "right": - return NavigationCommand.RIGHT - elif value == "ahead": - return NavigationCommand.AHEAD - elif value == "default": - return NavigationCommand.DEFAULT - # No change in direction. - return None - - -class NavigationCommand(object): - DEFAULT, LEFT, AHEAD, RIGHT = (0, 1, 2, 3) - - def __init__(self, sleep=None, direction=None, speed=None): - self._time = None - self._sleep = sleep - self._direction = direction - self._speed = speed - - def get_time(self): - return self._time - - def set_time(self, value): - self._time = value - return self - - def get_sleep(self): - return self._sleep - - def get_direction(self): - return self._direction - - def get_speed(self): - return self._speed - - -class NavigationInstructions(object): - def __init__(self, version=1, commands=None): - self._version = version - commands = commands or [] - if not isinstance(commands, tuple) and not isinstance(commands, list): - commands = [commands] - self._commands = commands - - def get_commands(self): - return self._commands - - -def _parse_navigation_instructions(m): - """ - { - "version": 1, - "pilot": {"direction": "ahead" } - } - - { - "version": 1, - "pilot": [{"speed": 0}, {"sleep": 30, "direction": "left", "speed": 1}] - } - """ - - version = m.get("version", 1) - commands = [] - pilot = m.get("pilot") - if pilot is not None: - nodes = pilot if isinstance(pilot, list) else [pilot] - for node in nodes: - commands.append( - NavigationCommand( - sleep=None if node.get("sleep") is None else float(node.get("sleep")), - direction=_translate_navigation_direction(node.get("direction")), - speed=None if node.get("speed") is None else float(node.get("speed")), - ) - ) - return NavigationInstructions(version, commands) - - -class 
AbstractRouteDataSource(object): - __metaclass__ = ABCMeta - - @abstractmethod - def __len__(self): - raise NotImplementedError() - - @abstractmethod - def load_routes(self): - raise NotImplementedError() - - @abstractmethod - def list_routes(self): - raise NotImplementedError() - - @abstractmethod - def get_selected_route(self): - raise NotImplementedError() - - @abstractmethod - def open(self, route_name=None): - raise NotImplementedError() - - @abstractmethod - def is_open(self): - raise NotImplementedError() - - @abstractmethod - def close(self): - raise NotImplementedError() - - @abstractmethod - def quit(self): - raise NotImplementedError() - - @abstractmethod - def list_navigation_points(self): - raise NotImplementedError() - - @abstractmethod - def has_navigation_point(self, route, point): - raise NotImplementedError() - - @abstractmethod - def list_all_images(self): - raise NotImplementedError() - - @abstractmethod - def get_image(self, image_id): - raise NotImplementedError() - - @abstractmethod - def get_image_navigation_point(self, idx): - raise NotImplementedError() - - @abstractmethod - def get_image_navigation_point_id(self, idx): - raise NotImplementedError() - - @abstractmethod - def get_instructions(self, point): - raise NotImplementedError() - - -class FileSystemRouteDataSource(AbstractRouteDataSource): - - def __init__(self, directory, fn_load_image=(lambda x: x), load_instructions=True): - self.directory = directory - self.fn_load_image = fn_load_image - self.load_instructions = load_instructions - self.quit_event = multiprocessing.Event() - self._load_timestamp = 0 - self.routes = [] - self.selected_route = None - # Route specific data follows. 
- self.points = [] - self.all_images = [] - self.image_index_to_point = {} - self.image_index_to_point_id = {} - self.point_to_instructions = {} - self._check_exists() - - def _check_exists(self): - directory = self.directory - self._exists = directory is not None and os.path.exists(directory) and os.path.isdir(directory) - - def _reset(self): - self.selected_route = None - self.points = [] - self.all_images = [] - self.image_index_to_point = {} - self.image_index_to_point_id = {} - self.point_to_instructions = {} - self.quit_event.clear() - - def load_routes(self): - self._check_exists() - if not self._exists: - self._reset() - else: - _now = timestamp() # In micro seconds. - if _now - self._load_timestamp > 1e6: - # Each route is a sub-directory of the base folder. - self.routes = [d for d in os.listdir(self.directory) if not d.startswith(".")] - self._load_timestamp = _now - logger.info("Directory '{}' contains the following routes {}.".format(self.directory, self.routes)) - - @staticmethod - def _get_command(fname): - try: - with open(fname) as f: - return json.load(f) - except IOError: - return {} - - def __len__(self): - # Zero when no route selected. - return len(self.points) - - def list_routes(self): - return self.routes - - def get_selected_route(self): - return self.selected_route - - def open(self, route_name=None): - # Reopening the selected route constitutes a reload of the disk state. - self._reset() - if self._exists and route_name in self.routes: - try: - # Load the route navigation points. - _route_directory = os.path.join(self.directory, route_name) - if os.path.exists(_route_directory) and os.path.isdir(_route_directory): - np_dirs = sorted([d for d in os.listdir(_route_directory) if not d.startswith(".")]) - logger.info("{} -> {}".format(route_name, np_dirs)) - # Take the existing sort-order. - image_id = 0 - point_id = 0 # Cannot enumerate as points without images must be skipped. 
- for point_name in np_dirs: - if self.quit_event.is_set(): - break - np_dir = os.path.join(self.directory, route_name, point_name) - _pattern = np_dir + os.path.sep - im_files = sorted([f for f_ in [glob.glob(_pattern + e) for e in ("*.jpg", "*.jpeg")] for f in f_]) - if len(im_files) < 1: - logger.info("Skipping point '{}' as there are no images for it.".format(point_name)) - continue - if self.load_instructions: - contents = self._get_command(os.path.join(np_dir, "command.json")) - contents = contents if contents else self._get_command(os.path.join(np_dir, point_name + ".json")) - self.point_to_instructions[point_name] = _parse_navigation_instructions(contents) - # Collect images by navigation point. - for im_file in im_files: - self.all_images.append(self.fn_load_image(im_file)) - self.image_index_to_point[image_id] = point_name - self.image_index_to_point_id[image_id] = point_id - image_id += 1 - # Accept the point. - self.points.append(point_name) - point_id += 1 - self.selected_route = route_name - except OSError as e: - logger.info(e) - - def is_open(self): - return self.selected_route in self.routes - - def close(self): - self._reset() - - def quit(self): - self.quit_event.set() - - def list_navigation_points(self): - return self.points - - def has_navigation_point(self, route, point): - _dir = os.path.join(self.directory, route, point) - return os.path.exists(_dir) and os.path.isdir(_dir) - - def list_all_images(self): - return self.all_images - - def get_image(self, image_id): - image_id = -1 if image_id is None else image_id - images = self.list_all_images() - return images[image_id] if len(images) > image_id >= 0 else None - - def get_image_navigation_point(self, idx): - return self.image_index_to_point[idx] - - def get_image_navigation_point_id(self, idx): - return self.image_index_to_point_id[idx] - - def get_instructions(self, point): - return self.point_to_instructions.get(point) - - -class ReloadableDataSource(AbstractRouteDataSource): - def 
__init__(self, delegate): - self._delegate = delegate - self._lock = threading.Lock() - # Cache the most recent selected route. - self._last_listed_routes = [] - self._last_selected_route = None - - def _do_safe(self, fn): - _acquired = self._lock.acquire(False) - try: - return fn(_acquired) - finally: - if _acquired: - self._lock.release() - - def __len__(self): - return self._do_safe(lambda acquired: len(self._delegate) if acquired else 0) - - def load_routes(self): - with self._lock: - self._delegate.load_routes() - - def list_routes(self): - _acquired = self._lock.acquire(False) - try: - if _acquired: - self._last_listed_routes = self._delegate.list_routes() - return self._last_listed_routes - finally: - if _acquired: - self._lock.release() - - def get_selected_route(self): - _acquired = self._lock.acquire(False) - try: - if _acquired: - self._last_selected_route = self._delegate.get_selected_route() - return self._last_selected_route - finally: - if _acquired: - self._lock.release() - - def open(self, route_name=None): - with self._lock: - self._delegate.open(route_name) - - def is_open(self): - return self._do_safe(lambda acquired: self._delegate.is_open() if acquired else False) - - def close(self): - with self._lock: - self._delegate.close() - - def quit(self): - with self._lock: - self._delegate.quit() - - def list_navigation_points(self): - return self._do_safe(lambda acquired: self._delegate.list_navigation_points() if acquired else []) - - def has_navigation_point(self, route, point): - return self._do_safe(lambda acquired: self._delegate.has_navigation_point(route, point) if acquired else False) - - def list_all_images(self): - return self._do_safe(lambda acquired: self._delegate.list_all_images() if acquired else []) - - def get_image(self, image_id): - return self._do_safe(lambda acquired: self._delegate.get_image(image_id) if acquired else None) - - def get_image_navigation_point(self, idx): - return self._do_safe(lambda acquired: 
self._delegate.get_image_navigation_point(idx) if acquired else None) - - def get_image_navigation_point_id(self, idx): - return self._do_safe(lambda acquired: self._delegate.get_image_navigation_point_id(idx) if acquired else None) - - def get_instructions(self, point): - return self._do_safe(lambda acquired: self._delegate.get_instructions(point) if acquired else None) diff --git a/common/byodr/utils/protocol.py b/common/byodr/utils/protocol.py deleted file mode 100644 index 712b3b28..00000000 --- a/common/byodr/utils/protocol.py +++ /dev/null @@ -1,58 +0,0 @@ -from __future__ import absolute_import - -from byodr.utils import timestamp - - -class MessageStreamProtocol(object): - """ - Safety: - Protocol uses 2 timestamps, remote and local, and does not require the clocks to be synced. - Local means receiver side so incoming messages. - Because the clocks are not synced remote and local timestamps are not directly comparable. - Timestamps: - 1. remote as reported by the sender - 2. local as recorded by the receiver - - - The protocol can be validated or invalidated. - There is a warm-up period with invalidated protocol, after system reboot. - - - The incoming stream needs to be continuous (or uninterrupted) and recent (timely). - Continuity violation - Age violation - """ - - def __init__(self, max_age_ms=200, max_delay_ms=250): - self._max_age_micro = max_age_ms * 1000.0 - self._max_delay_micro = max_delay_ms * 1000.0 - # There is currently no distinction in violation types. 
- self._n_violations = 0 - self._last_message_time = 0 - self._last_protocol_time = 0 - - def _violation(self): - self._n_violations = 1 if self._n_violations < 1 else min(1e4, self._n_violations + 1) - - def _success(self): - self._n_violations = 0 if self._n_violations > 0 else max(-1e4, self._n_violations - 1) - - def reset(self): - self._n_violations = 0 - self._last_message_time = 0 - self._last_protocol_time = 0 - - def on_message(self, message_timestamp_micro): - # This is our time in microseconds. - local_time = timestamp() - if local_time - self._last_protocol_time > self._max_delay_micro: - self._violation() - elif message_timestamp_micro - self._last_message_time > self._max_age_micro: - self._violation() - else: - self._success() - self._last_message_time = message_timestamp_micro - self._last_protocol_time = local_time - - def check(self): - if timestamp() - self._last_protocol_time > self._max_delay_micro: - self._violation() - return self._n_violations diff --git a/common/byodr/utils/ssh.py b/common/byodr/utils/ssh.py deleted file mode 100644 index da8ca691..00000000 --- a/common/byodr/utils/ssh.py +++ /dev/null @@ -1,146 +0,0 @@ -# TESTED AND WORKING ON -# Firmware version :RUT9_R_00.07.06.1 -# Firmware build date: 2024-01-02 11:11:13 -# Internal modem firmware version: SLM750_4.0.6_EQ101 -# Kernel version: 5.4.259 - - -import logging -import subprocess -import time -import traceback - -import paramiko - -# Declaring the logger -logging.basicConfig(format="%(levelname)s: %(asctime)s %(filename)s %(funcName)s %(lineno)d %(message)s", datefmt="%Y-%m-%d %H:%M:%S %p") - -logging.getLogger().setLevel(logging.INFO) -logger = logging.getLogger(__name__) - -paramiko_logger = logging.getLogger("paramiko") -paramiko_logger.setLevel(logging.CRITICAL) - - -class Router: - def __init__(self, ip=None, username="root", password="Modem001", port=22): - self.ip = ip if ip is not None else self.__get_nano_third_octet() - self.username = username - self.password = 
password - self.port = int(port) # Default value for SSH port - self.client = None - self.__open_ssh_connection() - - def __get_nano_third_octet(self): - try: - # Fetch the IP address - ip_address = subprocess.check_output("hostname -I | awk '{for (i=1; i<=NF; i++) if ($i ~ /^192\\.168\\./) print $i}'", shell=True).decode().strip().split()[0] - - # Trim off the last segment of the IP address - parts = ip_address.split(".") - network_prefix = ".".join(parts[:3]) + "." - router_ip = f"{network_prefix}1" - return router_ip - except subprocess.CalledProcessError as e: - print(f"An error occurred: {e}") - return None - - def __open_ssh_connection(self): - """ - Opens an SSH connection to the router. - """ - try: - self.client = paramiko.SSHClient() - self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - self.client.connect(self.ip, self.port, self.username, self.password) - except Exception as e: - logger.error(f"Failed to open SSH connection: {e}") - self.client = None - - def _execute_ssh_command(self, command, ip=None, file_path=None, file_contents=None, suppress_error_log=False): - """ - Executes a command on the router via SSH and returns the result. - Optionally, can write to a file on the router using SFTP. 
- """ - router_ip = ip if ip is not None else self.ip - temp_client = None - - try: - if router_ip != self.ip: - # Establish a temporary connection for a different router - temp_client = paramiko.SSHClient() - temp_client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - temp_client.connect(router_ip, self.port, self.username, self.password) - client = temp_client - else: - # Check and use the persistent connection for the primary router - if not self.client or not self.client.get_transport() or not self.client.get_transport().is_active(): - self.__open_ssh_connection() - client = self.client - - if file_path and file_contents is not None: - # Handle SFTP file write operation - with client.open_sftp() as sftp: - with sftp.file(file_path, "w") as file: - file.write(file_contents) - # No command output in case of SFTP operation - return None - - # Execute the SSH command - stdin, stdout, stderr = client.exec_command(command) - result = stdout.read().decode().strip() - error = stderr.read().decode().strip() - - if error: - raise Exception(error) - - return result - - except Exception as e: - if not suppress_error_log: - # Log the error - caller = traceback.extract_stack(None, 2)[0][2] - logger.info(f"Error occurred in {caller}: {e}") - return None - - finally: - # Close the temporary client if it was used - if router_ip != self.ip and temp_client: - temp_client.close() - - def __close_ssh_connection(self): - """ - Closes the SSH connection to the router. 
- """ - if self.client: - self.client.close() - self.client = None - - def fetch_ssid(self): - """Get SSID of current segment""" - output = None - # The loop is to keep calling the ssh function until it returns a value - while output is None: - output = self._execute_ssh_command("uci get wireless.@wifi-iface[0].ssid", suppress_error_log=True) - if output is None: - time.sleep(1) - return output - - -class Nano: - @staticmethod - def get_ip_address(): - try: - ip_addresses = ( - subprocess.check_output( - "hostname -I | awk '{for (i=1; i<=NF; i++) if ($i ~ /^192\\.168\\./) print $i}'", - shell=True, - ) - .decode() - .strip() - ) - # Split in case there are multiple local IP addresses - return ip_addresses - except subprocess.CalledProcessError as e: - print(f"An error occurred: {e}") - return None diff --git a/common/byodr/utils/usbrelay.py b/common/byodr/utils/usbrelay.py deleted file mode 100644 index 04b3f8e2..00000000 --- a/common/byodr/utils/usbrelay.py +++ /dev/null @@ -1,252 +0,0 @@ -from __future__ import absolute_import - -import logging -import multiprocessing -import time - -import usb.core -import usb.util -from usb.util import CTRL_IN, CTRL_OUT, CTRL_TYPE_VENDOR - -logger = logging.getLogger(__name__) - - -class SingleChannelUsbRelay(object): - """ - HALJIA USB-relaismodule USB Smart Control Switch Intelligent Switch Control USB Relais module - """ - - def __init__(self, vendor=0x1A86, product=0x7523): - self._vendor = vendor - self._product = product - self._device = None - self._endpoint = None - - def attach(self): - self._device = usb.core.find(idVendor=self._vendor, idProduct=self._product) - if self._device is None: - logger.error("Device vendor={} product={} not found.".format(self._vendor, self._product)) - return - - try: - if self._device.is_kernel_driver_active(0): - self._device.detach_kernel_driver(0) - - _config = self._device.get_active_configuration() - _intf = _config[(0, 0)] - - self._endpoint = usb.util.find_descriptor( - _intf, - # 
match the first OUT endpoint - custom_match=(lambda _e: usb.util.endpoint_direction(_e.bEndpointAddress) == usb.util.ENDPOINT_OUT), - ) - - if self._endpoint is None: - logger.error("Endpoint not found.") - except Exception as e: - logger.error(e) - - def open(self): - if self._endpoint is not None: - self._endpoint.write([0xA0, 0x01, 0x00, 0xA1]) - - def close(self): - if self._endpoint is not None: - self._endpoint.write([0xA0, 0x01, 0x01, 0xA2]) - - -class DoubleChannelUsbRelay(object): - """ - ICQUANZX SRD-05VDC-SL-C 2-way - """ - - def __init__(self, vendor=0x16C0, product=0x05DF): - self._vendor = vendor - self._product = product - self._device_on = [[0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], [0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]] - self._device_off = [[0xFC, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], [0xFC, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]] - self._device = None - - def find(self): - return usb.core.find(idVendor=self._vendor, idProduct=self._product) - - def poll(self): - return self.find() is not None - - def attach(self): - self._device = self.find() - self._device = usb.core.find(idVendor=self._vendor, idProduct=self._product) - if self._device is None: - logger.error("Device vendor={} product={} not found.".format(self._vendor, self._product)) - return - - try: - if self._device.is_kernel_driver_active(0): - self._device.detach_kernel_driver(0) - self._device.set_configuration() - except Exception as e: - logger.error(e) - - def is_attached(self): - return self._device is not None - - def open(self, channel=0): - assert self.is_attached(), "The device is not attached." - self._device.ctrl_transfer(0x21, 0x09, 0x0300, 0x0000, "".join(chr(n) for n in self._device_off[channel]), 1000) - - def close(self, channel=0): - assert self.is_attached(), "The device is not attached." 
- self._device.ctrl_transfer(0x21, 0x09, 0x0300, 0x0000, "".join(chr(n) for n in self._device_on[channel]), 1000) - - -class TransientMemoryRelay(object): - """Fake class, I suppose""" - - def __init__(self, num_channels=4): - self._state = [0] * num_channels - - def open(self, channel=0): - self._state[channel] = 0 - - def close(self, channel=0): - self._state[channel] = 1 - - def states(self): - return [bool(x) for x in self._state] - - -class FourChannelUsbRelay(object): - """ - Conrad Components 393905 Relay Module 5 V/DC - Conrad article 393905 - Conrad supplier 393905 - EAN: 4016138810585 - Type: CP210x - """ - - MAX_GPIO_INDEX = 4 - - CP210X_VENDOR_ID = 0x10C4 - CP210X_PRODUCT_ID = 0xEA60 - - CP210X_REQUEST_TYPE_READ = CTRL_IN | CTRL_TYPE_VENDOR - CP210X_REQUEST_TYPE_WRITE = CTRL_OUT | CTRL_TYPE_VENDOR - - CP210X_REQUEST_VENDOR = 0xFF - - CP210X_VALUE_READ_LATCH = 0x00C2 - CP210X_VALUE_WRITE_LATCH = 0x37E1 - - def __init__(self, vendor=CP210X_VENDOR_ID, product=CP210X_PRODUCT_ID): - """ - Adapted from https://github.com/jjongbloets/CP210xControl/blob/master/CP210xControl/model.py. - """ - self._vendor = vendor - self._product = product - self._device = None - - def find(self): - return usb.core.find(idVendor=self._vendor, idProduct=self._product) - - def poll(self): - return self.find() is not None - - def attach(self): - self._device = self.find() - if self._device is None: - logger.error("Device vendor={} product={} not found.".format(self._vendor, self._product)) - return - - try: - if self._device.is_kernel_driver_active(0): - self._device.detach_kernel_driver(0) - self._device.set_configuration() - except Exception as e: - logger.error(e) - - def is_attached(self): - return self._device is not None - - def _query(self, request, value, index, length): - assert self.is_attached(), "The device is not attached." 
- return self._device.ctrl_transfer(self.CP210X_REQUEST_TYPE_READ, request, value, index, length) - - def _write(self, request, value, index, data): - assert self.is_attached(), "The device is not attached." - return self._device.ctrl_transfer(self.CP210X_REQUEST_TYPE_WRITE, request, value, index, data) - - def _set_gpio(self, index, value): - mask = 1 << index - values = (0 if value else 1) << index - msg = (values << 8) | mask - return self._write(self.CP210X_REQUEST_VENDOR, self.CP210X_VALUE_WRITE_LATCH, msg, 0) - - def _get_gpio_states(self): - results = [] - response = self._query(self.CP210X_REQUEST_VENDOR, self.CP210X_VALUE_READ_LATCH, 0, 1) - if len(response) > 0: - response = response[0] - for idx in range(self.MAX_GPIO_INDEX): - results.append((response & (1 << idx)) == 0) - return results - - def open(self, channel=0): - self._set_gpio(channel, 0) - - def close(self, channel=0): - self._set_gpio(channel, 1) - - def states(self): - return self._get_gpio_states() - - -class SearchUsbRelayFactory(object): - def __init__(self): - _relay = FourChannelUsbRelay() - # The others are not supported until they expose a read state method. 
- # if not _relay.poll(): - # _relay = DoubleChannelUsbRelay() - _relay.attach() - self._relay = _relay - - def get_relay(self): - return self._relay - - -class StaticRelayHolder(object): - def __init__(self, relay, default_channels=(0,)): - self._relay = relay - self._default_channels = self._tup_or_li(default_channels) - self._pulse_channels = () - self._lock = multiprocessing.Lock() - - @staticmethod - def _tup_or_li(arg): - return arg if isinstance(arg, tuple) or isinstance(arg, list) else (arg,) - - def _arg_(self, ch=None): - return self._default_channels if ch is None else self._tup_or_li(ch) - - def set_pulse_channels(self, channels): - with self._lock: - self._pulse_channels = self._tup_or_li(channels) - - def open(self, channels=None): - with self._lock: - [self._relay.open() for ch in self._arg_(channels)] - - def close(self, channels=None): - with self._lock: - for ch in self._arg_(channels): - self._relay.close() - if ch in self._pulse_channels: - time.sleep(0.100) - self._relay.open() - - def states(self): - with self._lock: - return self._relay.states() - - def pulse_config(self): - with self._lock: - return [i in self._pulse_channels for i in range(len(self._relay.states()))] diff --git a/docker-compose.carla.yml b/docker-compose.carla.yml deleted file mode 100644 index 102bd25d..00000000 --- a/docker-compose.carla.yml +++ /dev/null @@ -1,64 +0,0 @@ -version: '2' -services: - zerotier: - image: rwgrim/docker-noop:latest - restart: 'no' - command: ["/bin/true"] - wireguard: - image: rwgrim/docker-noop:latest - restart: 'no' - command: ["/bin/true"] - httpd: - restart: 'no' - volumes: - - ./common:/common - - ./httpd:/app - ftpd: - restart: 'no' - volumes: - - ./common:/common - - ./ftpd:/app - rosnode: - restart: 'no' - volumes: - - ./common:/common - - ./rosnode:/app - mongodb: - restart: 'no' - volumes: - - ./common:/common - - ./mongodb:/app - vehicle: - # Need extra cpu resources to run the h264 encoder sockets. 
- cpuset: "1,4,5" - build: - context: . - dockerfile: vehicles/carla09/Dockerfile - image: centipede2donald/byodr-ce:carla-099 - restart: 'no' - network_mode: host - volumes: - - ./common:/common - - ./vehicles/carla09:/app - teleop: - restart: 'no' - volumes: - - ./common:/common - - ./teleop:/app - pilot: - restart: 'no' - volumes: - - ./common:/common - - ./pilot:/app - inference: - build: - context: . - dockerfile: inference/runtime-cp36-x86.dockerfile - image: centipede2donald/byodr-ce:inference-carla - restart: 'no' - ipc: host - environment: - - NVIDIA_VISIBLE_DEVICES=all - volumes: - - ./common:/common - - ./inference:/app diff --git a/docker-compose.override.yml b/docker-compose.override.yml deleted file mode 100644 index 68fcda94..00000000 --- a/docker-compose.override.yml +++ /dev/null @@ -1,5 +0,0 @@ -version: '3' # or whatever version you're using - -services: - pilot: - cpuset: '' # Clearing the CPU set constraint for testing diff --git a/docker-compose.test.yml b/docker-compose.test.yml deleted file mode 100644 index e27e9a69..00000000 --- a/docker-compose.test.yml +++ /dev/null @@ -1,53 +0,0 @@ -version: '2' -services: - zerotier: - image: rwgrim/docker-noop:latest - restart: 'no' - command: ["/bin/true"] - httpd: - image: rwgrim/docker-noop:latest - restart: 'no' - command: ["/bin/true"] - ftpd: - image: rwgrim/docker-noop:latest - restart: 'no' - command: ["/bin/true"] - rosnode: - image: rwgrim/docker-noop:latest - restart: 'no' - command: ["/bin/true"] - raspi: - build: - context: . 
- dockerfile: raspi/Dockerfile - image: centipede2donald/byodr-ce:servos - user: root - restart: 'no' - command: ["python", "-m", "pytest", "-vvv", "tests.py"] - volumes: - - ./common:/common - - ./raspi:/app - vehicle: - restart: 'no' - command: ["python", "-m", "pytest", "-vvv", "tests_rover.py"] - volumes: - - ./common:/common - - ./vehicles/rover:/app - teleop: - restart: 'no' - command: ["python", "-m", "pytest", "-vvv", "tests.py"] - volumes: - - ./common:/common - - ./teleop:/app - pilot: - restart: 'no' - command: ["python", "-m", "pytest", "-vvv", "tests.py"] - volumes: - - ./common:/common - - ./pilot:/app - inference: - restart: 'no' - command: ["python3", "-m", "pytest", "-vvv", "inference/tests.py"] - volumes: - - ./common:/common - - ./inference:/app diff --git a/docs/Services_Documentation.txt b/docs/Services_Documentation.txt deleted file mode 100644 index 0bc87099..00000000 --- a/docs/Services_Documentation.txt +++ /dev/null @@ -1,222 +0,0 @@ -Service Architecture -Smart Segment -A smart segment is part of the robot that houses computer systems inside that allow it to move autonomously. Connected segments make the robot act like a caterpillar. -This computer system includes a Raspberry Pi, a Jetson Nano, 2 cameras, a router and 2 motor controllers. - -Hardware -1) AI Camera (camera0) -A smart segment uses Hikvision PTZ Dome Network camera for its AI Camera. -IP: 192.168.1.64 --Input: Footage from its surroundings --Output: Sends H264 encoded video ouput to the Pi’s Docker container service called “Stream0”. - -2) Operator Camera (camera1) -The smart segment also uses a 2nd Hikvision PTZ Dome Network camera for an Operator Camera. -IP: 192.168.1.65 --Input: Footage from its surroundings --Output: Sends H264 encoded video output to the Pi’s Docker container service called “Stream1”. - -3) Raspberry Pi 4B -OS: balena-cloud-byodr- pi-raspberrypi4-64-2.99.27-v14.0.8 -IP: 192.168.1.32 -This OS allows it to communicate with Balena Cloud. 
Inside the Pi, there are 5 processes running, 4 of which run in their own separate Docker containers. - -4) Nvidia Jetson Nano -OS: balena-cloud-byodr-nano-jetson-nano-2.88.4+rev1-v12.11.0 -IP: 192.168.1.100 -This OS allows it to communicate with Balena Cloud. Inside the Nano, there are 10 processes running, all of which run in their own separate Docker containers. - -5) RUT-955 -IP: 192.168.1.1 -The router inside the segment is called RUT955 from Teltonika. The router has LAN, WAN, 4G, 5G and LTE capabilities. It’s ethernet connectivity is extended with a switch. The router is responsible for all internet connectivity between the segment and the rest of the Internet. -This router also includes an internal relay that works as a switch that lets the battery power the rest of the segment. Only when the router is booted up and the relay switch closes, will the segment receive power to the rest of its internal components. - -6) Motor Controller 1 -The segment uses the Mini FSESC6.7 from Flipsky. It is connected via USB to the ttyACM0 serial port of the Pi. --Input: Commands from the Pi. --Output: Sends power to its respective motor wheel in order to turn it according to its commands. - -7) Motor Controller 2 -The segment uses the Mini FSESC6.7 from Flipsky. It is connected via USB to the ttyACM1 serial port of the Pi. --Input: Commands from the Pi. --Output: Sends power to its respective motor wheel in order to turn it according to its commands. - -Software stack -1) Balena -From Balena, we use their Balena Cloud services, and also use BalenaOS on the Raspberry Pi and Jetson Nano, to make them compatible with Balena Cloud. From the Balena Cloud we can upload new versions of software, update segments OTA, reboot, connect via SSH, and manage segments remotely. - -2) Docker -Docker is a platform for building, shipping, and running applications in containers. 
Containers are lightweight, portable, and self-sufficient units that contain all the necessary software, libraries, and dependencies to run an application. Docker enables developers to package their applications into containers, which can be easily deployed and run on any platform that supports Docker. With Docker, developers can ensure that their applications run consistently across different environments, from development to production. -The BYODR project includes dockerfiles that can be used to build a Docker image for each service as well as instructions on how to deploy the image onto a robot using Balena Cloud. By using this approach, users can ensure that the software stack is consistent and reproducible across multiple robots, and can easily deploy updates and manage their fleet of cars from a central location. - -3) Zerotier -Zerotier is a “freemium” P2P (Peer to Peer) VPN service that allows devices with internet capabilities to securely connect to P2P virtual software-defined networks. -The Pi has a Zerotier instance running inside it. This means that it is equipped to work with a Zerotier client that is running on our devices, so that we can add the Pi to our VPN network. -Similarly to the Pi, the Nano also has the same functionality regarding Zerotier, although arguably more important here, since it allows the User to connect to the Nano, and by extension the Web server, via a secure zerotier network. - -4) Wireguard -Similarly to Zerotier, Wireguard is also a VPN. The difference here is that Wireguard is used by the Nano in every network procedure it has to go through for safety. Since the Nano has plenty more processes that require a network connection, compared to the Pi, Wireguard is an extra layer of security against attacks. This process is running inside a docker container. --Q: Why do we use Wireguard if we have ZT? --A: Since ZeroTier and WireGuard look similar, the project uses both ZeroTier and WireGuard for different purposes. 
ZeroTier is used to create a secure network connection between the robot and the user's computer, while WireGuard is used to encrypt the data that is transmitted over that connection. Together, these technologies provide a secure and reliable way for users to remotely control the robots. - -Raspberry Pi docker service descriptions: -1) Stream0 --Input: Receives video stream from the AI camera. --Function: Creates a high quality H264 video output stream --Output: Sends the stream via RTSP to the web server located in Teleop. --Q1: Why does the Pi create the streams, and not just send them from the cameras directly to the nano, bypassing the Pi? - -2) Stream1 --Input: Receives video stream from the Operator camera. --Purpose: Similarly to the process above, it creates a high quality H264 video output stream. --Output: Sends the stream via RTSP to the web server located in Teleop. --Q1: How does the AI get the images for itself? From the H264 stream, or someplace else? - -3) Zerotier --Input: Receives input from the user, using the built-in command line. --Function: We can add the Pi to our VPN network. --Output: The Pi can communicate with the software-defined virtual networks that the user has built, via the internet. --Q1: Why does the Pi need the zerotier? - -4) Servos --Input: Receives commands in JSON format from Teleop, Inference, Pilot that request movement from the motors. --Function: Sets up a JSON server that listens on 0.0.0.0:5555 for commands from other processes. Listening to 0.0.0.0 means listening from anywhere that has network access to this device. It also sets up a JSON Publisher so that this service can send JSON data to any services that are listening to this service. Decodes commands received from the other services are decoded and stored in a deque. -This service also initiates an http server listening to port 9101 (default option). --Output 1: The commands are sent to the Motor controllers via the serial USB connection. 
--Output 2: --Q1: BalenaCloud lists another service called “pigpiod”. This service and the “servos” service both use the same image in their docker container. What does the pigpiod service do? --Q2: Why does this service have a JSON publisher? Who does it send data to? - -5) Battery Management System (BMS) [The only service that does not run in a docker container] --Input: Receives data from the BMS inside the battery itself. --Function: The Pi uses an I2C Pi Hat to communicate with the special battery that the segment uses. From here, the Pi can give a “pulse” to the battery in order to “reset” it. This system also allows for seamless use of 2 or more of the same battery, on the same segment. This process is exclusively hardware based, so it is not running in a container. --Output: Sends data to the BMS inside the battery. - -Jetson Nano docker service descriptions: -1) HTTPD --Input: Listens for data requests from Teleop, Pilot, Stream1, Stream0. The sources are listed in the configuration file (called haproxy.conf) that the proxy server uses. --Function: This service sets up a proxy server (Intermediate server between the client and the actual HTTP server) using HAProxy, for load balancing and request forwarding. --Output: Forwards requests to the same services as above, but taking into account load balancing. The destinations are listed in the configuration files that the proxy server uses. - -2) Inference --Input 1: Receives stream from AI camera with the socket url being 'ipc:///byodr/camera_0.sock' --Input 2: Receives a route from Teleop’s Output 5 --Input 3: Receives the timestamp with a 'restart' command from Output 6 of Teleop --Function: This service is responsible for an interface for generating steering angles and making predictions based on images. These actions are based on a trained neural network model. If this service has input from Teleop, they override the self-driving directions of the model. 
-This service also initiates an IPC server with url 'ipc:///byodr/inference_c.sock', and a JSON publisher with url 'ipc:///byodr/inference.sock'. The IPC server has the ability to receive requests from other services, and respond based on the message type of the request. --Output 1: Sends the AI model’s current state (predicted action, obstacle avoidance, penalties, and other navigation-related information) to the local JSON Publisher. --Output 2:Creates list with any errors from this service. The list will be send to the Teleop service upon request. --Q1: How does Inference, Pilot and Teleop work together, if they work together? --Q2: How does Inference send its data to the Pi for proper movement? - -3) Zerotier --Input: Receives input from the user, using the buildin command line. --Function: The Nano can be added into a virtual network. --Output: Can communicate securely with nodes of the same network. - -4) WireGuard (Not used) --Input: Receives data from the Nano and the Router. --Function: Encrypts the data of the Nano. --Output: The data send by the Nano towards the internet are encrypted. - -5) Teleop --Input 1: Receives stream from the AI camera with the socket url being 'ipc:///byodr/camera_0.sock' --Input 2: Receives stream from Op camera with the socket url being 'ipc:///byodr/camera_1.sock' --Input 3: Receives a JSON from the Pilot service, that includes the proper action for the segment to take, depending on various factors. 
Potential actions are: - >Switch to manual driving mode - >Continue with the current manual mode commands that are being executed - >Continue with the current autopilot mode commands that are being executed - >Continue with the current backend(Carla simulation) mode commands that are being executed -Note, those commands could be old, repeated or empty commands (do nothing) --Input 4: Receives the current state (location, speed, timestamp and more) of the segment that is located in the Carla simulation, in a JSON format from the Vehicle service --Input 5: Receives the AI model’s current state in a JSON format from the Inference service’s Output 1. --Input 6: Receives input from the Operator’s method of control --Input 7: Receives lists of errors and/or capabilities from the following services from respective sockets: - >Pilot service via 'ipc:///byodr/pilot_c.sock' - >Inference service via 'ipc:///byodr/inference_c.sock' - >Vehicle service via ipc:///byodr/vehicle_c.sock' --Function: This service includes a web server that listens for inputs from multiple sources that are later used to move the robot. The key presses from the operator are registered and reflected upon the robot using this service. -This service includes a logger that logs information regarding the manual control of the robot. -In addition, there is a function in this server that encodes the streams from the cameras to MJPEG. -It also hosts the site design files necessary to draw the Web App. --Output 1: Robot movement according to user’s commands??? --Output 2: Live video feed on the web app --Output 3: MJPEG stream capability --Output 4: Logs and messages produced during operation. --Output 5: Publishes the segment’s selected route on its JSON Publisher. --Output 6: Publishes a timestamp with a 'restart' command on its JSON Publisher --Q1: How does “Teleop” send data to the Pi for robot movement? --Q2: What does it do with the route images? 
--Q3: From where does it receive user input from the controller/keyboard?(If the input is even sent here) --Q4: Does this service also communicate with the MongoDB service somehow? If yes, how? --Q5: What does it exactly receive from the Pilot service? - -6) Vehicle --Input 1: Receives a JSON from the Pilot service, that includes the proper action for the segment to take, depending on various factors. Potential actions are: - >Switch to manual driving mode - >Continue with the current manual mode commands that are being executed - >Continue with the current autopilot mode commands that are being executed - >Continue with the current backend(Carla simulation) mode commands that are being executed --Input 2: Receives the timestamp with a 'restart' command from Output 6 of Teleop --Input 3: Receives a route from Teleop’s Output 5 --Function: This process sets up a server that connects to a CARLA simulator and communicates with it to control a self-driving car. Carla is an open-source simulator for autonomous driving research. It is used to simulate the robot’s behavior in a virtual environment. -This service also sets up 2 HTTP servers that receive video streams from the simulated segment. --Output 1: Publishes the current state (location, speed, timestamp and more) of the segment that is located in the Carla simulation, in the JSON Publisher. --Output 2: Creates list with any errors and capabilities of the simulated segment. The list will be send to the Teleop service upon request. --Q1: Is this process exclusively used to send data to the CARLA simulator, and nothing else regarding the driving of the robot? --Q2: Where is the CARLA simulation hosted? --Q3: How does this service connect to the simulation server? - -7) ROS Node --Input 1: Receives a JSON from the Pilot service, that includes the proper action for the segment to take, depending on various factors. 
Potential actions are: - >Switch to manual driving mode - >Continue with the current manual mode commands that are being executed - >Continue with the current autopilot mode commands that are being executed - >Continue with the current backend(Carla simulation) mode commands that are being executed --Input 2: Receives the timestamp with a 'restart' command from Output 6 of Teleop --Function: This service defines a ROS2 node which connects to a teleop node and a pilot node, and switches the driving mode to Autopilot or Manual, depending on user input. It also sets a max speed for the segment. --Output: Sends “m” to the Pilot service --Q1: Why exactly do we need this service? --Q2: Does the communication with other services imply the existence of multiple nodes? --Q3: Why does it publish json data only to the Pilot, and not to both Pilot and Teleop? --Q4: What is “m” and what does it include? - -8) Pilot --Input 1: Receives a route from Teleop’s Output 5 --Input 2: Receives “m” from Rosnode’s Output 1 --Input 3: Receives the current state (location, speed, timestamp and more) of the segment that is located in the Carla simulation from Vehicle’s Output 1. --Input 4: Receives the AI model’s current state (predicted action, obstacle avoidance, penalties, and other navigation-related information) from Inference’s Output 1 --Input 5: Receives the timestamp with a 'restart' command from Output 6 of Teleop --Input 6: Receives user input (????????????) --Function: This process sets up a JSON publisher and a local IPC server to send data to other services that have JSON collectors. It also is responsible for controlling the segment’s movement by using a pre-trained AI model, or using user input, or driving the segment inside the simulation. --Output 1: Publishes a JSON to its JSON Publisher that includes the proper action for the segment to take, depending on various factors. 
Potential actions are: - >Switch to manual driving mode - >Continue with the current manual mode commands that are being executed - >Continue with the current autopilot mode commands that are being executed - >Continue with the current backend(Carla simulation) mode commands that are being executed --Output 2: Creates list with any errors from this service. The list will be send to the Teleop service upon request. --Q1: This process is exclusively used by the robot for its autonomous driving? --Q2: How does this service cooperate with “Inference”? --Q3: Why does this service start an HTTP server? --Q4: How does this service send movement commands to the Pi? (If it does) - -9) MongoDB --Input: Receives data from the segment, and stores any and all logs produced by the other services (?) --Function: This service creates a default MongoDB user and starts a configurable MongoDB server on the local machine. --Output: Stores logs in its builtin database --Q1: What does the DB store inside it? --Q2: How does the DB get the data that it stores? - -10) FTPD -Input: Receives the newly trained model from the training server. --Function: This service creates a Pure-FTPd Server with a predefined set of commands. This server is used to send its training data to the server, and similarly, receive the trained model from the server. --Output: Sends data to the AI training server with parameters for its specific training. --Q1: Is this the code that connects the Nano to the Firezilla FTP server? (Mentioned in the readthedocs) --Q2: How does this ftp server store, send and receive data from the training server? - - -General questions -Q1: How json receivers and publishers work? Because this is used to send data one to another. -Q2: If all segments are in a zerotier network, does any data sent between the segments encrypted? -Q3: Why do we docker for this project? -Q4: Where is the watchdog function of the Pi/Nano? 
diff --git a/jetson_runtime/README.md b/jetson_runtime/README.md new file mode 100644 index 00000000..2f150c5e --- /dev/null +++ b/jetson_runtime/README.md @@ -0,0 +1,106 @@ +# Nano Runtime + +## Overview + +The **Jetson Nano Runtime** handles computationally intensive tasks such as inference, video streaming, and network management within the **BYODR** project. + +## Docker Services + +The Jetson Nano runs the following Docker services: + +### 1. HTTPD + +- **Input**: Listens for data requests from `Teleop`, `Pilot`, `Stream1`, and `Stream0`. These sources are listed in the configuration file (`haproxy.conf`) used by the proxy server. +- **Function**: This service sets up a proxy server using HAProxy to balance load and forward requests. +- **Output**: Forwards requests to the specified services, optimizing data flow and managing traffic. +- **Configuration**: Configured using `haproxy.template` and `haproxy_ssl.template` located in `httpd/certs/`. + +### 2. Inference + +- **Input**: + - Stream from the AI camera with the socket URL `ipc:///byodr/camera_0.sock`. + - Routes from `Teleop` with the socket URL `ipc:///byodr/teleop.sock`. + - Timestamps from `Teleop` with the socket URL `ipc:///byodr/teleop_c.sock`. +- **Function**: Uses a trained AI model to generate steering angles and make predictions based on image data from the AI camera. Overrides the self-driving directions when input is received from `Teleop`. +- **Outputs**: + - Sends commands to the `Servos` service for motor control. + - Initiates an IPC server at `ipc:///byodr/inference_c.sock`. + - Uses a JSON publisher at `ipc:///byodr/inference.sock` to broadcast data. +- **Files**: Model files are stored in `inference/models/`. + +### 3. Teleop + +- **Inputs**: + - Receives streams from `Stream0` and `Stream1` services on the Pi. + - Receives JSON data from `Pilot`, `Vehicle`, and `Inference` services. + - Receives control input from the operator. 
+- **Function**: Acts as the central control point, translating user input into robot commands and managing live video streams. This service also encodes video streams into MJPEG for web display. +- **Outputs**: + - Robot movement based on user commands. + - Live video feed to the web app. + - Logs and messages stored in `MongoDB`. +- **Files**: Web server files can be found in `teleop/htm/`. + +### 4. Vehicle + +- **Inputs**: + - JSON data from the `Pilot` service. + - JSON data from the `Teleop` service. +- **Function**: Sets up a server to connect with a CARLA simulator, which is used to simulate the robot's behavior in a virtual environment. +- **Outputs**: Sends data to a server running an instance of CARLA, representing a segment inside the simulation. +- **Questions**: + - Is this service solely for communicating with the CARLA simulator? + - Where is the CARLA simulation hosted? + - What do the video streams from the server do? + +### 5. ROS Node + +- **Inputs**: + - JSON data from the `Pilot` service. + - JSON data from the `Teleop` service. +- **Function**: Defines a ROS2 node, facilitating communication between the `Teleop` node and the `Pilot` node. It controls the driving mode (Autopilot or Manual) based on user input and adjusts the segment's max speed. +- **Output**: Publishes ROS commands in JSON format to the `Pilot` service. + +### 6. Pilot + +- **Inputs**: + - JSON data from `Teleop`, `ROS Node`, `Vehicle`, and `Inference` services. + - IPC chatter from `Teleop`. +- **Function**: Controls the segment's autonomous movement using a pre-trained AI model. Sets up a JSON publisher and local IPC server to share data with other services. +- **Output**: Sends JSON commands to the `Servos` service for autonomous driving. + +### 7. Zerotier + +- **Function**: Adds the Jetson Nano to a secure P2P virtual network, allowing it to communicate with other nodes. +- **Output**: Enables secure connection with other segments in the ZeroTier network. 
+- **Input**: Configured through command-line inputs. +- **Note**: Works in conjunction with WireGuard for added security. + +### 8. WireGuard + +- **Input**: Manages network data from the Jetson Nano and Router. +- **Function**: Encrypts all communication to enhance security during data transmission. +- **Output**: Secures all outgoing and incoming data. +- **Why Use WireGuard and ZeroTier?**: WireGuard focuses on encryption, while ZeroTier manages network connections between devices, providing a layered security approach. + +### 9. MongoDB + +- **Input**: Receives logs and data from `Teleop` and other services. +- **Function**: Stores logs and data in a local MongoDB instance. +- **Output**: Provides a structured storage for logs that can be accessed by other services. +- **Configuration**: Located in `mongodb/wrap.py`. + +### 10. FTPD + +- **Input**: Receives newly trained models from the training server. +- **Function**: Sets up a Pure-FTPd server for managing model data. Facilitates the transfer of models to/from the training server. +- Creates an FTP server that exposes three folders to the user: `Autopilot`, `Models` and `photos`. The user can download the training session files from the Autopilot folder after the robot has finished creating the compressed file. +- **Files**: Configuration scripts are found in `ftpd/`. +- **Questions**: + - Is this service the link between the Nano and the FileZilla FTP server? Yes + +### 11. Following + +- **Input**: Gets starting and stopping commands from `teleop`. +- **Function**: Makes the robot follow a person in front of it. +- **Output**: Sends movement commands to the `vehicle` service, as it streamlines the commands from different services to it. 
diff --git a/jetson_runtime/docker-compose.yml b/jetson_runtime/docker-compose.yml new file mode 100644 index 00000000..0377c775 --- /dev/null +++ b/jetson_runtime/docker-compose.yml @@ -0,0 +1,161 @@ +version: '2' +volumes: + volume_zerotier_config: + volume_wireguard_config: + volume_ftpd_config: + volume_mongodb_config: + volume_mongodb_data: + volume_byodr_config: + volume_byodr_sockets: + volume_byodr_sessions: +services: + zerotier: + cpuset: '0' + image: zyclonite/zerotier:1.6.6 + restart: always + network_mode: host + devices: + - '/dev/net/tun' + cap_add: + - SYS_ADMIN + - NET_ADMIN + - CAP_SYS_RAWIO + volumes: + - volume_zerotier_config:/var/lib/zerotier-one:rw + wireguard: + cpuset: '0' + image: masipcat/wireguard-go + container_name: wireguard + restart: always + network_mode: host + devices: + - '/dev/net/tun' + cap_add: + - SYS_ADMIN + - NET_ADMIN + - CAP_SYS_RAWIO + volumes: + - volume_wireguard_config:/etc/wireguard:rw + httpd: + cpuset: '0' + build: + context: . + dockerfile: httpd/Dockerfile + restart: always + network_mode: host + command: ['python', 'wrap.py'] + stop_signal: SIGKILL + volumes: + - volume_byodr_config:/config:rw + ftpd: + cpuset: '0' + build: + context: . + dockerfile: ftpd/Dockerfile + restart: always + command: ['python3', 'wrap.py'] + stop_signal: SIGKILL + ports: + - '21:21' + - '30000-30009:30000-30009' + volumes: + - volume_ftpd_config:/etc/pureftpd:rw + - volume_byodr_sessions:/home/ftpuser:rw + rosnode: + cpuset: '0' + build: + context: . + dockerfile: rosnode/Dockerfile + restart: always + command: ['python3', 'app.py', '--name', 'rosnode'] + network_mode: host + stop_signal: SIGKILL + volumes: + - volume_byodr_sockets:/byodr:rw + - volume_byodr_config:/config:ro + mongodb: + cpuset: '0' + build: + context: . 
+ dockerfile: mongodb/Dockerfile + restart: always + command: ['python3', 'wrap.py'] + network_mode: host + stop_signal: SIGKILL + environment: + MONGO_INITDB_ROOT_USERNAME: admin + MONGO_INITDB_ROOT_PASSWORD: robot + volumes: + - volume_mongodb_config:/config:rw + - volume_mongodb_data:/data/db:rw + teleop: + cpuset: '0' + build: + context: . + dockerfile: teleop/Dockerfile + restart: always + command: ['sh', '-c', 'python3 -m teleop.app --name teleop --routes /sessions/routes'] + network_mode: host + depends_on: + - mongodb + environment: + LD_PRELOAD: libgomp.so.1 + volumes: + - volume_byodr_sockets:/byodr:rw + - volume_byodr_config:/config:rw + - volume_byodr_sessions:/sessions:rw + vehicle: + cpuset: '1' + build: + context: . + dockerfile: vehicles/rover/Dockerfile + restart: always + privileged: true # NvMedia device creation for omx decoder. + network_mode: host + environment: + LD_PRELOAD: libgomp.so.1 + volumes: + - volume_byodr_sockets:/byodr:rw + - volume_byodr_config:/config:rw + pilot: + cpuset: '2' + build: + context: . + dockerfile: pilot/Dockerfile + restart: always + privileged: true # Access to usb devices without udev rules. + command: ['python3', '-m', 'pilot.app', '--name', 'pilot', '--routes', '/sessions/routes'] + ports: + - '8082:8082' + environment: + LD_PRELOAD: libgomp.so.1 + volumes: + - volume_byodr_sockets:/byodr:rw + - volume_byodr_config:/config:ro + - volume_byodr_sessions:/sessions:ro + inference: + # cpuset: "3" # - not compatible with onnx runtime - use env omp_places. + build: + context: . + dockerfile: inference/runtime-cp36-jp441.dockerfile + restart: always + privileged: true + command: ['python3', '-m', 'inference.app', '--user', '/sessions/models', '--routes', '/sessions/routes'] + environment: + LD_PRELOAD: libgomp.so.1 + OMP_PLACES: '{3}' + volumes: + - volume_byodr_sockets:/byodr:rw + - volume_byodr_config:/config:ro + following: + cpuset: '3,2' + build: + context: . 
+ dockerfile: following/Dockerfile + restart: always + privileged: true + network_mode: host + stop_signal: SIGKILL + volumes: + - volume_byodr_sockets:/byodr:rw + - volume_byodr_config:/config:rw diff --git a/following/Dockerfile b/jetson_runtime/following/Dockerfile similarity index 100% rename from following/Dockerfile rename to jetson_runtime/following/Dockerfile diff --git a/following/app.py b/jetson_runtime/following/app.py similarity index 100% rename from following/app.py rename to jetson_runtime/following/app.py diff --git a/following/botsort.yaml b/jetson_runtime/following/botsort.yaml similarity index 100% rename from following/botsort.yaml rename to jetson_runtime/following/botsort.yaml diff --git a/following/fol_utils.py b/jetson_runtime/following/fol_utils.py similarity index 100% rename from following/fol_utils.py rename to jetson_runtime/following/fol_utils.py diff --git a/following/models/480_20k.pt b/jetson_runtime/following/models/480_20k.pt similarity index 100% rename from following/models/480_20k.pt rename to jetson_runtime/following/models/480_20k.pt diff --git a/following/models/yolov8_20240717_coco(imgsz480x640_FP16).engine b/jetson_runtime/following/models/yolov8_20240717_coco(imgsz480x640_FP16).engine similarity index 100% rename from following/models/yolov8_20240717_coco(imgsz480x640_FP16).engine rename to jetson_runtime/following/models/yolov8_20240717_coco(imgsz480x640_FP16).engine diff --git a/following/models/yolov8n(256*320_FB16).engine b/jetson_runtime/following/models/yolov8n(256*320_FB16).engine similarity index 100% rename from following/models/yolov8n(256*320_FB16).engine rename to jetson_runtime/following/models/yolov8n(256*320_FB16).engine diff --git a/following/models/yolov8n.engine b/jetson_runtime/following/models/yolov8n.engine similarity index 100% rename from following/models/yolov8n.engine rename to jetson_runtime/following/models/yolov8n.engine diff --git a/following/models/yolov8n_d_20240722(256*320_FB16).engine 
b/jetson_runtime/following/models/yolov8n_d_20240722(256*320_FB16).engine similarity index 100% rename from following/models/yolov8n_d_20240722(256*320_FB16).engine rename to jetson_runtime/following/models/yolov8n_d_20240722(256*320_FB16).engine diff --git a/following/models/yolov8n_d_20240722(480*640_FB16).engine b/jetson_runtime/following/models/yolov8n_d_20240722(480*640_FB16).engine similarity index 100% rename from following/models/yolov8n_d_20240722(480*640_FB16).engine rename to jetson_runtime/following/models/yolov8n_d_20240722(480*640_FB16).engine diff --git a/following/models/yolov8n_d_20240722.yaml b/jetson_runtime/following/models/yolov8n_d_20240722.yaml similarity index 100% rename from following/models/yolov8n_d_20240722.yaml rename to jetson_runtime/following/models/yolov8n_d_20240722.yaml diff --git a/ftpd/Dockerfile b/jetson_runtime/ftpd/Dockerfile similarity index 100% rename from ftpd/Dockerfile rename to jetson_runtime/ftpd/Dockerfile diff --git a/ftpd/create_user.sh b/jetson_runtime/ftpd/create_user.sh old mode 100755 new mode 100644 similarity index 100% rename from ftpd/create_user.sh rename to jetson_runtime/ftpd/create_user.sh diff --git a/ftpd/wrap.py b/jetson_runtime/ftpd/wrap.py similarity index 100% rename from ftpd/wrap.py rename to jetson_runtime/ftpd/wrap.py diff --git a/httpd/Dockerfile b/jetson_runtime/httpd/Dockerfile similarity index 100% rename from httpd/Dockerfile rename to jetson_runtime/httpd/Dockerfile diff --git a/httpd/certs/lan.conf b/jetson_runtime/httpd/certs/lan.conf similarity index 100% rename from httpd/certs/lan.conf rename to jetson_runtime/httpd/certs/lan.conf diff --git a/httpd/certs/readme.txt b/jetson_runtime/httpd/certs/readme.txt similarity index 100% rename from httpd/certs/readme.txt rename to jetson_runtime/httpd/certs/readme.txt diff --git a/httpd/haproxy.template b/jetson_runtime/httpd/haproxy.template similarity index 100% rename from httpd/haproxy.template rename to 
jetson_runtime/httpd/haproxy.template diff --git a/httpd/haproxy_ssl.template b/jetson_runtime/httpd/haproxy_ssl.template similarity index 100% rename from httpd/haproxy_ssl.template rename to jetson_runtime/httpd/haproxy_ssl.template diff --git a/httpd/wrap.py b/jetson_runtime/httpd/wrap.py similarity index 100% rename from httpd/wrap.py rename to jetson_runtime/httpd/wrap.py diff --git a/inference/archive/tf113-cp27-jp43.dockerfile b/jetson_runtime/inference/archive/tf113-cp27-jp43.dockerfile similarity index 100% rename from inference/archive/tf113-cp27-jp43.dockerfile rename to jetson_runtime/inference/archive/tf113-cp27-jp43.dockerfile diff --git a/inference/archive/tf113-cp27-x86.dockerfile b/jetson_runtime/inference/archive/tf113-cp27-x86.dockerfile similarity index 100% rename from inference/archive/tf113-cp27-x86.dockerfile rename to jetson_runtime/inference/archive/tf113-cp27-x86.dockerfile diff --git a/inference/archive/tf115-cp36-jp42.dockerfile b/jetson_runtime/inference/archive/tf115-cp36-jp42.dockerfile similarity index 100% rename from inference/archive/tf115-cp36-jp42.dockerfile rename to jetson_runtime/inference/archive/tf115-cp36-jp42.dockerfile diff --git a/inference/archive/tf115-cp36-x86.dockerfile b/jetson_runtime/inference/archive/tf115-cp36-x86.dockerfile similarity index 100% rename from inference/archive/tf115-cp36-x86.dockerfile rename to jetson_runtime/inference/archive/tf115-cp36-x86.dockerfile diff --git a/common/byodr/__init__.py b/jetson_runtime/inference/inference/__init__.py similarity index 100% rename from common/byodr/__init__.py rename to jetson_runtime/inference/inference/__init__.py diff --git a/inference/inference/app.py b/jetson_runtime/inference/inference/app.py similarity index 100% rename from inference/inference/app.py rename to jetson_runtime/inference/inference/app.py diff --git a/inference/inference/image.py b/jetson_runtime/inference/inference/image.py similarity index 100% rename from inference/inference/image.py 
rename to jetson_runtime/inference/inference/image.py diff --git a/inference/inference/tests.py b/jetson_runtime/inference/inference/tests.py similarity index 100% rename from inference/inference/tests.py rename to jetson_runtime/inference/inference/tests.py diff --git a/inference/inference/torched.py b/jetson_runtime/inference/inference/torched.py similarity index 100% rename from inference/inference/torched.py rename to jetson_runtime/inference/inference/torched.py diff --git a/inference/pytest.ini b/jetson_runtime/inference/pytest.ini similarity index 100% rename from inference/pytest.ini rename to jetson_runtime/inference/pytest.ini diff --git a/inference/runtime-cp36-jp441.dockerfile b/jetson_runtime/inference/runtime-cp36-jp441.dockerfile similarity index 100% rename from inference/runtime-cp36-jp441.dockerfile rename to jetson_runtime/inference/runtime-cp36-jp441.dockerfile diff --git a/inference/runtime-cp36-x86.dockerfile b/jetson_runtime/inference/runtime-cp36-x86.dockerfile similarity index 100% rename from inference/runtime-cp36-x86.dockerfile rename to jetson_runtime/inference/runtime-cp36-x86.dockerfile diff --git a/mongodb/Dockerfile b/jetson_runtime/mongodb/Dockerfile similarity index 100% rename from mongodb/Dockerfile rename to jetson_runtime/mongodb/Dockerfile diff --git a/mongodb/wrap.py b/jetson_runtime/mongodb/wrap.py similarity index 100% rename from mongodb/wrap.py rename to jetson_runtime/mongodb/wrap.py diff --git a/pilot/Dockerfile b/jetson_runtime/pilot/Dockerfile similarity index 100% rename from pilot/Dockerfile rename to jetson_runtime/pilot/Dockerfile diff --git a/inference/inference/__init__.py b/jetson_runtime/pilot/pilot/__init__.py similarity index 100% rename from inference/inference/__init__.py rename to jetson_runtime/pilot/pilot/__init__.py diff --git a/pilot/pilot/app.py b/jetson_runtime/pilot/pilot/app.py similarity index 100% rename from pilot/pilot/app.py rename to jetson_runtime/pilot/pilot/app.py diff --git 
a/pilot/pilot/core.py b/jetson_runtime/pilot/pilot/core.py similarity index 100% rename from pilot/pilot/core.py rename to jetson_runtime/pilot/pilot/core.py diff --git a/pilot/pilot/relay.py b/jetson_runtime/pilot/pilot/relay.py similarity index 100% rename from pilot/pilot/relay.py rename to jetson_runtime/pilot/pilot/relay.py diff --git a/pilot/pilot/tests.py b/jetson_runtime/pilot/pilot/tests.py similarity index 100% rename from pilot/pilot/tests.py rename to jetson_runtime/pilot/pilot/tests.py diff --git a/pilot/pilot/tests_relay.py b/jetson_runtime/pilot/pilot/tests_relay.py similarity index 100% rename from pilot/pilot/tests_relay.py rename to jetson_runtime/pilot/pilot/tests_relay.py diff --git a/pilot/pilot/web.py b/jetson_runtime/pilot/pilot/web.py similarity index 100% rename from pilot/pilot/web.py rename to jetson_runtime/pilot/pilot/web.py diff --git a/pilot/pytest.ini b/jetson_runtime/pilot/pytest.ini similarity index 100% rename from pilot/pytest.ini rename to jetson_runtime/pilot/pytest.ini diff --git a/rosnode/Dockerfile b/jetson_runtime/rosnode/Dockerfile similarity index 100% rename from rosnode/Dockerfile rename to jetson_runtime/rosnode/Dockerfile diff --git a/rosnode/app.py b/jetson_runtime/rosnode/app.py similarity index 100% rename from rosnode/app.py rename to jetson_runtime/rosnode/app.py diff --git a/teleop/Dockerfile b/jetson_runtime/teleop/Dockerfile similarity index 100% rename from teleop/Dockerfile rename to jetson_runtime/teleop/Dockerfile diff --git a/teleop/htm/jmuxer/jmuxer-21.07.min.js b/jetson_runtime/teleop/htm/jmuxer/jmuxer-21.07.min.js similarity index 100% rename from teleop/htm/jmuxer/jmuxer-21.07.min.js rename to jetson_runtime/teleop/htm/jmuxer/jmuxer-21.07.min.js diff --git a/teleop/htm/jmuxer/z_index_video_mux.js b/jetson_runtime/teleop/htm/jmuxer/z_index_video_mux.js similarity index 100% rename from teleop/htm/jmuxer/z_index_video_mux.js rename to jetson_runtime/teleop/htm/jmuxer/z_index_video_mux.js diff --git 
a/teleop/htm/static/CSS/menu_controls.css b/jetson_runtime/teleop/htm/static/CSS/menu_controls.css similarity index 100% rename from teleop/htm/static/CSS/menu_controls.css rename to jetson_runtime/teleop/htm/static/CSS/menu_controls.css diff --git a/teleop/htm/static/CSS/menu_logbox.css b/jetson_runtime/teleop/htm/static/CSS/menu_logbox.css similarity index 100% rename from teleop/htm/static/CSS/menu_logbox.css rename to jetson_runtime/teleop/htm/static/CSS/menu_logbox.css diff --git a/teleop/htm/static/CSS/menu_settings.css b/jetson_runtime/teleop/htm/static/CSS/menu_settings.css similarity index 100% rename from teleop/htm/static/CSS/menu_settings.css rename to jetson_runtime/teleop/htm/static/CSS/menu_settings.css diff --git a/teleop/htm/static/CSS/mobileController.css b/jetson_runtime/teleop/htm/static/CSS/mobileController.css similarity index 100% rename from teleop/htm/static/CSS/mobileController.css rename to jetson_runtime/teleop/htm/static/CSS/mobileController.css diff --git a/teleop/htm/static/CSS/style.css b/jetson_runtime/teleop/htm/static/CSS/style.css similarity index 100% rename from teleop/htm/static/CSS/style.css rename to jetson_runtime/teleop/htm/static/CSS/style.css diff --git a/teleop/htm/static/CSS/theme_mode.css b/jetson_runtime/teleop/htm/static/CSS/theme_mode.css similarity index 100% rename from teleop/htm/static/CSS/theme_mode.css rename to jetson_runtime/teleop/htm/static/CSS/theme_mode.css diff --git a/teleop/htm/static/CSS/user_menu.css b/jetson_runtime/teleop/htm/static/CSS/user_menu.css similarity index 100% rename from teleop/htm/static/CSS/user_menu.css rename to jetson_runtime/teleop/htm/static/CSS/user_menu.css diff --git a/teleop/htm/static/JS/Index/index_a_utils.js b/jetson_runtime/teleop/htm/static/JS/Index/index_a_utils.js similarity index 100% rename from teleop/htm/static/JS/Index/index_a_utils.js rename to jetson_runtime/teleop/htm/static/JS/Index/index_a_utils.js diff --git a/teleop/htm/static/JS/Index/index_b_gamepad.js 
b/jetson_runtime/teleop/htm/static/JS/Index/index_b_gamepad.js similarity index 100% rename from teleop/htm/static/JS/Index/index_b_gamepad.js rename to jetson_runtime/teleop/htm/static/JS/Index/index_b_gamepad.js diff --git a/teleop/htm/static/JS/Index/index_c_screen.js b/jetson_runtime/teleop/htm/static/JS/Index/index_c_screen.js similarity index 100% rename from teleop/htm/static/JS/Index/index_c_screen.js rename to jetson_runtime/teleop/htm/static/JS/Index/index_c_screen.js diff --git a/teleop/htm/static/JS/Index/index_d_navigator.js b/jetson_runtime/teleop/htm/static/JS/Index/index_d_navigator.js similarity index 100% rename from teleop/htm/static/JS/Index/index_d_navigator.js rename to jetson_runtime/teleop/htm/static/JS/Index/index_d_navigator.js diff --git a/teleop/htm/static/JS/Index/index_e_teleop.js b/jetson_runtime/teleop/htm/static/JS/Index/index_e_teleop.js similarity index 100% rename from teleop/htm/static/JS/Index/index_e_teleop.js rename to jetson_runtime/teleop/htm/static/JS/Index/index_e_teleop.js diff --git a/teleop/htm/static/JS/Index/index_video_hlp.js b/jetson_runtime/teleop/htm/static/JS/Index/index_video_hlp.js similarity index 100% rename from teleop/htm/static/JS/Index/index_video_hlp.js rename to jetson_runtime/teleop/htm/static/JS/Index/index_video_hlp.js diff --git a/teleop/htm/static/JS/Index/index_video_mjpeg.js b/jetson_runtime/teleop/htm/static/JS/Index/index_video_mjpeg.js similarity index 100% rename from teleop/htm/static/JS/Index/index_video_mjpeg.js rename to jetson_runtime/teleop/htm/static/JS/Index/index_video_mjpeg.js diff --git a/teleop/htm/static/JS/index.js b/jetson_runtime/teleop/htm/static/JS/index.js similarity index 100% rename from teleop/htm/static/JS/index.js rename to jetson_runtime/teleop/htm/static/JS/index.js diff --git a/teleop/htm/static/JS/mobileController/feature/mobileController_f_auto_navigation.js b/jetson_runtime/teleop/htm/static/JS/mobileController/feature/mobileController_f_auto_navigation.js 
similarity index 100% rename from teleop/htm/static/JS/mobileController/feature/mobileController_f_auto_navigation.js rename to jetson_runtime/teleop/htm/static/JS/mobileController/feature/mobileController_f_auto_navigation.js diff --git a/teleop/htm/static/JS/mobileController/feature/mobileController_f_confidence.js b/jetson_runtime/teleop/htm/static/JS/mobileController/feature/mobileController_f_confidence.js similarity index 100% rename from teleop/htm/static/JS/mobileController/feature/mobileController_f_confidence.js rename to jetson_runtime/teleop/htm/static/JS/mobileController/feature/mobileController_f_confidence.js diff --git a/teleop/htm/static/JS/mobileController/feature/mobileController_f_following.js b/jetson_runtime/teleop/htm/static/JS/mobileController/feature/mobileController_f_following.js similarity index 100% rename from teleop/htm/static/JS/mobileController/feature/mobileController_f_following.js rename to jetson_runtime/teleop/htm/static/JS/mobileController/feature/mobileController_f_following.js diff --git a/teleop/htm/static/JS/mobileController/feature/mobileController_f_maneuver_training.js b/jetson_runtime/teleop/htm/static/JS/mobileController/feature/mobileController_f_maneuver_training.js similarity index 100% rename from teleop/htm/static/JS/mobileController/feature/mobileController_f_maneuver_training.js rename to jetson_runtime/teleop/htm/static/JS/mobileController/feature/mobileController_f_maneuver_training.js diff --git a/teleop/htm/static/JS/mobileController/mobileController_a_app.js b/jetson_runtime/teleop/htm/static/JS/mobileController/mobileController_a_app.js similarity index 100% rename from teleop/htm/static/JS/mobileController/mobileController_a_app.js rename to jetson_runtime/teleop/htm/static/JS/mobileController/mobileController_a_app.js diff --git a/teleop/htm/static/JS/mobileController/mobileController_b_shape_square.js b/jetson_runtime/teleop/htm/static/JS/mobileController/mobileController_b_shape_square.js similarity 
index 100% rename from teleop/htm/static/JS/mobileController/mobileController_b_shape_square.js rename to jetson_runtime/teleop/htm/static/JS/mobileController/mobileController_b_shape_square.js diff --git a/teleop/htm/static/JS/mobileController/mobileController_c_logic.js b/jetson_runtime/teleop/htm/static/JS/mobileController/mobileController_c_logic.js similarity index 100% rename from teleop/htm/static/JS/mobileController/mobileController_c_logic.js rename to jetson_runtime/teleop/htm/static/JS/mobileController/mobileController_c_logic.js diff --git a/teleop/htm/static/JS/mobileController/mobileController_z_state.js b/jetson_runtime/teleop/htm/static/JS/mobileController/mobileController_z_state.js similarity index 100% rename from teleop/htm/static/JS/mobileController/mobileController_z_state.js rename to jetson_runtime/teleop/htm/static/JS/mobileController/mobileController_z_state.js diff --git a/teleop/htm/static/JS/performance-polyfill.js b/jetson_runtime/teleop/htm/static/JS/performance-polyfill.js similarity index 100% rename from teleop/htm/static/JS/performance-polyfill.js rename to jetson_runtime/teleop/htm/static/JS/performance-polyfill.js diff --git a/teleop/htm/static/JS/router.js b/jetson_runtime/teleop/htm/static/JS/router.js similarity index 100% rename from teleop/htm/static/JS/router.js rename to jetson_runtime/teleop/htm/static/JS/router.js diff --git a/teleop/htm/static/JS/userMenu/menu_controls.js b/jetson_runtime/teleop/htm/static/JS/userMenu/menu_controls.js similarity index 100% rename from teleop/htm/static/JS/userMenu/menu_controls.js rename to jetson_runtime/teleop/htm/static/JS/userMenu/menu_controls.js diff --git a/teleop/htm/static/JS/userMenu/menu_logbox.js b/jetson_runtime/teleop/htm/static/JS/userMenu/menu_logbox.js similarity index 100% rename from teleop/htm/static/JS/userMenu/menu_logbox.js rename to jetson_runtime/teleop/htm/static/JS/userMenu/menu_logbox.js diff --git a/teleop/htm/static/JS/userMenu/menu_settings.js 
b/jetson_runtime/teleop/htm/static/JS/userMenu/menu_settings.js similarity index 100% rename from teleop/htm/static/JS/userMenu/menu_settings.js rename to jetson_runtime/teleop/htm/static/JS/userMenu/menu_settings.js diff --git a/teleop/htm/static/assets/pictures/VOR_Logo_dark.png b/jetson_runtime/teleop/htm/static/assets/pictures/VOR_Logo_dark.png similarity index 100% rename from teleop/htm/static/assets/pictures/VOR_Logo_dark.png rename to jetson_runtime/teleop/htm/static/assets/pictures/VOR_Logo_dark.png diff --git a/teleop/htm/static/assets/pictures/VOR_Logo_light.png b/jetson_runtime/teleop/htm/static/assets/pictures/VOR_Logo_light.png similarity index 100% rename from teleop/htm/static/assets/pictures/VOR_Logo_light.png rename to jetson_runtime/teleop/htm/static/assets/pictures/VOR_Logo_light.png diff --git a/teleop/htm/static/assets/pictures/VR - pattern tree 1.png b/jetson_runtime/teleop/htm/static/assets/pictures/VR - pattern tree 1.png similarity index 100% rename from teleop/htm/static/assets/pictures/VR - pattern tree 1.png rename to jetson_runtime/teleop/htm/static/assets/pictures/VR - pattern tree 1.png diff --git a/teleop/htm/static/assets/pictures/VR-pattern_tree_1.png b/jetson_runtime/teleop/htm/static/assets/pictures/VR-pattern_tree_1.png similarity index 100% rename from teleop/htm/static/assets/pictures/VR-pattern_tree_1.png rename to jetson_runtime/teleop/htm/static/assets/pictures/VR-pattern_tree_1.png diff --git a/teleop/htm/static/assets/pictures/ai_training_mode.png b/jetson_runtime/teleop/htm/static/assets/pictures/ai_training_mode.png similarity index 100% rename from teleop/htm/static/assets/pictures/ai_training_mode.png rename to jetson_runtime/teleop/htm/static/assets/pictures/ai_training_mode.png diff --git a/teleop/htm/static/assets/pictures/autopilot_mode.png b/jetson_runtime/teleop/htm/static/assets/pictures/autopilot_mode.png similarity index 100% rename from teleop/htm/static/assets/pictures/autopilot_mode.png rename to 
jetson_runtime/teleop/htm/static/assets/pictures/autopilot_mode.png diff --git a/teleop/htm/static/assets/pictures/caret.png b/jetson_runtime/teleop/htm/static/assets/pictures/caret.png similarity index 100% rename from teleop/htm/static/assets/pictures/caret.png rename to jetson_runtime/teleop/htm/static/assets/pictures/caret.png diff --git a/teleop/htm/static/assets/pictures/expand_collapse.png b/jetson_runtime/teleop/htm/static/assets/pictures/expand_collapse.png similarity index 100% rename from teleop/htm/static/assets/pictures/expand_collapse.png rename to jetson_runtime/teleop/htm/static/assets/pictures/expand_collapse.png diff --git a/teleop/htm/static/assets/pictures/follow_mode.png b/jetson_runtime/teleop/htm/static/assets/pictures/follow_mode.png similarity index 100% rename from teleop/htm/static/assets/pictures/follow_mode.png rename to jetson_runtime/teleop/htm/static/assets/pictures/follow_mode.png diff --git a/teleop/htm/static/assets/pictures/im_no_image_available.png b/jetson_runtime/teleop/htm/static/assets/pictures/im_no_image_available.png similarity index 100% rename from teleop/htm/static/assets/pictures/im_no_image_available.png rename to jetson_runtime/teleop/htm/static/assets/pictures/im_no_image_available.png diff --git a/teleop/htm/static/assets/pictures/manual_mode.png b/jetson_runtime/teleop/htm/static/assets/pictures/manual_mode.png similarity index 100% rename from teleop/htm/static/assets/pictures/manual_mode.png rename to jetson_runtime/teleop/htm/static/assets/pictures/manual_mode.png diff --git a/teleop/htm/static/assets/pictures/manual_mode_nav.png b/jetson_runtime/teleop/htm/static/assets/pictures/manual_mode_nav.png similarity index 100% rename from teleop/htm/static/assets/pictures/manual_mode_nav.png rename to jetson_runtime/teleop/htm/static/assets/pictures/manual_mode_nav.png diff --git a/teleop/htm/static/assets/pictures/map_recognition.png b/jetson_runtime/teleop/htm/static/assets/pictures/map_recognition.png similarity 
index 100% rename from teleop/htm/static/assets/pictures/map_recognition.png rename to jetson_runtime/teleop/htm/static/assets/pictures/map_recognition.png diff --git a/teleop/htm/static/assets/pictures/nav_advanved_settings.png b/jetson_runtime/teleop/htm/static/assets/pictures/nav_advanved_settings.png similarity index 100% rename from teleop/htm/static/assets/pictures/nav_advanved_settings.png rename to jetson_runtime/teleop/htm/static/assets/pictures/nav_advanved_settings.png diff --git a/teleop/htm/static/assets/pictures/nav_background.png b/jetson_runtime/teleop/htm/static/assets/pictures/nav_background.png similarity index 100% rename from teleop/htm/static/assets/pictures/nav_background.png rename to jetson_runtime/teleop/htm/static/assets/pictures/nav_background.png diff --git a/teleop/htm/static/assets/pictures/nav_control_by_phone.png b/jetson_runtime/teleop/htm/static/assets/pictures/nav_control_by_phone.png similarity index 100% rename from teleop/htm/static/assets/pictures/nav_control_by_phone.png rename to jetson_runtime/teleop/htm/static/assets/pictures/nav_control_by_phone.png diff --git a/teleop/htm/static/assets/pictures/nav_controls_menu.png b/jetson_runtime/teleop/htm/static/assets/pictures/nav_controls_menu.png similarity index 100% rename from teleop/htm/static/assets/pictures/nav_controls_menu.png rename to jetson_runtime/teleop/htm/static/assets/pictures/nav_controls_menu.png diff --git a/teleop/htm/static/assets/pictures/nav_training_events.png b/jetson_runtime/teleop/htm/static/assets/pictures/nav_training_events.png similarity index 100% rename from teleop/htm/static/assets/pictures/nav_training_events.png rename to jetson_runtime/teleop/htm/static/assets/pictures/nav_training_events.png diff --git a/teleop/htm/static/assets/pictures/ps4_mapping.jpg b/jetson_runtime/teleop/htm/static/assets/pictures/ps4_mapping.jpg similarity index 100% rename from teleop/htm/static/assets/pictures/ps4_mapping.jpg rename to 
jetson_runtime/teleop/htm/static/assets/pictures/ps4_mapping.jpg diff --git a/teleop/htm/static/assets/static_sora/Sora-Bold.ttf b/jetson_runtime/teleop/htm/static/assets/static_sora/Sora-Bold.ttf similarity index 100% rename from teleop/htm/static/assets/static_sora/Sora-Bold.ttf rename to jetson_runtime/teleop/htm/static/assets/static_sora/Sora-Bold.ttf diff --git a/teleop/htm/static/assets/static_sora/Sora-ExtraBold.ttf b/jetson_runtime/teleop/htm/static/assets/static_sora/Sora-ExtraBold.ttf similarity index 100% rename from teleop/htm/static/assets/static_sora/Sora-ExtraBold.ttf rename to jetson_runtime/teleop/htm/static/assets/static_sora/Sora-ExtraBold.ttf diff --git a/teleop/htm/static/assets/static_sora/Sora-ExtraLight.ttf b/jetson_runtime/teleop/htm/static/assets/static_sora/Sora-ExtraLight.ttf similarity index 100% rename from teleop/htm/static/assets/static_sora/Sora-ExtraLight.ttf rename to jetson_runtime/teleop/htm/static/assets/static_sora/Sora-ExtraLight.ttf diff --git a/teleop/htm/static/assets/static_sora/Sora-Light.ttf b/jetson_runtime/teleop/htm/static/assets/static_sora/Sora-Light.ttf similarity index 100% rename from teleop/htm/static/assets/static_sora/Sora-Light.ttf rename to jetson_runtime/teleop/htm/static/assets/static_sora/Sora-Light.ttf diff --git a/teleop/htm/static/assets/static_sora/Sora-Medium.ttf b/jetson_runtime/teleop/htm/static/assets/static_sora/Sora-Medium.ttf similarity index 100% rename from teleop/htm/static/assets/static_sora/Sora-Medium.ttf rename to jetson_runtime/teleop/htm/static/assets/static_sora/Sora-Medium.ttf diff --git a/teleop/htm/static/assets/static_sora/Sora-Regular.ttf b/jetson_runtime/teleop/htm/static/assets/static_sora/Sora-Regular.ttf similarity index 100% rename from teleop/htm/static/assets/static_sora/Sora-Regular.ttf rename to jetson_runtime/teleop/htm/static/assets/static_sora/Sora-Regular.ttf diff --git a/teleop/htm/static/assets/static_sora/Sora-SemiBold.ttf 
b/jetson_runtime/teleop/htm/static/assets/static_sora/Sora-SemiBold.ttf similarity index 100% rename from teleop/htm/static/assets/static_sora/Sora-SemiBold.ttf rename to jetson_runtime/teleop/htm/static/assets/static_sora/Sora-SemiBold.ttf diff --git a/teleop/htm/static/assets/static_sora/Sora-Thin.ttf b/jetson_runtime/teleop/htm/static/assets/static_sora/Sora-Thin.ttf similarity index 100% rename from teleop/htm/static/assets/static_sora/Sora-Thin.ttf rename to jetson_runtime/teleop/htm/static/assets/static_sora/Sora-Thin.ttf diff --git a/teleop/htm/static/assets/static_sora/Sora-VariableFont_wght.ttf b/jetson_runtime/teleop/htm/static/assets/static_sora/Sora-VariableFont_wght.ttf similarity index 100% rename from teleop/htm/static/assets/static_sora/Sora-VariableFont_wght.ttf rename to jetson_runtime/teleop/htm/static/assets/static_sora/Sora-VariableFont_wght.ttf diff --git a/teleop/htm/static/assets/svg/MC_VOR_logo.svg b/jetson_runtime/teleop/htm/static/assets/svg/MC_VOR_logo.svg similarity index 100% rename from teleop/htm/static/assets/svg/MC_VOR_logo.svg rename to jetson_runtime/teleop/htm/static/assets/svg/MC_VOR_logo.svg diff --git a/teleop/htm/static/assets/svg/NC_VOR_logo.svg b/jetson_runtime/teleop/htm/static/assets/svg/NC_VOR_logo.svg similarity index 100% rename from teleop/htm/static/assets/svg/NC_VOR_logo.svg rename to jetson_runtime/teleop/htm/static/assets/svg/NC_VOR_logo.svg diff --git a/teleop/htm/static/assets/svg/advanced_settings_menu.svg b/jetson_runtime/teleop/htm/static/assets/svg/advanced_settings_menu.svg similarity index 100% rename from teleop/htm/static/assets/svg/advanced_settings_menu.svg rename to jetson_runtime/teleop/htm/static/assets/svg/advanced_settings_menu.svg diff --git a/teleop/htm/static/assets/svg/ai_training_mode.svg b/jetson_runtime/teleop/htm/static/assets/svg/ai_training_mode.svg similarity index 100% rename from teleop/htm/static/assets/svg/ai_training_mode.svg rename to 
jetson_runtime/teleop/htm/static/assets/svg/ai_training_mode.svg diff --git a/teleop/htm/static/assets/svg/autopilot_mode.svg b/jetson_runtime/teleop/htm/static/assets/svg/autopilot_mode.svg similarity index 100% rename from teleop/htm/static/assets/svg/autopilot_mode.svg rename to jetson_runtime/teleop/htm/static/assets/svg/autopilot_mode.svg diff --git a/teleop/htm/static/assets/svg/control_by_phone.svg b/jetson_runtime/teleop/htm/static/assets/svg/control_by_phone.svg similarity index 100% rename from teleop/htm/static/assets/svg/control_by_phone.svg rename to jetson_runtime/teleop/htm/static/assets/svg/control_by_phone.svg diff --git a/teleop/htm/static/assets/svg/controls_settings_menu.svg b/jetson_runtime/teleop/htm/static/assets/svg/controls_settings_menu.svg similarity index 100% rename from teleop/htm/static/assets/svg/controls_settings_menu.svg rename to jetson_runtime/teleop/htm/static/assets/svg/controls_settings_menu.svg diff --git a/teleop/htm/static/assets/svg/follow_mode.svg b/jetson_runtime/teleop/htm/static/assets/svg/follow_mode.svg similarity index 100% rename from teleop/htm/static/assets/svg/follow_mode.svg rename to jetson_runtime/teleop/htm/static/assets/svg/follow_mode.svg diff --git a/teleop/htm/static/assets/svg/manual_mode.svg b/jetson_runtime/teleop/htm/static/assets/svg/manual_mode.svg similarity index 100% rename from teleop/htm/static/assets/svg/manual_mode.svg rename to jetson_runtime/teleop/htm/static/assets/svg/manual_mode.svg diff --git a/teleop/htm/static/assets/svg/map_recognition.svg b/jetson_runtime/teleop/htm/static/assets/svg/map_recognition.svg similarity index 100% rename from teleop/htm/static/assets/svg/map_recognition.svg rename to jetson_runtime/teleop/htm/static/assets/svg/map_recognition.svg diff --git a/teleop/htm/static/assets/svg/training_settings_menu.svg b/jetson_runtime/teleop/htm/static/assets/svg/training_settings_menu.svg similarity index 100% rename from 
teleop/htm/static/assets/svg/training_settings_menu.svg rename to jetson_runtime/teleop/htm/static/assets/svg/training_settings_menu.svg diff --git a/teleop/htm/static/external/bootstrap-theme.min.css b/jetson_runtime/teleop/htm/static/external/bootstrap-theme.min.css similarity index 100% rename from teleop/htm/static/external/bootstrap-theme.min.css rename to jetson_runtime/teleop/htm/static/external/bootstrap-theme.min.css diff --git a/teleop/htm/static/external/bootstrap.min.css b/jetson_runtime/teleop/htm/static/external/bootstrap.min.css similarity index 100% rename from teleop/htm/static/external/bootstrap.min.css rename to jetson_runtime/teleop/htm/static/external/bootstrap.min.css diff --git a/teleop/htm/static/external/bootstrap.min.js b/jetson_runtime/teleop/htm/static/external/bootstrap.min.js similarity index 100% rename from teleop/htm/static/external/bootstrap.min.js rename to jetson_runtime/teleop/htm/static/external/bootstrap.min.js diff --git a/teleop/htm/static/external/font-awesome.min.css b/jetson_runtime/teleop/htm/static/external/font-awesome.min.css similarity index 100% rename from teleop/htm/static/external/font-awesome.min.css rename to jetson_runtime/teleop/htm/static/external/font-awesome.min.css diff --git a/teleop/htm/static/external/http-live-player.js b/jetson_runtime/teleop/htm/static/external/http-live-player.js similarity index 100% rename from teleop/htm/static/external/http-live-player.js rename to jetson_runtime/teleop/htm/static/external/http-live-player.js diff --git a/teleop/htm/static/external/jquery-1.12.4.min.js b/jetson_runtime/teleop/htm/static/external/jquery-1.12.4.min.js similarity index 100% rename from teleop/htm/static/external/jquery-1.12.4.min.js rename to jetson_runtime/teleop/htm/static/external/jquery-1.12.4.min.js diff --git a/teleop/htm/static/external/jquery-3.4.1.min.js b/jetson_runtime/teleop/htm/static/external/jquery-3.4.1.min.js similarity index 100% rename from 
teleop/htm/static/external/jquery-3.4.1.min.js rename to jetson_runtime/teleop/htm/static/external/jquery-3.4.1.min.js diff --git a/teleop/htm/static/external/jquery-3.7.1.min.js b/jetson_runtime/teleop/htm/static/external/jquery-3.7.1.min.js similarity index 100% rename from teleop/htm/static/external/jquery-3.7.1.min.js rename to jetson_runtime/teleop/htm/static/external/jquery-3.7.1.min.js diff --git a/teleop/htm/static/external/jquery-ui-1.12.1.min.css b/jetson_runtime/teleop/htm/static/external/jquery-ui-1.12.1.min.css similarity index 100% rename from teleop/htm/static/external/jquery-ui-1.12.1.min.css rename to jetson_runtime/teleop/htm/static/external/jquery-ui-1.12.1.min.css diff --git a/teleop/htm/static/external/jquery-ui-1.12.1.min.js b/jetson_runtime/teleop/htm/static/external/jquery-ui-1.12.1.min.js similarity index 100% rename from teleop/htm/static/external/jquery-ui-1.12.1.min.js rename to jetson_runtime/teleop/htm/static/external/jquery-ui-1.12.1.min.js diff --git a/teleop/htm/static/external/jquery.radioslider-1.0.0_b1.min.js b/jetson_runtime/teleop/htm/static/external/jquery.radioslider-1.0.0_b1.min.js similarity index 100% rename from teleop/htm/static/external/jquery.radioslider-1.0.0_b1.min.js rename to jetson_runtime/teleop/htm/static/external/jquery.radioslider-1.0.0_b1.min.js diff --git a/teleop/htm/static/external/leaflet.awesome-markers.css b/jetson_runtime/teleop/htm/static/external/leaflet.awesome-markers.css similarity index 100% rename from teleop/htm/static/external/leaflet.awesome-markers.css rename to jetson_runtime/teleop/htm/static/external/leaflet.awesome-markers.css diff --git a/teleop/htm/static/external/leaflet.awesome-markers.js b/jetson_runtime/teleop/htm/static/external/leaflet.awesome-markers.js similarity index 100% rename from teleop/htm/static/external/leaflet.awesome-markers.js rename to jetson_runtime/teleop/htm/static/external/leaflet.awesome-markers.js diff --git 
a/teleop/htm/static/external/leaflet.awesome.rotate.min.css b/jetson_runtime/teleop/htm/static/external/leaflet.awesome.rotate.min.css similarity index 100% rename from teleop/htm/static/external/leaflet.awesome.rotate.min.css rename to jetson_runtime/teleop/htm/static/external/leaflet.awesome.rotate.min.css diff --git a/teleop/htm/static/external/leaflet.css b/jetson_runtime/teleop/htm/static/external/leaflet.css similarity index 100% rename from teleop/htm/static/external/leaflet.css rename to jetson_runtime/teleop/htm/static/external/leaflet.css diff --git a/teleop/htm/static/external/leaflet.js b/jetson_runtime/teleop/htm/static/external/leaflet.js similarity index 100% rename from teleop/htm/static/external/leaflet.js rename to jetson_runtime/teleop/htm/static/external/leaflet.js diff --git a/teleop/htm/static/external/performance-polyfill.js b/jetson_runtime/teleop/htm/static/external/performance-polyfill.js similarity index 100% rename from teleop/htm/static/external/performance-polyfill.js rename to jetson_runtime/teleop/htm/static/external/performance-polyfill.js diff --git a/teleop/htm/static/external/radioslider-1.0.0_b1.min.css b/jetson_runtime/teleop/htm/static/external/radioslider-1.0.0_b1.min.css similarity index 100% rename from teleop/htm/static/external/radioslider-1.0.0_b1.min.css rename to jetson_runtime/teleop/htm/static/external/radioslider-1.0.0_b1.min.css diff --git a/teleop/htm/templates/index.html b/jetson_runtime/teleop/htm/templates/index.html similarity index 100% rename from teleop/htm/templates/index.html rename to jetson_runtime/teleop/htm/templates/index.html diff --git a/teleop/htm/templates/mobile_controller_ui.html b/jetson_runtime/teleop/htm/templates/mobile_controller_ui.html similarity index 100% rename from teleop/htm/templates/mobile_controller_ui.html rename to jetson_runtime/teleop/htm/templates/mobile_controller_ui.html diff --git a/teleop/htm/templates/normal_ui.html b/jetson_runtime/teleop/htm/templates/normal_ui.html 
similarity index 100% rename from teleop/htm/templates/normal_ui.html rename to jetson_runtime/teleop/htm/templates/normal_ui.html diff --git a/teleop/htm/templates/userMenu/menu_controls.html b/jetson_runtime/teleop/htm/templates/userMenu/menu_controls.html similarity index 100% rename from teleop/htm/templates/userMenu/menu_controls.html rename to jetson_runtime/teleop/htm/templates/userMenu/menu_controls.html diff --git a/teleop/htm/templates/userMenu/menu_logbox.html b/jetson_runtime/teleop/htm/templates/userMenu/menu_logbox.html similarity index 100% rename from teleop/htm/templates/userMenu/menu_logbox.html rename to jetson_runtime/teleop/htm/templates/userMenu/menu_logbox.html diff --git a/teleop/htm/templates/userMenu/menu_settings.html b/jetson_runtime/teleop/htm/templates/userMenu/menu_settings.html similarity index 100% rename from teleop/htm/templates/userMenu/menu_settings.html rename to jetson_runtime/teleop/htm/templates/userMenu/menu_settings.html diff --git a/teleop/logbox/__init__.py b/jetson_runtime/teleop/logbox/__init__.py similarity index 100% rename from teleop/logbox/__init__.py rename to jetson_runtime/teleop/logbox/__init__.py diff --git a/teleop/logbox/app.py b/jetson_runtime/teleop/logbox/app.py similarity index 100% rename from teleop/logbox/app.py rename to jetson_runtime/teleop/logbox/app.py diff --git a/teleop/logbox/core.py b/jetson_runtime/teleop/logbox/core.py similarity index 100% rename from teleop/logbox/core.py rename to jetson_runtime/teleop/logbox/core.py diff --git a/teleop/logbox/store.py b/jetson_runtime/teleop/logbox/store.py similarity index 100% rename from teleop/logbox/store.py rename to jetson_runtime/teleop/logbox/store.py diff --git a/teleop/logbox/web.py b/jetson_runtime/teleop/logbox/web.py similarity index 100% rename from teleop/logbox/web.py rename to jetson_runtime/teleop/logbox/web.py diff --git a/teleop/pytest.ini b/jetson_runtime/teleop/pytest.ini similarity index 100% rename from teleop/pytest.ini rename 
to jetson_runtime/teleop/pytest.ini diff --git a/pilot/pilot/__init__.py b/jetson_runtime/teleop/teleop/__init__.py similarity index 100% rename from pilot/pilot/__init__.py rename to jetson_runtime/teleop/teleop/__init__.py diff --git a/teleop/teleop/app.py b/jetson_runtime/teleop/teleop/app.py similarity index 100% rename from teleop/teleop/app.py rename to jetson_runtime/teleop/teleop/app.py diff --git a/teleop/teleop/server.py b/jetson_runtime/teleop/teleop/server.py similarity index 100% rename from teleop/teleop/server.py rename to jetson_runtime/teleop/teleop/server.py diff --git a/teleop/teleop/tel_utils.py b/jetson_runtime/teleop/teleop/tel_utils.py similarity index 100% rename from teleop/teleop/tel_utils.py rename to jetson_runtime/teleop/teleop/tel_utils.py diff --git a/teleop/teleop/tests.py b/jetson_runtime/teleop/teleop/tests.py similarity index 100% rename from teleop/teleop/tests.py rename to jetson_runtime/teleop/teleop/tests.py diff --git a/vehicles/carla09/Dockerfile b/jetson_runtime/vehicles/carla09/Dockerfile similarity index 100% rename from vehicles/carla09/Dockerfile rename to jetson_runtime/vehicles/carla09/Dockerfile diff --git a/vehicles/carla09/app.py b/jetson_runtime/vehicles/carla09/app.py similarity index 100% rename from vehicles/carla09/app.py rename to jetson_runtime/vehicles/carla09/app.py diff --git a/vehicles/carla09/config.template b/jetson_runtime/vehicles/carla09/config.template similarity index 100% rename from vehicles/carla09/config.template rename to jetson_runtime/vehicles/carla09/config.template diff --git a/vehicles/carla09/vehicle.py b/jetson_runtime/vehicles/carla09/vehicle.py similarity index 100% rename from vehicles/carla09/vehicle.py rename to jetson_runtime/vehicles/carla09/vehicle.py diff --git a/vehicles/carla09/video.py b/jetson_runtime/vehicles/carla09/video.py similarity index 100% rename from vehicles/carla09/video.py rename to jetson_runtime/vehicles/carla09/video.py diff --git 
a/vehicles/rover/Dockerfile b/jetson_runtime/vehicles/rover/Dockerfile similarity index 100% rename from vehicles/rover/Dockerfile rename to jetson_runtime/vehicles/rover/Dockerfile diff --git a/vehicles/rover/app.py b/jetson_runtime/vehicles/rover/app.py similarity index 100% rename from vehicles/rover/app.py rename to jetson_runtime/vehicles/rover/app.py diff --git a/vehicles/rover/config.template b/jetson_runtime/vehicles/rover/config.template similarity index 100% rename from vehicles/rover/config.template rename to jetson_runtime/vehicles/rover/config.template diff --git a/vehicles/rover/core.py b/jetson_runtime/vehicles/rover/core.py similarity index 100% rename from vehicles/rover/core.py rename to jetson_runtime/vehicles/rover/core.py diff --git a/vehicles/rover/pytest.ini b/jetson_runtime/vehicles/rover/pytest.ini similarity index 100% rename from vehicles/rover/pytest.ini rename to jetson_runtime/vehicles/rover/pytest.ini diff --git a/vehicles/rover/tests_rover.py b/jetson_runtime/vehicles/rover/tests_rover.py similarity index 100% rename from vehicles/rover/tests_rover.py rename to jetson_runtime/vehicles/rover/tests_rover.py diff --git a/pi4_runtime/README.md b/pi4_runtime/README.md new file mode 100644 index 00000000..af13aa00 --- /dev/null +++ b/pi4_runtime/README.md @@ -0,0 +1,36 @@ +# Pi4 Runtime + +## Overview + +The **Raspberry Pi 4B Runtime** manages essential control and communication tasks for the **BYODR** project on a low level, including video streaming, and motor control. + +## Docker Services + +The Raspberry Pi runs the following Docker services: + +### 1. Servos + +- **Input**: Receives movement commands in JSON format from `Pilot`. +- **Function**: Controls the motor controllers connected via `ttyACM0` and `ttyACM1` serial ports. +- **Output**: Sends commands to the motor controllers for movement. +- **Files**: Configuration and server scripts are in `servos/`. 
+- with every command sent from `Pilot`, it will broadcast a message about the current speed of the vehicle + +### 2. Stream0 + +- **Input**: Receives video stream from the AI camera (`camera0`) at `192.168.1.64`. +- **Function**: Encodes the video stream into H264 format. +- **Output**: Sends the stream via RTSP to the `Teleop` service on the Jetson Nano. +- **Why Not Direct?**: The Pi preprocesses the video stream to reduce the load on the Nano. + +### 3. Stream1 + +- **Input**: Receives video stream from the Operator camera (`camera1`) at `192.168.1.65`. +- **Function**: Encodes the video stream into H264 format. +- **Output**: Sends the stream via RTSP to the `Teleop` service on the Jetson Nano. +- **Files**: Stream configurations are located in `stream/camera.template`. + +### 4. Zerotier + +- **Function**: Adds the Raspberry Pi to a secure P2P network for remote access. +- **Output**: Establishes a secure connection with other segments and the user. diff --git a/raspi/docker-compose.yml b/pi4_runtime/docker-compose.yml similarity index 73% rename from raspi/docker-compose.yml rename to pi4_runtime/docker-compose.yml index 9b6b6036..9e401e3b 100644 --- a/raspi/docker-compose.yml +++ b/pi4_runtime/docker-compose.yml @@ -3,15 +3,6 @@ volumes: volume_zerotier_config: volume_local_config: services: - # TODO: remove pigpiod service - # pigpiod: - # image: centipede2donald/raspbian-stretch:pigpio-zmq-byodr-0.25.0 - # container_name: pigpiod - # privileged: true - # user: root - # restart: always - # network_mode: 'host' - # command: bash -c "/bin/rm -rf /var/run/pigpio.pid && /pigpio/pigpiod -gl" zerotier: image: zyclonite/zerotier:1.6.6 container_name: zerotier-one @@ -29,7 +20,7 @@ services: servos: build: context: . - dockerfile: ras/Dockerfile + dockerfile: servos/Dockerfile privileged: true labels: io.balena.features.kernel-modules: '1' @@ -41,7 +32,7 @@ services: stream0: build: context: . 
- dockerfile: pi_gstreamer.dockerfile + dockerfile: stream/Dockerfile privileged: true user: root restart: always @@ -53,7 +44,7 @@ services: stream1: build: context: . - dockerfile: pi_gstreamer.dockerfile + dockerfile: stream/Dockerfile privileged: true user: root restart: always diff --git a/raspi/ras/Dockerfile b/pi4_runtime/servos/Dockerfile similarity index 100% rename from raspi/ras/Dockerfile rename to pi4_runtime/servos/Dockerfile diff --git a/raspi/ras/__init__.py b/pi4_runtime/servos/__init__.py similarity index 100% rename from raspi/ras/__init__.py rename to pi4_runtime/servos/__init__.py diff --git a/raspi/ras/servos.py b/pi4_runtime/servos/app.py similarity index 99% rename from raspi/ras/servos.py rename to pi4_runtime/servos/app.py index b1dfd415..034b0a9c 100644 --- a/raspi/ras/servos.py +++ b/pi4_runtime/servos/app.py @@ -20,7 +20,7 @@ from BYODR_utils.common.usbrelay import SearchUsbRelayFactory from BYODR_utils.PI_specific.gpio_relay import ThreadSafePi4GpioRelay from BYODR_utils.PI_specific.utilities import RaspberryPi -from ras.core import CommandHistory, HallOdometer, VESCDrive +from servos.core import CommandHistory, HallOdometer, VESCDrive logger = logging.getLogger(__name__) log_format = "%(levelname)s: %(asctime)s %(filename)s:%(lineno)d %(funcName)s %(threadName)s %(message)s" diff --git a/raspi/ras/core.py b/pi4_runtime/servos/core.py similarity index 100% rename from raspi/ras/core.py rename to pi4_runtime/servos/core.py diff --git a/raspi/ras/driver.template b/pi4_runtime/servos/driver.template similarity index 100% rename from raspi/ras/driver.template rename to pi4_runtime/servos/driver.template diff --git a/raspi/pi_gstreamer.dockerfile b/pi4_runtime/stream/Dockerfile similarity index 100% rename from raspi/pi_gstreamer.dockerfile rename to pi4_runtime/stream/Dockerfile diff --git a/raspi/stream/__init__.py b/pi4_runtime/stream/__init__.py similarity index 100% rename from raspi/stream/__init__.py rename to 
pi4_runtime/stream/__init__.py diff --git a/raspi/stream/camera.py b/pi4_runtime/stream/camera.py similarity index 100% rename from raspi/stream/camera.py rename to pi4_runtime/stream/camera.py diff --git a/raspi/stream/camera.template b/pi4_runtime/stream/camera.template similarity index 100% rename from raspi/stream/camera.template rename to pi4_runtime/stream/camera.template diff --git a/raspi/BYODR_utils/common/location.py b/raspi/BYODR_utils/common/location.py deleted file mode 100644 index 953cdbee..00000000 --- a/raspi/BYODR_utils/common/location.py +++ /dev/null @@ -1,106 +0,0 @@ -import collections - -import cachetools -from geographiclib.geodesic import Geodesic - - -def _distance_bearing(from_position, to_position): - c_latitude, c_longitude = to_position - p_latitude, p_longitude = from_position - # noinspection PyUnresolvedReferences - _g = Geodesic.WGS84.Inverse(p_latitude, p_longitude, c_latitude, c_longitude) - # Distance in meters. - _distance = _g["s12"] - # The azimuth is the heading measured clockwise from north. - # azi2 is the "forward" azimuth, i.e., the heading that takes you beyond point 2 not back to point 1. - _bearing = _g["azi2"] - return _distance, _bearing - - -class GeoTracker(object): - """ - A class for tracking geographical positions with the ability to calculate bearing between positions - based on a minimum distance criterion. - - Attributes: - _min_distance (float): Minimum distance in meters required to consider positions distinct for bearing calculations. - _positions (collections.deque): A deque to store the sequence of recent geographic positions. - _cache (cachetools.TTLCache): Cache for storing recent position calculations to reduce computation. 
- """ - - def __init__(self, cache_ttl=10.0, min_distance_meters=0.10): - self._min_distance = min_distance_meters - self._positions = collections.deque(maxlen=8) - self._cache = cachetools.TTLCache(maxsize=100, ttl=cache_ttl) - - def _begin(self, current): - """ - Handles the initialization or reset of tracking when there are no or insufficient previous positions. - - Parameters: - current (tuple or None): The current geographic position as a tuple (latitude, longitude) or None. - - Returns: - tuple: The current or last known latitude, longitude, and None for bearing (since bearing cannot be calculated). - """ - n_positions = len(self._positions) - if n_positions == 0: - if current is None: - return None, None, None - else: - self._positions.append(current) - return current[0], current[1], None - if current is None: - current = self._positions[-1] - return current[0], current[1], None - else: - distance, bearing = _distance_bearing(self._positions[0], (current[0], current[1])) - if distance >= self._min_distance: - self._positions.append(current) - return current[0], current[1], None - - def _track(self, current): - """ - Tracks the current position and calculates the bearing if possible based on the stored positions. - - Parameters: - current (tuple or None): The current geographic position as a tuple (latitude, longitude) or None. - - Returns: - tuple: Latitude, longitude, and bearing (if calculable); otherwise, None for the bearing. 
- """ - n_positions = len(self._positions) - if n_positions < 2: - return self._begin(current) - if current is None: - current = self._positions[-1] - distance, bearing = _distance_bearing(self._positions[0], (current[0], current[1])) - return current[0], current[1], bearing - else: - distance, bearing = _distance_bearing(self._positions[0], (current[0], current[1])) - if distance >= self._min_distance: - self._positions.append(current) - return current[0], current[1], bearing - - def clear(self): - """ - Clears the stored positions and cache, resetting the tracker to an initial state. - """ - self._positions.clear() - - def track(self, position): - """ - Retrieves or calculates the latitude, longitude, and bearing of a given position, utilizing caching to optimize. - - Parameters: - position (tuple): The current geographic position as a tuple (latitude, longitude). - - Returns: - tuple: Latitude, longitude, and bearing (if calculable based on movement and distance); otherwise, None for the bearing. 
- """ - _key = position - res = self._cache.get(_key) - if res is None: - res = self._track(position) - self._cache[_key] = res - return res diff --git a/raspi/BYODR_utils/common/option.py b/raspi/BYODR_utils/common/option.py deleted file mode 100644 index 166f07fc..00000000 --- a/raspi/BYODR_utils/common/option.py +++ /dev/null @@ -1,63 +0,0 @@ -class PropertyError(ValueError): - def __init__(self, key, msg, suggestions=None): - if suggestions is None: - suggestions = list() - self.key = key - self.message = msg - self.suggestions = suggestions - - def __str__(self): - return "{} - {}".format(self.key, self.message) - - -def str_to_bool(value): - if value.lower() in ("true", "1", "t", "y", "yes"): - return True - elif value.lower() in ("false", "0", "f", "n", "no"): - return False - else: - raise ValueError(f"Cannot convert {value} to a boolean.") - - -def _parse(key, fn_type=(lambda x: x), **kwargs): - try: - return fn_type(kwargs[key]) - except (ValueError, TypeError) as e: - raise PropertyError(key, str(e)) - - -def parse_option(key, fn_type=(lambda x: x), default_value=None, errors=None, **kwargs): - """ - Attempts to parse an option from the given keyword arguments based on the specified key. - - If the key is missing and a default value is provided, the default value is used instead. - Parameters: - - key (str): The key to look for in the keyword arguments. - - fn_type (callable, optional): A function to apply to the value of the found key. Defaults to a no-op lambda that returns the value unchanged. - - default_value (any, optional): The default value to use if the key is not found in the keyword arguments. Defaults to None. - - errors (list, optional): A list to which any encountered PropertyErrors will be appended. If None, a new list is created. Defaults to None. - - **kwargs: Additional keyword arguments among which the function will look for the specified key. 
- - Returns: - - The value associated with 'key' in the keyword arguments after applying 'fn_type', the default value if the key is missing, or raises a KeyError if the key is missing and no default value is provided. - - Raises: - - KeyError: If the key is not found in the keyword arguments and no default value is provided. - - PropertyError: If there is a ValueError or TypeError when applying 'fn_type' to the value associated with 'key'. - """ - errors = [] if errors is None else errors - try: - if fn_type is bool: - # Use custom boolean parser - return str_to_bool(kwargs[key]) - else: - return _parse(key, fn_type=fn_type, **kwargs) - except KeyError: - if default_value is None: - errors.append(PropertyError(key, "The key is missing and no default value has been set")) - else: - return fn_type(default_value) - - -def hash_dict(**m): - return hash("".join(str(k) + str(m.get(k)) for k in sorted(m.keys()))) diff --git a/raspi/BYODR_utils/common/testing.py b/raspi/BYODR_utils/common/testing.py deleted file mode 100644 index 88e003b0..00000000 --- a/raspi/BYODR_utils/common/testing.py +++ /dev/null @@ -1,140 +0,0 @@ -from __future__ import absolute_import -import collections -from six.moves import map - - -class QueueReceiver(object): - def __init__(self, queue_max_size=100): - """ - A drop-in replacement for ipc ReceiverThread. - :param queue_max_size: Max length of the queue. 
- """ - self._queue = collections.deque(maxlen=queue_max_size) - self._listeners = [] - self._started = False - - def start(self): - self._started = True - - def is_started(self): - return self._started - - def add_listener(self, c): - self._listeners.append(c) - - def add(self, m): - self._queue.appendleft(m) - list(map(lambda x: x(m), self._listeners)) - - def get_latest(self): - return self._queue[0] if bool(self._queue) else None - - def pop_latest(self): - return self._queue.popleft() if bool(self._queue) else None - - def clear(self): - self._queue.clear() - - def quit(self): - self.clear() - self._listeners = [] - self._started = False - - -class QueueCamera(object): - def __init__(self, queue_max_size=100): - """ - A drop-in replacement for ipc CameraThread. - :param queue_max_size: Max length of the queue. - """ - self._queue = collections.deque(maxlen=queue_max_size) - self._started = False - - def start(self): - self._started = True - - def is_started(self): - return self._started - - def add(self, meta_data, image): - self._queue.appendleft((meta_data, image)) - - def capture(self): - return self._queue[0] if bool(self._queue) else (None, None) - - def clear(self): - self._queue.clear() - - -class CollectPublisher(object): - def __init__(self, topic=""): - """ - A drop-in replacement for ipc JSONPublisher. - :param topic: The default topic. - """ - self._topic = topic - self._map = dict() - - def publish(self, data, topic=None): - _topic = self._topic if topic is None else topic - if _topic not in self._map: - self._map[_topic] = list() - self._map[_topic].append(data) - - def collect(self, topic=None): - _topic = self._topic if topic is None else topic - return self._map.get(_topic) - - def get_latest(self, topic=None): - return self.collect(topic=topic)[-1] - - def clear(self): - self._map.clear() - - -class CollectServer(object): - def __init__(self): - """ - A drop-in replacement for ipc LocalIPCServer. 
- """ - self._errors = [] - self._capabilities = [] - - def register_start(self, errors, capabilities=None): - capabilities = {} if capabilities is None else capabilities - self._errors.append(errors) - self._capabilities.append(capabilities) - - def collect(self): - return self._errors - - def get_latest(self): - return self._errors[-1] - - def clear(self): - self._errors = [] - - -class CollectJSONClient(object): - """ - A drop-in replacement for ipc JSONZmqClient. - """ - - def __init__(self): - self._list = [] - - def call(self, message, ret=None): - self._list.append(message) - return ret - - def collect(self): - return self._list - - def get_latest(self): - return self._list[-1] - - def clear(self): - self._list = [] - - def quit(self): - self.clear() diff --git a/raspi/BYODR_utils/common/video.py b/raspi/BYODR_utils/common/video.py deleted file mode 100644 index e1b4116b..00000000 --- a/raspi/BYODR_utils/common/video.py +++ /dev/null @@ -1,121 +0,0 @@ -from __future__ import absolute_import - -import collections -import logging -import threading -import time - -import gi -import numpy as np - -gi.require_version("Gst", "1.0") -from gi.repository import Gst - -Gst.init(None) - -logger = logging.getLogger(__name__) - - -class RawGstSource(object): - def __init__(self, name="app", boot_time_seconds=20, command="videotestsrc ! decodebin ! videoconvert ! appsink"): - assert "appsink" in command, "Need the appsink present in the gst command." 
- self.name = name - self.boot_time_seconds = boot_time_seconds - self.command = command.replace("appsink", "appsink name=sink emit-signals=true sync=false async=false max-buffers=1 drop=true") - self._listeners = collections.deque() - self._listeners_lock = threading.Lock() - self._sample_time = None - self.closed = True - self.video_pipe = None - - def _setup(self): - self.video_pipe = Gst.parse_launch(self.command) - self.closed = True - - # noinspection PyUnusedLocal - def _eos(self, bus, msg): - logger.info(msg) - self.close() - - # noinspection PyUnusedLocal - def _error(self, bus, msg): - logger.error(msg) - self.close() - - def _sample(self, sink): - buffer = sink.emit("pull-sample").get_buffer() - array = self.convert_buffer(buffer.extract_dup(0, buffer.get_size())) - with self._listeners_lock: - for listen in self._listeners: - listen(array) - self._sample_time = time.time() - return Gst.FlowReturn.OK - - def convert_buffer(self, buffer): - return buffer - - def add_listener(self, listener): - with self._listeners_lock: - self._listeners.append(listener) - - def remove_listener(self, listener): - with self._listeners_lock: - self._listeners.remove(listener) - - def open(self): - self._setup() - self.video_pipe.set_state(Gst.State.PLAYING) - video_sink = self.video_pipe.get_by_name("sink") - video_sink.connect("new-sample", self._sample) - bus = self.video_pipe.get_bus() - bus.add_signal_watch() - bus.connect("message::eos", self._eos) - bus.connect("message::error", self._error) - self.closed = False - self._sample_time = time.time() + self.boot_time_seconds - logger.info("Source {} opened.".format(self.name)) - - def is_healthy(self, patience): - return self._sample_time and time.time() - self._sample_time < patience - - def is_closed(self): - return self.closed - - def is_open(self): - return not self.is_closed() - - def check(self, patience=0.50): - if self.is_open() and not self.is_healthy(patience=patience): - self.close() - if self.is_closed(): - 
self.open() - - def close(self): - if self.video_pipe is not None: - self.video_pipe.set_state(Gst.State.NULL) - self.closed = True - logger.info("Source {} closed.".format(self.name)) - - -class GstStreamSource(RawGstSource): - def __init__(self, name, shape, command, fn_convert=(lambda x: x)): - super(GstStreamSource, self).__init__(name=name, command=command) - self._shape = shape - self._fn_convert = fn_convert - - def get_width(self): - return self._shape[1] - - def get_height(self): - return self._shape[0] - - def convert_buffer(self, buffer): - return self._fn_convert(buffer) - - -def create_image_source(name, shape, command): - return GstStreamSource(name, shape, command, fn_convert=(lambda buffer: np.fromstring(buffer, dtype=np.uint8).reshape(shape))) - - -def create_video_source(name, shape, command): - return GstStreamSource(name, shape, command) diff --git a/raspi/BYODR_utils/common/websocket.py b/raspi/BYODR_utils/common/websocket.py deleted file mode 100644 index 068ed145..00000000 --- a/raspi/BYODR_utils/common/websocket.py +++ /dev/null @@ -1,99 +0,0 @@ -from __future__ import absolute_import - -import json -import logging -import threading -import traceback - -import gi -from tornado import websocket - -gi.require_version("Gst", "1.0") -from gi.repository import Gst - -Gst.init(None) - -logger = logging.getLogger(__name__) - - -class HttpLivePlayerVideoSocket(websocket.WebSocketHandler): - def __init__(self, application, request, **kwargs): - super(HttpLivePlayerVideoSocket, self).__init__(application, request, **kwargs) - self._lock = threading.Lock() - self._streaming = False - - # noinspection PyAttributeOutsideInit - def initialize(self, **kwargs): - self._video = kwargs.get("video_source") - self._io_loop = kwargs.get("io_loop") - - def _push(self, _bytes): - with self._lock: - if self._streaming: - try: - self.write_message(_bytes, binary=True) - except websocket.WebSocketClosedError: - pass - - def _client(self, _bytes): - 
self._io_loop.add_callback(lambda: self._push(_bytes)) - - # noinspection PyUnusedLocal - @staticmethod - def check_origin(origin): - return True - - def data_received(self, chunk): - pass - - # noinspection PyUnusedLocal - def open(self, *args, **kwargs): - self._video.add_listener(self._client) - self.write_message(json.dumps(dict(action="init", width=self._video.get_width(), height=self._video.get_height()))) - - def on_close(self): - self._video.remove_listener(self._client) - - def on_message(self, message): - try: - with self._lock: - self._streaming = "REQUESTSTREAM" in message - logger.info("On message - streaming = {}.".format(self._streaming)) - except Exception as e: - logger.error("Stream socket@on_message: {} {}".format(e, traceback.format_exc())) - logger.error("Input message:---\n{}\n---".format(message)) - - -class JMuxerVideoStreamSocket(websocket.WebSocketHandler): - # noinspection PyAttributeOutsideInit - def initialize(self, **kwargs): - self._video = kwargs.get("video_source") - self._io_loop = kwargs.get("io_loop") - - def _push(self, _bytes): - try: - self.write_message(_bytes, binary=True) - except websocket.WebSocketClosedError: - pass - - def _client(self, _bytes): - self._io_loop.add_callback(lambda: self._push(_bytes)) - - # noinspection PyUnusedLocal - @staticmethod - def check_origin(origin): - return True - - def data_received(self, chunk): - pass - - # noinspection PyUnusedLocal - def open(self, *args, **kwargs): - self._video.add_listener(self._client) - - def on_close(self): - self._video.remove_listener(self._client) - - @staticmethod - def on_message(message): - logger.info("Unexpected message '{}' received.".format(message)) diff --git a/teleop/teleop/__init__.py b/teleop/teleop/__init__.py deleted file mode 100644 index e69de29b..00000000 From 1f826e3c7ed0c01fbbbd0ae4921e8c0ce57af161 Mon Sep 17 00:00:00 2001 From: Ahmed Mahfouz Date: Fri, 11 Oct 2024 13:32:23 +0200 Subject: [PATCH 4/9] refactor change file move in all docker 
images to adopt with new files --- .../docker}/runtime-cp36-x86.dockerfile | 0 .../teleop/teleop => archived}/tests.py | 2 +- .../rover => archived}/tests_rover.py | 0 jetson_runtime/docker-compose.yml | 12 ----------- jetson_runtime/following/Dockerfile | 20 ++++++++----------- jetson_runtime/httpd/Dockerfile | 4 ++-- .../inference/runtime-cp36-jp441.dockerfile | 15 ++++++++------ jetson_runtime/mongodb/Dockerfile | 4 ++-- jetson_runtime/pilot/Dockerfile | 11 ++++++---- jetson_runtime/teleop/Dockerfile | 18 ++++++++++++----- jetson_runtime/vehicles/rover/Dockerfile | 14 +++++++++---- pi4_runtime/servos/Dockerfile | 4 ++-- pi4_runtime/stream/Dockerfile | 11 +++++++--- 13 files changed, 62 insertions(+), 53 deletions(-) rename {jetson_runtime/inference => archived/docker}/runtime-cp36-x86.dockerfile (100%) rename {jetson_runtime/teleop/teleop => archived}/tests.py (92%) rename {jetson_runtime/vehicles/rover => archived}/tests_rover.py (100%) diff --git a/jetson_runtime/inference/runtime-cp36-x86.dockerfile b/archived/docker/runtime-cp36-x86.dockerfile similarity index 100% rename from jetson_runtime/inference/runtime-cp36-x86.dockerfile rename to archived/docker/runtime-cp36-x86.dockerfile diff --git a/jetson_runtime/teleop/teleop/tests.py b/archived/tests.py similarity index 92% rename from jetson_runtime/teleop/teleop/tests.py rename to archived/tests.py index 7d443c71..e3eaf5b5 100644 --- a/jetson_runtime/teleop/teleop/tests.py +++ b/archived/tests.py @@ -2,7 +2,7 @@ import multiprocessing from six.moves.configparser import SafeConfigParser -from .app import TeleopApplication +from ..jetson_runtime.teleop.teleop.app import TeleopApplication from io import open diff --git a/jetson_runtime/vehicles/rover/tests_rover.py b/archived/tests_rover.py similarity index 100% rename from jetson_runtime/vehicles/rover/tests_rover.py rename to archived/tests_rover.py diff --git a/jetson_runtime/docker-compose.yml b/jetson_runtime/docker-compose.yml index 0377c775..335881cd 
100644 --- a/jetson_runtime/docker-compose.yml +++ b/jetson_runtime/docker-compose.yml @@ -61,18 +61,6 @@ services: volumes: - volume_ftpd_config:/etc/pureftpd:rw - volume_byodr_sessions:/home/ftpuser:rw - rosnode: - cpuset: '0' - build: - context: . - dockerfile: rosnode/Dockerfile - restart: always - command: ['python3', 'app.py', '--name', 'rosnode'] - network_mode: host - stop_signal: SIGKILL - volumes: - - volume_byodr_sockets:/byodr:rw - - volume_byodr_config:/config:ro mongodb: cpuset: '0' build: diff --git a/jetson_runtime/following/Dockerfile b/jetson_runtime/following/Dockerfile index 194482fc..61f525d8 100644 --- a/jetson_runtime/following/Dockerfile +++ b/jetson_runtime/following/Dockerfile @@ -23,18 +23,14 @@ RUN \ RUN pip3 install --upgrade pyzmq lap simple-pid -# Copy your application files -COPY ./common/ /common/ -COPY ./following/ /app/ -WORKDIR /app - -ENV PYTHONPATH "/app:/common:${PYTHONPATH}" - -# Should use this one in the future. to keep the imports also desciverable by the local env. Will need to add `common.` at the beginning of each byodr import -# COPY ./common/ /app/common/ -# COPY ./following/ /app/following/ -# WORKDIR /app/following -# ENV PYTHONPATH "/app:${PYTHONPATH}" +# Copy application files +COPY ./BYODR_utils/common/ /app/BYODR_utils/common/ +COPY ./BYODR_utils/JETSON_specific/ /app/BYODR_utils/JETSON_specific/ + +COPY ./following /app/following +ENV PYTHONPATH "/app:${PYTHONPATH}" + +WORKDIR /app/following # Command to run your application diff --git a/jetson_runtime/httpd/Dockerfile b/jetson_runtime/httpd/Dockerfile index f4884be8..9e384d48 100644 --- a/jetson_runtime/httpd/Dockerfile +++ b/jetson_runtime/httpd/Dockerfile @@ -13,7 +13,7 @@ RUN npm install -g mapport #The entry point for the Docker container is set to run haproxy with a configuration file /app/haproxy.conf. #This means that when the container starts, it will initiate HAProxy with the given configuration in haproxy.conf. 
-COPY ./httpd app/ -WORKDIR /app +COPY ./httpd app/httpd +WORKDIR /app/httpd CMD ["/usr/sbin/haproxy", "-f", "/app/haproxy.conf"] \ No newline at end of file diff --git a/jetson_runtime/inference/runtime-cp36-jp441.dockerfile b/jetson_runtime/inference/runtime-cp36-jp441.dockerfile index 4b54fe08..9f4b2a67 100644 --- a/jetson_runtime/inference/runtime-cp36-jp441.dockerfile +++ b/jetson_runtime/inference/runtime-cp36-jp441.dockerfile @@ -1,10 +1,13 @@ FROM centipede2donald/nvidia-jetson:jp441-nano-cp36-oxrt-3 -COPY ./common common/ -COPY ./inference app/ -WORKDIR /app +# Copy application files +COPY ./BYODR_utils/common/ /app/BYODR_utils/common/ +COPY ./BYODR_utils/JETSON_specific/ /app/BYODR_utils/JETSON_specific/ -COPY ./build/*.onnx /models/ -COPY ./build/*.ini /models/ +COPY ./build/ /app/models/ + +COPY ./inference /app/inference +ENV PYTHONPATH "/app:${PYTHONPATH}" + +WORKDIR /app/inference -ENV PYTHONPATH "${PYTHONPATH}:/common" diff --git a/jetson_runtime/mongodb/Dockerfile b/jetson_runtime/mongodb/Dockerfile index c132f743..1de9c92c 100644 --- a/jetson_runtime/mongodb/Dockerfile +++ b/jetson_runtime/mongodb/Dockerfile @@ -25,8 +25,8 @@ ENV MONGO_INITDB_ROOT_PASSWORD=robot EXPOSE 27017 # Copy the Python script and any other necessary files into the container -COPY ./mongodb /app/ -WORKDIR /app +COPY ./mongodb /app/mongodb +WORKDIR /app/mongodb # Set the entrypoint to run the Python script that manages MongoDB diff --git a/jetson_runtime/pilot/Dockerfile b/jetson_runtime/pilot/Dockerfile index 480ba419..8768c1eb 100644 --- a/jetson_runtime/pilot/Dockerfile +++ b/jetson_runtime/pilot/Dockerfile @@ -4,10 +4,13 @@ RUN pip3 install simple-pid Jetson.GPIO RUN pip3 install "pyusb==1.0.2" RUN pip3 install "tornado==6.1" -COPY ./common common/ -COPY ./pilot app/ -WORKDIR /app +# Copy application files +COPY ./BYODR_utils/common/ /app/BYODR_utils/common/ +COPY ./BYODR_utils/JETSON_specific/ /app/BYODR_utils/JETSON_specific/ -ENV PYTHONPATH "${PYTHONPATH}:/common" 
+COPY ./pilot app/pilot +ENV PYTHONPATH "/app:${PYTHONPATH}" + +WORKDIR /app/pilot CMD ["python3", "app.py"] \ No newline at end of file diff --git a/jetson_runtime/teleop/Dockerfile b/jetson_runtime/teleop/Dockerfile index e2eb114e..08280760 100644 --- a/jetson_runtime/teleop/Dockerfile +++ b/jetson_runtime/teleop/Dockerfile @@ -19,10 +19,18 @@ RUN pip3 install pymongo tornado folium Flask flask_socketio paramiko user-agent # from cryptography.x509 import load_der_x509_certificate as _load_der_x509_certificate ENV PYTHONWARNINGS "ignore::UserWarning" -COPY ./common common/ -COPY ./teleop app/ -WORKDIR /app - EXPOSE 8080 5000 -ENV PYTHONPATH "${PYTHONPATH}:/common" + +# Copy application files +COPY ./BYODR_utils/common/ /app/BYODR_utils/common/ +COPY ./BYODR_utils/JETSON_specific/ /app/BYODR_utils/JETSON_specific/ + +COPY ./teleop /app/teleop +ENV PYTHONPATH "/app:${PYTHONPATH}" + +WORKDIR /app/teleop + + +# # Command to run your application +# CMD ["python3", "app.py"] diff --git a/jetson_runtime/vehicles/rover/Dockerfile b/jetson_runtime/vehicles/rover/Dockerfile index 741606f4..3d934c21 100644 --- a/jetson_runtime/vehicles/rover/Dockerfile +++ b/jetson_runtime/vehicles/rover/Dockerfile @@ -4,8 +4,14 @@ FROM mwlvdev/jetson-nano-ubuntu:focal-cp310-GST RUN pip3 install --upgrade pip && \ pip3 install pysnmp==4.4.12 pyasn1==0.4.8 -COPY ./common common/ -COPY ./vehicles/rover app/ -WORKDIR /app -ENV PYTHONPATH "${PYTHONPATH}:/common" + +COPY ./BYODR_utils/common/ /app/BYODR_utils/common/ +COPY ./BYODR_utils/JETSON_specific/ /app/BYODR_utils/JETSON_specific/ + +COPY ./vehicles/rover app/vehicles/rover +ENV PYTHONPATH "/app:${PYTHONPATH}" + +WORKDIR /app/vehicles/rover + + CMD ["python3.10", "app.py"] \ No newline at end of file diff --git a/pi4_runtime/servos/Dockerfile b/pi4_runtime/servos/Dockerfile index 1c0cfe4c..b8df0beb 100644 --- a/pi4_runtime/servos/Dockerfile +++ b/pi4_runtime/servos/Dockerfile @@ -28,9 +28,9 @@ RUN git clone 
https://github.com/LiamBindle/PyVESC.git \ COPY ./BYODR_utils/common/ /app/BYODR_utils/common/ COPY ./BYODR_utils/PI_specific/ /app/BYODR_utils/PI_specific/ -COPY ./ras /app/ras +COPY ./servos /app/servos ENV PYTHONPATH "/app:${PYTHONPATH}" -WORKDIR /app/ras +WORKDIR /app/servos CMD ["python", "servos.py"] \ No newline at end of file diff --git a/pi4_runtime/stream/Dockerfile b/pi4_runtime/stream/Dockerfile index c999a424..495fb472 100644 --- a/pi4_runtime/stream/Dockerfile +++ b/pi4_runtime/stream/Dockerfile @@ -1,9 +1,14 @@ FROM centipede2donald/raspbian-stretch:gst-omx-rpi-0.50.2 -COPY ./ app/ -WORKDIR /app +# Copy application files +COPY ./BYODR_utils/common/ /app/BYODR_utils/common/ +COPY ./BYODR_utils/PI_specific/ /app/BYODR_utils/PI_specific/ + +COPY ./stream /app/stream +ENV PYTHONPATH "/app:${PYTHONPATH}" + +WORKDIR /app/stream -ENV PYTHONPATH "${PYTHONPATH}:/common" CMD ["sleep", "infinity"] \ No newline at end of file From f12a2c842a312b7ed36a94d17e8bfc864d2fb2ce Mon Sep 17 00:00:00 2001 From: Ahmed Mahfouz Date: Fri, 11 Oct 2024 14:32:18 +0200 Subject: [PATCH 5/9] refactor archived/PIL}/tests.py | 6 +- .../pilot => archived/PIL}/tests_relay.py | 6 +- archived/exr/app.py | 4 +- archived/odometry.py | 4 +- archived/tests_rover.py | 2 +- .../BYODR_utils/JETSON_specific/gpio_relay.py | 41 ++ .../BYODR_utils/JETSON_specific/utilities.py | 20 + .../BYODR_utils/PI_specific/gpio_relay.py | 48 +++ .../BYODR_utils/PI_specific/utilities.py | 20 + jetson_runtime/BYODR_utils/common/__init__.py | 200 ++++++++++ jetson_runtime/BYODR_utils/common/ipc.py | 309 +++++++++++++++ jetson_runtime/BYODR_utils/common/location.py | 106 +++++ jetson_runtime/BYODR_utils/common/navigate.py | 374 ++++++++++++++++++ jetson_runtime/BYODR_utils/common/option.py | 63 +++ jetson_runtime/BYODR_utils/common/protocol.py | 59 +++ jetson_runtime/BYODR_utils/common/ssh.py | 128 ++++++ jetson_runtime/BYODR_utils/common/testing.py | 140 +++++++ jetson_runtime/BYODR_utils/common/usbrelay.py | 252 
++++++++++++ jetson_runtime/BYODR_utils/common/video.py | 121 ++++++ .../BYODR_utils/common/websocket.py | 99 +++++ jetson_runtime/docker-compose.yml | 1 - jetson_runtime/following/app.py | 4 +- jetson_runtime/following/fol_utils.py | 4 +- jetson_runtime/inference/inference/app.py | 10 +- jetson_runtime/inference/inference/image.py | 2 +- jetson_runtime/inference/inference/tests.py | 2 +- .../inference/runtime-cp36-jp441.dockerfile | 2 + jetson_runtime/pilot/pilot/app.py | 15 +- jetson_runtime/pilot/pilot/core.py | 6 +- jetson_runtime/pilot/pilot/relay.py | 8 +- jetson_runtime/rosnode/app.py | 4 +- jetson_runtime/teleop/logbox/app.py | 2 +- jetson_runtime/teleop/logbox/core.py | 2 +- jetson_runtime/teleop/teleop/app.py | 15 +- jetson_runtime/teleop/teleop/server.py | 4 +- jetson_runtime/teleop/teleop/tel_utils.py | 2 +- jetson_runtime/vehicles/carla09/app.py | 12 +- jetson_runtime/vehicles/carla09/vehicle.py | 6 +- jetson_runtime/vehicles/carla09/video.py | 4 +- jetson_runtime/vehicles/rover/app.py | 8 +- jetson_runtime/vehicles/rover/core.py | 6 +- pi4_runtime/stream/Dockerfile | 14 +- pi4_runtime/stream/camera.py | 8 +- 45 files changed, 2073 insertions(+), 76 deletions(-) rename {jetson_runtime/pilot/pilot => archived/PIL}/tests.py (92%) rename {jetson_runtime/pilot/pilot => archived/PIL}/tests_relay.py (95%) create mode 100644 jetson_runtime/BYODR_utils/JETSON_specific/gpio_relay.py create mode 100644 jetson_runtime/BYODR_utils/JETSON_specific/utilities.py create mode 100644 jetson_runtime/BYODR_utils/PI_specific/gpio_relay.py create mode 100644 jetson_runtime/BYODR_utils/PI_specific/utilities.py create mode 100644 jetson_runtime/BYODR_utils/common/__init__.py create mode 100644 jetson_runtime/BYODR_utils/common/ipc.py create mode 100644 jetson_runtime/BYODR_utils/common/location.py create mode 100644 jetson_runtime/BYODR_utils/common/navigate.py create mode 100644 jetson_runtime/BYODR_utils/common/option.py create mode 100644 
jetson_runtime/BYODR_utils/common/protocol.py create mode 100644 jetson_runtime/BYODR_utils/common/ssh.py create mode 100644 jetson_runtime/BYODR_utils/common/testing.py create mode 100644 jetson_runtime/BYODR_utils/common/usbrelay.py create mode 100644 jetson_runtime/BYODR_utils/common/video.py create mode 100644 jetson_runtime/BYODR_utils/common/websocket.py diff --git a/.vscode/settings.json b/.vscode/settings.json index 3ac89b99..1560ed24 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,6 +1,8 @@ { "cSpell.words": [ + "BYODR", "GPIO", + "Jetson", "teleop" ], "python.analysis.typeCheckingMode": "off", diff --git a/BYODR_utils/JETSON_specific/gpio_relay.py b/BYODR_utils/JETSON_specific/gpio_relay.py index f5256603..722a8776 100644 --- a/BYODR_utils/JETSON_specific/gpio_relay.py +++ b/BYODR_utils/JETSON_specific/gpio_relay.py @@ -2,10 +2,10 @@ import threading -import Jetson.GPIO as GPIO +import Jetson.GPIO as GPIO # type: ignore -class ThreadSafeGpioRelay(object): +class ThreadSafeJetsonGpioRelay(object): """ Thread-safe class for managing a GPIO relay on a Jetson Nano. 
""" diff --git a/jetson_runtime/pilot/pilot/tests.py b/archived/PIL/tests.py similarity index 92% rename from jetson_runtime/pilot/pilot/tests.py rename to archived/PIL/tests.py index 6e85b3e3..5875d05e 100644 --- a/jetson_runtime/pilot/pilot/tests.py +++ b/archived/PIL/tests.py @@ -8,9 +8,9 @@ from app import CommandProcessor from app import PilotApplication -from byodr.utils import timestamp -from byodr.utils.navigate import ReloadableDataSource, FileSystemRouteDataSource -from byodr.utils.testing import CollectPublisher, QueueReceiver, CollectServer +from BYODR_utils.common import timestamp +from BYODR_utils.common.navigate import ReloadableDataSource, FileSystemRouteDataSource +from BYODR_utils.common.testing import CollectPublisher, QueueReceiver, CollectServer def test_create_and_setup(tmpdir): diff --git a/jetson_runtime/pilot/pilot/tests_relay.py b/archived/PIL/tests_relay.py similarity index 95% rename from jetson_runtime/pilot/pilot/tests_relay.py rename to archived/PIL/tests_relay.py index f685a098..40382265 100644 --- a/jetson_runtime/pilot/pilot/tests_relay.py +++ b/archived/PIL/tests_relay.py @@ -8,9 +8,9 @@ from six.moves import range from six.moves.configparser import SafeConfigParser -from byodr.utils import timestamp -from byodr.utils.testing import QueueReceiver, CollectServer, CollectJSONClient -from byodr.utils.usbrelay import SearchUsbRelayFactory +from BYODR_utils.common import timestamp +from BYODR_utils.common.testing import QueueReceiver, CollectServer, CollectJSONClient +from BYODR_utils.common.usbrelay import SearchUsbRelayFactory from relay import MonitorApplication diff --git a/archived/exr/app.py b/archived/exr/app.py index 6eb17d95..57afa811 100644 --- a/archived/exr/app.py +++ b/archived/exr/app.py @@ -18,8 +18,8 @@ from can import CanError from pyueye import ueye -from byodr.utils import timestamp -from byodr.utils.ipc import ReceiverThread, JSONPublisher, ImagePublisher +from BYODR_utils.common import timestamp +from 
BYODR_utils.common.ipc import ReceiverThread, JSONPublisher, ImagePublisher from camera import Camera, FrameThread logger = logging.getLogger(__name__) diff --git a/archived/odometry.py b/archived/odometry.py index dff62829..2863dff5 100644 --- a/archived/odometry.py +++ b/archived/odometry.py @@ -5,8 +5,8 @@ from gpiozero import DigitalInputDevice -from byodr.utils import timestamp -from byodr.utils.ipc import JSONPublisher +from BYODR_utils.common import timestamp +from BYODR_utils.common.ipc import JSONPublisher logger = logging.getLogger(__name__) log_format = "%(levelname)s: %(filename)s %(funcName)s %(message)s" diff --git a/archived/tests_rover.py b/archived/tests_rover.py index 215df6ab..b411e718 100644 --- a/archived/tests_rover.py +++ b/archived/tests_rover.py @@ -3,7 +3,7 @@ from ConfigParser import SafeConfigParser from app import RoverApplication -from byodr.utils.testing import CollectPublisher, QueueReceiver, CollectServer +from BYODR_utils.common.testing import CollectPublisher, QueueReceiver, CollectServer def test_rover_create_and_setup(tmpdir): diff --git a/jetson_runtime/BYODR_utils/JETSON_specific/gpio_relay.py b/jetson_runtime/BYODR_utils/JETSON_specific/gpio_relay.py new file mode 100644 index 00000000..722a8776 --- /dev/null +++ b/jetson_runtime/BYODR_utils/JETSON_specific/gpio_relay.py @@ -0,0 +1,41 @@ +from __future__ import absolute_import + +import threading + +import Jetson.GPIO as GPIO # type: ignore + + +class ThreadSafeJetsonGpioRelay(object): + """ + Thread-safe class for managing a GPIO relay on a Jetson Nano. 
+ """ + + def __init__(self, pin=15): + self.pin = pin + self.state = False # False for OFF, True for ON + self.lock = threading.Lock() + GPIO.setmode(GPIO.BOARD) # Set the pin numbering system to BOARD + GPIO.setup(self.pin, GPIO.OUT, initial=GPIO.LOW) + + def open(self): + """Turns the relay ON (sets the GPIO pin LOW).""" + with self.lock: + GPIO.output(self.pin, GPIO.LOW) + self.state = False + + def close(self): + """Turns the relay OFF (sets the GPIO pin HIGH).""" + with self.lock: + GPIO.output(self.pin, GPIO.HIGH) + self.state = True + + def toggle(self): + """Toggles the relay state.""" + with self.lock: + self.state = not self.state + GPIO.output(self.pin, GPIO.LOW if self.state else GPIO.HIGH) + + def states(self): + """Returns the current state of the relay.""" + with self.lock: + return self.state diff --git a/jetson_runtime/BYODR_utils/JETSON_specific/utilities.py b/jetson_runtime/BYODR_utils/JETSON_specific/utilities.py new file mode 100644 index 00000000..730993ea --- /dev/null +++ b/jetson_runtime/BYODR_utils/JETSON_specific/utilities.py @@ -0,0 +1,20 @@ +import subprocess + + +class Nano: + @staticmethod + def get_ip_address(): + try: + ip_addresses = ( + subprocess.check_output( + "hostname -I | awk '{for (i=1; i<=NF; i++) if ($i ~ /^192\\.168\\./) print $i}'", + shell=True, + ) + .decode() + .strip() + ) + # Split in case there are multiple local IP addresses + return ip_addresses + except subprocess.CalledProcessError as e: + print(f"An error occurred: {e}") + return None diff --git a/jetson_runtime/BYODR_utils/PI_specific/gpio_relay.py b/jetson_runtime/BYODR_utils/PI_specific/gpio_relay.py new file mode 100644 index 00000000..27593b41 --- /dev/null +++ b/jetson_runtime/BYODR_utils/PI_specific/gpio_relay.py @@ -0,0 +1,48 @@ +from __future__ import absolute_import + +import threading + +import RPi.GPIO as GPIO # type: ignore + + +class ThreadSafePi4GpioRelay: + """Thread-safe class for managing a GPIO relay on a Raspberry Pi.""" + + def 
__init__(self, pin=15): + self.pin = pin + self.state = False # False for OFF, True for ON + self.lock = threading.Lock() + GPIO.setmode(GPIO.BOARD) # Set the pin numbering system to BOARD + GPIO.setwarnings(False) + GPIO.setup(self.pin, GPIO.OUT, initial=GPIO.LOW) + + def open(self): + """Turns the relay ON (sets the GPIO pin LOW).""" + with self.lock: + print("opened the relay") + GPIO.output(self.pin, GPIO.LOW) + self.state = False + + def close(self): + """Turns the relay OFF (sets the GPIO pin HIGH).""" + with self.lock: + GPIO.output(self.pin, GPIO.HIGH) + current_state = GPIO.input(self.pin) + print(f"closed the relay, current state: {current_state}") + self.state = True + + def toggle(self): + """Toggles the relay state.""" + with self.lock: + self.state = not self.state + GPIO.output(self.pin, GPIO.LOW if self.state else GPIO.HIGH) + + def get_state(self): + """Returns the current state of the relay.""" + with self.lock: + return self.state + + def cleanup(self): + """Cleans up the GPIO state.""" + GPIO.cleanup(self.pin) # Reset the specific pin before setup + GPIO.setup(self.pin, GPIO.OUT, initial=GPIO.LOW) diff --git a/jetson_runtime/BYODR_utils/PI_specific/utilities.py b/jetson_runtime/BYODR_utils/PI_specific/utilities.py new file mode 100644 index 00000000..e5e89eab --- /dev/null +++ b/jetson_runtime/BYODR_utils/PI_specific/utilities.py @@ -0,0 +1,20 @@ +import subprocess + + +class RaspberryPi: + @staticmethod + def get_ip_address(): + try: + ip_addresses = ( + subprocess.check_output( + "hostname -I | awk '{for (i=1; i<=NF; i++) if ($i ~ /^192\\.168\\./) print $i}'", + shell=True, + ) + .decode() + .strip() + ) + # Split in case there are multiple local IP addresses + return ip_addresses + except subprocess.CalledProcessError as e: + print(f"An error occurred: {e}") + return None diff --git a/jetson_runtime/BYODR_utils/common/__init__.py b/jetson_runtime/BYODR_utils/common/__init__.py new file mode 100644 index 00000000..b9d7d751 --- /dev/null +++ 
b/jetson_runtime/BYODR_utils/common/__init__.py @@ -0,0 +1,200 @@ +from __future__ import absolute_import + +import collections +import logging +import multiprocessing +import signal +import time +import traceback +from abc import ABCMeta, abstractmethod +from cProfile import Profile +from contextlib import contextmanager + +import numpy as np +import six + +from BYODR_utils.common.option import hash_dict + +logger = logging.getLogger(__name__) + + +def timestamp(value=None): + """ + Timestamp as integer to retain precision e.g. when serializing to string. + """ + ts = time.time() if value is None else value + return int(ts * 1e6) + + +def entropy(x, eps=1e-20): + return abs(-np.sum(x * np.log(np.clip(x, eps, 1.0)))) + + +class Profiler(Profile): + """ + Custom Profile class with a __call__() context manager method to enable profiling. + Use: + profiler = Profiler() + with profiler(): + + profiler.dump_stats('prof.stats') + -- + python -c "import pstats; p = pstats.Stats('prof.stats'); p.sort_stats('time').print_stats(50)" + python -c "import pstats; p = pstats.Stats('prof.stats'); p.sort_stats('cumulative').print_stats(50)" + """ + + def __init__(self, *args, **kwargs): + super(Profile, self).__init__(*args, **kwargs) + self.disable() # Profiling initially off. + + @contextmanager + def __call__(self): + self.enable() + yield # Execute code to be profiled. 
+ self.disable() + + +class Configurable(six.with_metaclass(ABCMeta, object)): + def __init__(self): + self._lock = multiprocessing.Lock() + self._errors = [] + self._hash = -1 + self._num_starts = 0 + + # noinspection PyUnusedLocal + @abstractmethod + def internal_start(self, **kwargs): + return [] + + @abstractmethod + def internal_quit(self, restarting=False): + pass + + def get_errors(self): + return self._errors + + def get_num_starts(self): + return self._num_starts + + def is_reconfigured(self, **kwargs): + return self._hash != hash_dict(**kwargs) + + def start(self, **kwargs): + with self._lock: + self._errors = self.internal_start(**kwargs) + self._hash = hash_dict(**kwargs) + self._num_starts += 1 + + def quit(self, restarting=False): + with self._lock: + self.internal_quit(restarting) + + def join(self): + self.quit() + + def restart(self, **kwargs): + _reconfigured = self.is_reconfigured(**kwargs) + if _reconfigured: + if self._num_starts > 0: + self.quit(restarting=True) + self.start(**kwargs) + return _reconfigured + + +class Application(object): + def __init__(self, run_hz=10, quit_event=None): + self.logger = logging.getLogger(__name__) + self._hz = run_hz + self._sleep = 0.100 + self.set_hz(run_hz) + if quit_event is None: + self.quit_event = multiprocessing.Event() + signal.signal(signal.SIGINT, lambda sig, frame: self._interrupt()) + signal.signal(signal.SIGTERM, lambda sig, frame: self._interrupt()) + else: + self.quit_event = quit_event + # Recent window to calculate the actual processing frequency. 
+ self._rt_queue = collections.deque(maxlen=50) + + def _interrupt(self): + self.logger.info("Received interrupt, quitting.") + self.quit() + + @staticmethod + def _latest_or_none(receiver, patience): + candidate = receiver() + _time = candidate.get("time", 0) if candidate is not None else 0 + _on_time = (timestamp() - _time) < patience + return candidate if _on_time else None + + def get_hz(self): + return self._hz + + def get_actual_hz(self): + return (1.0 / np.mean(self._rt_queue)) if self._rt_queue else 0 + + def set_hz(self, hz): + self._hz = hz + self._sleep = 1.0 / hz + + def active(self): + return not self.quit_event.is_set() + + def quit(self): + self.quit_event.set() + + def setup(self): + pass + + def step(self): + pass + + def finish(self): + pass + + def run(self): + try: + self.setup() + while self.active(): + _start = time.time() + self.step() + _duration = time.time() - _start + time.sleep(max(0.0, self._sleep - _duration)) + # Report the actual clock frequency which includes the user specified wait time. + self._rt_queue.append(time.time() - _start) + except Exception as e: + # Quit first to be sure - the traceback may in some cases raise another exception. 
+ self.quit() + self.logger.error(e) + self.logger.error(traceback.format_exc()) + except KeyboardInterrupt: + self.quit() + finally: + self.finish() + + +class ApplicationExit(object): + def __init__(self, event, cb): + self._event = event + self._cb = cb + + def __call__(self, *args, **kwargs): + if self._event.is_set(): + try: + self._cb() + except Exception as e: + logger.info(e) + logger.info(traceback.format_exc()) + + +class PeriodicCallTrace(object): + def __init__(self, seconds=1.0): + self._seconds_micro = seconds * 1e6 + self._last = timestamp() + + def __call__(self, *args, **kwargs): + _callback = args[0] + _now = timestamp() + if _now - self._last > self._seconds_micro: + _callback() + self._last = _now diff --git a/jetson_runtime/BYODR_utils/common/ipc.py b/jetson_runtime/BYODR_utils/common/ipc.py new file mode 100644 index 00000000..78600c85 --- /dev/null +++ b/jetson_runtime/BYODR_utils/common/ipc.py @@ -0,0 +1,309 @@ +from __future__ import absolute_import + +import collections +import datetime +import json +import logging +import multiprocessing +import os +import sys +import threading +import time + +import numpy as np +import zmq + +from BYODR_utils.common import timestamp + +if sys.version_info > (3,): + # noinspection PyShadowingBuiltins + buffer = memoryview + + def receive_string(subscriber): + return subscriber.recv_string() + + def send_string(sender, val, flags=0): + return sender.send_string(val, flags) + +else: + + def receive_string(subscriber): + return subscriber.recv() + + def send_string(sender, val, flags=0): + return sender.send(val, flags) + + +logger = logging.getLogger(__name__) + + +class JSONPublisher(object): + def __init__(self, url, topic="", hwm=1, clean_start=True): + if clean_start and url.startswith("ipc://") and os.path.exists(url[6:]): + os.remove(url[6:]) + publisher = zmq.Context().socket(zmq.PUB) + publisher.set_hwm(hwm) + publisher.bind(url) + self._publisher = publisher + self._topic = topic + + def 
publish(self, data, topic=None): + _topic = self._topic if topic is None else topic + if data is not None: + data = dict((k, v) for k, v in data.items() if v is not None) + send_string(self._publisher, "{}:{}".format(_topic, json.dumps(data)), zmq.NOBLOCK) + + +class ImagePublisher(object): + def __init__(self, url, topic="", hwm=1, clean_start=True): + if clean_start and url.startswith("ipc://") and os.path.exists(url[6:]): + os.remove(url[6:]) + publisher = zmq.Context().socket(zmq.PUB) + publisher.set_hwm(hwm) + publisher.bind(url) + self._publisher = publisher + self._topic = topic.encode("utf-8") # Encode the topic to bytes at initialization + + def publish(self, _img, topic=None): + _topic = self._topic if topic is None else topic.encode("utf-8") + # json.dumps(...) returns a string, it needs to be encoded into bytes. + self._publisher.send_multipart( + [ + _topic, + json.dumps(dict(time=timestamp(), shape=_img.shape)).encode("utf-8"), + np.ascontiguousarray(_img, dtype=np.uint8), + ], + flags=zmq.NOBLOCK, + ) + + +class JSONReceiver(object): + def __init__(self, url, topic=b"", hwm=1, receive_timeout_ms=2, pop=False): + subscriber = zmq.Context().socket(zmq.SUB) + subscriber.set_hwm(hwm) + subscriber.setsockopt(zmq.RCVTIMEO, receive_timeout_ms) + subscriber.setsockopt(zmq.LINGER, 0) + subscriber.connect(url) + subscriber.setsockopt(zmq.SUBSCRIBE, topic) + self._pop = pop + self._unpack = hwm == 1 + self._subscriber = subscriber + self._lock = threading.Lock() + self._queue = collections.deque(maxlen=hwm) + + def consume(self): + with self._lock: + try: + # Does not replace local queue messages when none are available. 
+ self._queue.appendleft(json.loads(receive_string(self._subscriber).split(":", 1)[1])) + except zmq.Again: + pass + + def get(self): + _view = self._queue[0] if (self._queue and self._unpack) else list(self._queue) if self._queue else None + if self._pop: + self._queue.clear() + return _view + + def peek(self): + return self._queue[0] if self._queue else None + + +class CollectorThread(threading.Thread): + def __init__(self, receivers, event=None, hz=1000): + super(CollectorThread, self).__init__() + _list = isinstance(receivers, tuple) or isinstance(receivers, list) + self._receivers = receivers if _list else [receivers] + self._quit_event = multiprocessing.Event() if event is None else event + self._sleep = 1.0 / hz + + def get(self, index=0): + # Get the latest message without blocking. + # _receiver.consume() -- blocks; perform at thread.run() + return self._receivers[index].get() + + def peek(self, index=0): + return self._receivers[index].peek() + + def quit(self): + self._quit_event.set() + + def run(self): + while not self._quit_event.is_set(): + # Empty the receiver queues to not block upstream senders. 
+ list(map(lambda receiver: receiver.consume(), self._receivers)) + time.sleep(self._sleep) + + +def json_collector(url, topic, event, receive_timeout_ms=1000, hwm=1, pop=False): + return CollectorThread(JSONReceiver(url, topic, hwm=hwm, receive_timeout_ms=receive_timeout_ms, pop=pop), event=event) + + +class ReceiverThread(threading.Thread): + def __init__(self, url, event=None, topic=b"", hwm=1, receive_timeout_ms=1): + super(ReceiverThread, self).__init__() + subscriber = zmq.Context().socket(zmq.SUB) + subscriber.set_hwm(hwm) + subscriber.setsockopt(zmq.RCVTIMEO, receive_timeout_ms) + subscriber.setsockopt(zmq.LINGER, 0) + subscriber.connect(url) + subscriber.setsockopt(zmq.SUBSCRIBE, topic) + self._subscriber = subscriber + self._quit_event = multiprocessing.Event() if event is None else event + self._queue = collections.deque(maxlen=1) + self._listeners = [] + + def add_listener(self, c): + self._listeners.append(c) + + def get_latest(self): + return self._queue[0] if bool(self._queue) else None + + def pop_latest(self): + return self._queue.popleft() if bool(self._queue) else None + + def quit(self): + self._quit_event.set() + + def run(self): + while not self._quit_event.is_set(): + try: + _latest = json.loads(receive_string(self._subscriber).split(":", 1)[1]) + self._queue.appendleft(_latest) + list(map(lambda x: x(_latest), self._listeners)) + except zmq.Again: + pass + + +class CameraThread(threading.Thread): + def __init__(self, url, event, topic=b"", hwm=1, receive_timeout_ms=25): + super(CameraThread, self).__init__() + subscriber = zmq.Context().socket(zmq.SUB) + subscriber.set_hwm(hwm) + subscriber.setsockopt(zmq.RCVTIMEO, receive_timeout_ms) + subscriber.setsockopt(zmq.LINGER, 0) + subscriber.connect(url) + subscriber.setsockopt(zmq.SUBSCRIBE, topic) + self._subscriber = subscriber + self._quit_event = event + self._images = collections.deque(maxlen=1) + + def capture(self): + return self._images[0] if bool(self._images) else (None, None) + + def 
run(self): + while not self._quit_event.is_set(): + try: + [_, md, data] = self._subscriber.recv_multipart() + md = json.loads(md) + height, width, channels = md["shape"] + img = np.frombuffer(buffer(data), dtype=np.uint8) + img = img.reshape((height, width, channels)) + self._images.appendleft((md, img)) + except ValueError as e: + logger.warning(e) + except zmq.Again: + pass + + +class JSONServerThread(threading.Thread): + def __init__(self, url, event, hwm=1, receive_timeout_ms=50): + super(JSONServerThread, self).__init__() + server = zmq.Context().socket(zmq.REP) + server.set_hwm(hwm) + server.setsockopt(zmq.RCVTIMEO, receive_timeout_ms) + server.setsockopt(zmq.LINGER, 0) + server.bind(url) + self._server = server + self._quit_event = event + self._queue = collections.deque(maxlen=1) + self._listeners = [] + + def add_listener(self, c): + self._listeners.append(c) + + def on_message(self, message): + self._queue.appendleft(message) + list(map(lambda x: x(message), self._listeners)) + + def get_latest(self): + return self._queue[0] if bool(self._queue) else None + + def pop_latest(self): + return self._queue.popleft() if bool(self._queue) else None + + def serve(self, request): + return {} + + def run(self): + while not self._quit_event.is_set(): + try: + message = json.loads(receive_string(self._server)) + self.on_message(message) + send_string(self._server, json.dumps(self.serve(message))) + except zmq.Again: + pass + + +class LocalIPCServer(JSONServerThread): + def __init__(self, name, url, event, receive_timeout_ms=50): + super(LocalIPCServer, self).__init__(url, event, receive_timeout_ms) + self._name = name + self._m_startup = collections.deque(maxlen=1) + self._m_capabilities = collections.deque(maxlen=1) + + def register_start(self, errors, capabilities=None): + capabilities = {} if capabilities is None else capabilities + self._m_startup.append((datetime.datetime.utcnow().strftime("%b %d %H:%M:%S.%s UTC"), errors)) + 
self._m_capabilities.append(capabilities) + + def serve(self, message): + try: + if message.get("request") == "system/startup/list" and self._m_startup: + ts, errors = self._m_startup[-1] + messages = ["No errors"] + if errors: + d_errors = dict() # Merge to obtain distinct keys. + [d_errors.update({error.key: error.message}) for error in errors] + messages = ["{} - {}".format(k, d_errors[k]) for k in d_errors.keys()] + return {self._name: {ts: messages}} + elif message.get("request") == "system/service/capabilities" and self._m_capabilities: + return {self._name: self._m_capabilities[-1]} + except IndexError: + pass + return {} + + +class JSONZmqClient(object): + def __init__(self, urls, hwm=1, receive_timeout_ms=200): + self._urls = urls if isinstance(urls, list) else [urls] + self._receive_timeout = receive_timeout_ms + self._context = None + self._socket = None + self._hwm = hwm + self._create(self._urls) + + def _create(self, locations): + context = zmq.Context() + socket = context.socket(zmq.REQ) + socket.set_hwm(self._hwm) + socket.setsockopt(zmq.RCVTIMEO, self._receive_timeout) + socket.setsockopt(zmq.LINGER, 0) + [socket.connect(location) for location in locations] + self._context = context + self._socket = socket + + def quit(self): + if self._context is not None: + self._context.destroy() + + def call(self, message): + ret = {} + for i in range(len(self._urls)): + try: + send_string(self._socket, json.dumps(message), zmq.NOBLOCK) + ret.update(json.loads(receive_string(self._socket))) + except zmq.ZMQError: + j = i + 1 + self._create(self._urls[j:] + self._urls[:j]) + return ret diff --git a/jetson_runtime/BYODR_utils/common/location.py b/jetson_runtime/BYODR_utils/common/location.py new file mode 100644 index 00000000..953cdbee --- /dev/null +++ b/jetson_runtime/BYODR_utils/common/location.py @@ -0,0 +1,106 @@ +import collections + +import cachetools +from geographiclib.geodesic import Geodesic + + +def _distance_bearing(from_position, to_position): + 
c_latitude, c_longitude = to_position + p_latitude, p_longitude = from_position + # noinspection PyUnresolvedReferences + _g = Geodesic.WGS84.Inverse(p_latitude, p_longitude, c_latitude, c_longitude) + # Distance in meters. + _distance = _g["s12"] + # The azimuth is the heading measured clockwise from north. + # azi2 is the "forward" azimuth, i.e., the heading that takes you beyond point 2 not back to point 1. + _bearing = _g["azi2"] + return _distance, _bearing + + +class GeoTracker(object): + """ + A class for tracking geographical positions with the ability to calculate bearing between positions + based on a minimum distance criterion. + + Attributes: + _min_distance (float): Minimum distance in meters required to consider positions distinct for bearing calculations. + _positions (collections.deque): A deque to store the sequence of recent geographic positions. + _cache (cachetools.TTLCache): Cache for storing recent position calculations to reduce computation. + """ + + def __init__(self, cache_ttl=10.0, min_distance_meters=0.10): + self._min_distance = min_distance_meters + self._positions = collections.deque(maxlen=8) + self._cache = cachetools.TTLCache(maxsize=100, ttl=cache_ttl) + + def _begin(self, current): + """ + Handles the initialization or reset of tracking when there are no or insufficient previous positions. + + Parameters: + current (tuple or None): The current geographic position as a tuple (latitude, longitude) or None. + + Returns: + tuple: The current or last known latitude, longitude, and None for bearing (since bearing cannot be calculated). 
+ """ + n_positions = len(self._positions) + if n_positions == 0: + if current is None: + return None, None, None + else: + self._positions.append(current) + return current[0], current[1], None + if current is None: + current = self._positions[-1] + return current[0], current[1], None + else: + distance, bearing = _distance_bearing(self._positions[0], (current[0], current[1])) + if distance >= self._min_distance: + self._positions.append(current) + return current[0], current[1], None + + def _track(self, current): + """ + Tracks the current position and calculates the bearing if possible based on the stored positions. + + Parameters: + current (tuple or None): The current geographic position as a tuple (latitude, longitude) or None. + + Returns: + tuple: Latitude, longitude, and bearing (if calculable); otherwise, None for the bearing. + """ + n_positions = len(self._positions) + if n_positions < 2: + return self._begin(current) + if current is None: + current = self._positions[-1] + distance, bearing = _distance_bearing(self._positions[0], (current[0], current[1])) + return current[0], current[1], bearing + else: + distance, bearing = _distance_bearing(self._positions[0], (current[0], current[1])) + if distance >= self._min_distance: + self._positions.append(current) + return current[0], current[1], bearing + + def clear(self): + """ + Clears the stored positions and cache, resetting the tracker to an initial state. + """ + self._positions.clear() + + def track(self, position): + """ + Retrieves or calculates the latitude, longitude, and bearing of a given position, utilizing caching to optimize. + + Parameters: + position (tuple): The current geographic position as a tuple (latitude, longitude). + + Returns: + tuple: Latitude, longitude, and bearing (if calculable based on movement and distance); otherwise, None for the bearing. 
+ """ + _key = position + res = self._cache.get(_key) + if res is None: + res = self._track(position) + self._cache[_key] = res + return res diff --git a/jetson_runtime/BYODR_utils/common/navigate.py b/jetson_runtime/BYODR_utils/common/navigate.py new file mode 100644 index 00000000..028811ba --- /dev/null +++ b/jetson_runtime/BYODR_utils/common/navigate.py @@ -0,0 +1,374 @@ +from __future__ import absolute_import + +import glob +import json +import logging +import multiprocessing +import os +import threading +from abc import ABCMeta, abstractmethod + +from BYODR_utils.common import timestamp + + +logger = logging.getLogger(__name__) + + +def _translate_navigation_direction(value): + if value is not None: + value = value.lower() + if value == "left": + return NavigationCommand.LEFT + elif value == "right": + return NavigationCommand.RIGHT + elif value == "ahead": + return NavigationCommand.AHEAD + elif value == "default": + return NavigationCommand.DEFAULT + # No change in direction. + return None + + +class NavigationCommand(object): + DEFAULT, LEFT, AHEAD, RIGHT = (0, 1, 2, 3) + + def __init__(self, sleep=None, direction=None, speed=None): + self._time = None + self._sleep = sleep + self._direction = direction + self._speed = speed + + def get_time(self): + return self._time + + def set_time(self, value): + self._time = value + return self + + def get_sleep(self): + return self._sleep + + def get_direction(self): + return self._direction + + def get_speed(self): + return self._speed + + +class NavigationInstructions(object): + def __init__(self, version=1, commands=None): + self._version = version + commands = commands or [] + if not isinstance(commands, tuple) and not isinstance(commands, list): + commands = [commands] + self._commands = commands + + def get_commands(self): + return self._commands + + +def _parse_navigation_instructions(m): + """ + { + "version": 1, + "pilot": {"direction": "ahead" } + } + + { + "version": 1, + "pilot": [{"speed": 0}, {"sleep": 
30, "direction": "left", "speed": 1}] + } + """ + + version = m.get("version", 1) + commands = [] + pilot = m.get("pilot") + if pilot is not None: + nodes = pilot if isinstance(pilot, list) else [pilot] + for node in nodes: + commands.append( + NavigationCommand( + sleep=None if node.get("sleep") is None else float(node.get("sleep")), + direction=_translate_navigation_direction(node.get("direction")), + speed=None if node.get("speed") is None else float(node.get("speed")), + ) + ) + return NavigationInstructions(version, commands) + + +class AbstractRouteDataSource(object): + __metaclass__ = ABCMeta + + @abstractmethod + def __len__(self): + raise NotImplementedError() + + @abstractmethod + def load_routes(self): + raise NotImplementedError() + + @abstractmethod + def list_routes(self): + raise NotImplementedError() + + @abstractmethod + def get_selected_route(self): + raise NotImplementedError() + + @abstractmethod + def open(self, route_name=None): + raise NotImplementedError() + + @abstractmethod + def is_open(self): + raise NotImplementedError() + + @abstractmethod + def close(self): + raise NotImplementedError() + + @abstractmethod + def quit(self): + raise NotImplementedError() + + @abstractmethod + def list_navigation_points(self): + raise NotImplementedError() + + @abstractmethod + def has_navigation_point(self, route, point): + raise NotImplementedError() + + @abstractmethod + def list_all_images(self): + raise NotImplementedError() + + @abstractmethod + def get_image(self, image_id): + raise NotImplementedError() + + @abstractmethod + def get_image_navigation_point(self, idx): + raise NotImplementedError() + + @abstractmethod + def get_image_navigation_point_id(self, idx): + raise NotImplementedError() + + @abstractmethod + def get_instructions(self, point): + raise NotImplementedError() + + +class FileSystemRouteDataSource(AbstractRouteDataSource): + + def __init__(self, directory, fn_load_image=(lambda x: x), load_instructions=True): + self.directory = 
directory + self.fn_load_image = fn_load_image + self.load_instructions = load_instructions + self.quit_event = multiprocessing.Event() + self._load_timestamp = 0 + self.routes = [] + self.selected_route = None + # Route specific data follows. + self.points = [] + self.all_images = [] + self.image_index_to_point = {} + self.image_index_to_point_id = {} + self.point_to_instructions = {} + self._check_exists() + + def _check_exists(self): + directory = self.directory + self._exists = directory is not None and os.path.exists(directory) and os.path.isdir(directory) + + def _reset(self): + self.selected_route = None + self.points = [] + self.all_images = [] + self.image_index_to_point = {} + self.image_index_to_point_id = {} + self.point_to_instructions = {} + self.quit_event.clear() + + def load_routes(self): + self._check_exists() + if not self._exists: + self._reset() + else: + _now = timestamp() # In micro seconds. + if _now - self._load_timestamp > 1e6: + # Each route is a sub-directory of the base folder. + self.routes = [d for d in os.listdir(self.directory) if not d.startswith(".")] + self._load_timestamp = _now + logger.info("Directory '{}' contains the following routes {}.".format(self.directory, self.routes)) + + @staticmethod + def _get_command(fname): + try: + with open(fname) as f: + return json.load(f) + except IOError: + return {} + + def __len__(self): + # Zero when no route selected. + return len(self.points) + + def list_routes(self): + return self.routes + + def get_selected_route(self): + return self.selected_route + + def open(self, route_name=None): + # Reopening the selected route constitutes a reload of the disk state. + self._reset() + if self._exists and route_name in self.routes: + try: + # Load the route navigation points. 
+ _route_directory = os.path.join(self.directory, route_name) + if os.path.exists(_route_directory) and os.path.isdir(_route_directory): + np_dirs = sorted([d for d in os.listdir(_route_directory) if not d.startswith(".")]) + logger.info("{} -> {}".format(route_name, np_dirs)) + # Take the existing sort-order. + image_id = 0 + point_id = 0 # Cannot enumerate as points without images must be skipped. + for point_name in np_dirs: + if self.quit_event.is_set(): + break + np_dir = os.path.join(self.directory, route_name, point_name) + _pattern = np_dir + os.path.sep + im_files = sorted([f for f_ in [glob.glob(_pattern + e) for e in ("*.jpg", "*.jpeg")] for f in f_]) + if len(im_files) < 1: + logger.info("Skipping point '{}' as there are no images for it.".format(point_name)) + continue + if self.load_instructions: + contents = self._get_command(os.path.join(np_dir, "command.json")) + contents = contents if contents else self._get_command(os.path.join(np_dir, point_name + ".json")) + self.point_to_instructions[point_name] = _parse_navigation_instructions(contents) + # Collect images by navigation point. + for im_file in im_files: + self.all_images.append(self.fn_load_image(im_file)) + self.image_index_to_point[image_id] = point_name + self.image_index_to_point_id[image_id] = point_id + image_id += 1 + # Accept the point. 
+ self.points.append(point_name) + point_id += 1 + self.selected_route = route_name + except OSError as e: + logger.info(e) + + def is_open(self): + return self.selected_route in self.routes + + def close(self): + self._reset() + + def quit(self): + self.quit_event.set() + + def list_navigation_points(self): + return self.points + + def has_navigation_point(self, route, point): + _dir = os.path.join(self.directory, route, point) + return os.path.exists(_dir) and os.path.isdir(_dir) + + def list_all_images(self): + return self.all_images + + def get_image(self, image_id): + image_id = -1 if image_id is None else image_id + images = self.list_all_images() + return images[image_id] if len(images) > image_id >= 0 else None + + def get_image_navigation_point(self, idx): + return self.image_index_to_point[idx] + + def get_image_navigation_point_id(self, idx): + return self.image_index_to_point_id[idx] + + def get_instructions(self, point): + return self.point_to_instructions.get(point) + + +class ReloadableDataSource(AbstractRouteDataSource): + def __init__(self, delegate): + self._delegate = delegate + self._lock = threading.Lock() + # Cache the most recent selected route. 
+ self._last_listed_routes = [] + self._last_selected_route = None + + def _do_safe(self, fn): + _acquired = self._lock.acquire(False) + try: + return fn(_acquired) + finally: + if _acquired: + self._lock.release() + + def __len__(self): + return self._do_safe(lambda acquired: len(self._delegate) if acquired else 0) + + def load_routes(self): + with self._lock: + self._delegate.load_routes() + + def list_routes(self): + _acquired = self._lock.acquire(False) + try: + if _acquired: + self._last_listed_routes = self._delegate.list_routes() + return self._last_listed_routes + finally: + if _acquired: + self._lock.release() + + def get_selected_route(self): + _acquired = self._lock.acquire(False) + try: + if _acquired: + self._last_selected_route = self._delegate.get_selected_route() + return self._last_selected_route + finally: + if _acquired: + self._lock.release() + + def open(self, route_name=None): + with self._lock: + self._delegate.open(route_name) + + def is_open(self): + return self._do_safe(lambda acquired: self._delegate.is_open() if acquired else False) + + def close(self): + with self._lock: + self._delegate.close() + + def quit(self): + with self._lock: + self._delegate.quit() + + def list_navigation_points(self): + return self._do_safe(lambda acquired: self._delegate.list_navigation_points() if acquired else []) + + def has_navigation_point(self, route, point): + return self._do_safe(lambda acquired: self._delegate.has_navigation_point(route, point) if acquired else False) + + def list_all_images(self): + return self._do_safe(lambda acquired: self._delegate.list_all_images() if acquired else []) + + def get_image(self, image_id): + return self._do_safe(lambda acquired: self._delegate.get_image(image_id) if acquired else None) + + def get_image_navigation_point(self, idx): + return self._do_safe(lambda acquired: self._delegate.get_image_navigation_point(idx) if acquired else None) + + def get_image_navigation_point_id(self, idx): + return 
self._do_safe(lambda acquired: self._delegate.get_image_navigation_point_id(idx) if acquired else None) + + def get_instructions(self, point): + return self._do_safe(lambda acquired: self._delegate.get_instructions(point) if acquired else None) diff --git a/jetson_runtime/BYODR_utils/common/option.py b/jetson_runtime/BYODR_utils/common/option.py new file mode 100644 index 00000000..166f07fc --- /dev/null +++ b/jetson_runtime/BYODR_utils/common/option.py @@ -0,0 +1,63 @@ +class PropertyError(ValueError): + def __init__(self, key, msg, suggestions=None): + if suggestions is None: + suggestions = list() + self.key = key + self.message = msg + self.suggestions = suggestions + + def __str__(self): + return "{} - {}".format(self.key, self.message) + + +def str_to_bool(value): + if value.lower() in ("true", "1", "t", "y", "yes"): + return True + elif value.lower() in ("false", "0", "f", "n", "no"): + return False + else: + raise ValueError(f"Cannot convert {value} to a boolean.") + + +def _parse(key, fn_type=(lambda x: x), **kwargs): + try: + return fn_type(kwargs[key]) + except (ValueError, TypeError) as e: + raise PropertyError(key, str(e)) + + +def parse_option(key, fn_type=(lambda x: x), default_value=None, errors=None, **kwargs): + """ + Attempts to parse an option from the given keyword arguments based on the specified key. + + If the key is missing and a default value is provided, the default value is used instead. + Parameters: + - key (str): The key to look for in the keyword arguments. + - fn_type (callable, optional): A function to apply to the value of the found key. Defaults to a no-op lambda that returns the value unchanged. + - default_value (any, optional): The default value to use if the key is not found in the keyword arguments. Defaults to None. + - errors (list, optional): A list to which any encountered PropertyErrors will be appended. If None, a new list is created. Defaults to None. 
+ - **kwargs: Additional keyword arguments among which the function will look for the specified key. + + Returns: + - The value associated with 'key' in the keyword arguments after applying 'fn_type', the default value if the key is missing, or raises a KeyError if the key is missing and no default value is provided. + + Raises: + - KeyError: If the key is not found in the keyword arguments and no default value is provided. + - PropertyError: If there is a ValueError or TypeError when applying 'fn_type' to the value associated with 'key'. + """ + errors = [] if errors is None else errors + try: + if fn_type is bool: + # Use custom boolean parser + return str_to_bool(kwargs[key]) + else: + return _parse(key, fn_type=fn_type, **kwargs) + except KeyError: + if default_value is None: + errors.append(PropertyError(key, "The key is missing and no default value has been set")) + else: + return fn_type(default_value) + + +def hash_dict(**m): + return hash("".join(str(k) + str(m.get(k)) for k in sorted(m.keys()))) diff --git a/jetson_runtime/BYODR_utils/common/protocol.py b/jetson_runtime/BYODR_utils/common/protocol.py new file mode 100644 index 00000000..672a0d92 --- /dev/null +++ b/jetson_runtime/BYODR_utils/common/protocol.py @@ -0,0 +1,59 @@ +from __future__ import absolute_import + +from BYODR_utils.common import timestamp + + + +class MessageStreamProtocol(object): + """ + Safety: + Protocol uses 2 timestamps, remote and local, and does not require the clocks to be synced. + Local means receiver side so incoming messages. + Because the clocks are not synced remote and local timestamps are not directly comparable. + Timestamps: + 1. remote as reported by the sender + 2. local as recorded by the receiver + - + The protocol can be validated or invalidated. + There is a warm-up period with invalidated protocol, after system reboot. + - + The incoming stream needs to be continuous (or uninterrupted) and recent (timely). 
+ Continuity violation + Age violation + """ + + def __init__(self, max_age_ms=200, max_delay_ms=250): + self._max_age_micro = max_age_ms * 1000.0 + self._max_delay_micro = max_delay_ms * 1000.0 + # There is currently no distinction in violation types. + self._n_violations = 0 + self._last_message_time = 0 + self._last_protocol_time = 0 + + def _violation(self): + self._n_violations = 1 if self._n_violations < 1 else min(1e4, self._n_violations + 1) + + def _success(self): + self._n_violations = 0 if self._n_violations > 0 else max(-1e4, self._n_violations - 1) + + def reset(self): + self._n_violations = 0 + self._last_message_time = 0 + self._last_protocol_time = 0 + + def on_message(self, message_timestamp_micro): + # This is our time in microseconds. + local_time = timestamp() + if local_time - self._last_protocol_time > self._max_delay_micro: + self._violation() + elif message_timestamp_micro - self._last_message_time > self._max_age_micro: + self._violation() + else: + self._success() + self._last_message_time = message_timestamp_micro + self._last_protocol_time = local_time + + def check(self): + if timestamp() - self._last_protocol_time > self._max_delay_micro: + self._violation() + return self._n_violations diff --git a/jetson_runtime/BYODR_utils/common/ssh.py b/jetson_runtime/BYODR_utils/common/ssh.py new file mode 100644 index 00000000..521c7ef9 --- /dev/null +++ b/jetson_runtime/BYODR_utils/common/ssh.py @@ -0,0 +1,128 @@ +# TESTED AND WORKING ON +# Firmware version :RUT9_R_00.07.06.1 +# Firmware build date: 2024-01-02 11:11:13 +# Internal modem firmware version: SLM750_4.0.6_EQ101 +# Kernel version: 5.4.259 + + +import logging +import subprocess +import time +import traceback + +import paramiko + +# Declaring the logger +logging.basicConfig(format="%(levelname)s: %(asctime)s %(filename)s %(funcName)s %(lineno)d %(message)s", datefmt="%Y-%m-%d %H:%M:%S %p") + +logging.getLogger().setLevel(logging.INFO) +logger = logging.getLogger(__name__) + 
+paramiko_logger = logging.getLogger("paramiko") +paramiko_logger.setLevel(logging.CRITICAL) + + +class Router: + def __init__(self, ip=None, username="root", password="Modem001", port=22): + self.ip = ip if ip is not None else self.__get_nano_third_octet() + self.username = username + self.password = password + self.port = int(port) # Default value for SSH port + self.client = None + self.__open_ssh_connection() + + def __get_nano_third_octet(self): + try: + # Fetch the IP address + ip_address = subprocess.check_output("hostname -I | awk '{for (i=1; i<=NF; i++) if ($i ~ /^192\\.168\\./) print $i}'", shell=True).decode().strip().split()[0] + + # Trim off the last segment of the IP address + parts = ip_address.split(".") + network_prefix = ".".join(parts[:3]) + "." + router_ip = f"{network_prefix}1" + return router_ip + except subprocess.CalledProcessError as e: + print(f"An error occurred: {e}") + return None + + def __open_ssh_connection(self): + """ + Opens an SSH connection to the router. + """ + try: + self.client = paramiko.SSHClient() + self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + self.client.connect(self.ip, self.port, self.username, self.password) + except Exception as e: + logger.error(f"Failed to open SSH connection: {e}") + self.client = None + + def _execute_ssh_command(self, command, ip=None, file_path=None, file_contents=None, suppress_error_log=False): + """ + Executes a command on the router via SSH and returns the result. + Optionally, can write to a file on the router using SFTP. 
+ """ + router_ip = ip if ip is not None else self.ip + temp_client = None + + try: + if router_ip != self.ip: + # Establish a temporary connection for a different router + temp_client = paramiko.SSHClient() + temp_client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + temp_client.connect(router_ip, self.port, self.username, self.password) + client = temp_client + else: + # Check and use the persistent connection for the primary router + if not self.client or not self.client.get_transport() or not self.client.get_transport().is_active(): + self.__open_ssh_connection() + client = self.client + + if file_path and file_contents is not None: + # Handle SFTP file write operation + with client.open_sftp() as sftp: + with sftp.file(file_path, "w") as file: + file.write(file_contents) + # No command output in case of SFTP operation + return None + + # Execute the SSH command + stdin, stdout, stderr = client.exec_command(command) + result = stdout.read().decode().strip() + error = stderr.read().decode().strip() + + if error: + raise Exception(error) + + return result + + except Exception as e: + if not suppress_error_log: + # Log the error + caller = traceback.extract_stack(None, 2)[0][2] + logger.info(f"Error occurred in {caller}: {e}") + return None + + finally: + # Close the temporary client if it was used + if router_ip != self.ip and temp_client: + temp_client.close() + + def __close_ssh_connection(self): + """ + Closes the SSH connection to the router. 
+ """ + if self.client: + self.client.close() + self.client = None + + def fetch_ssid(self): + """Get SSID of current segment""" + output = None + # The loop is to keep calling the ssh function until it returns a value + while output is None: + output = self._execute_ssh_command("uci get wireless.@wifi-iface[0].ssid", suppress_error_log=True) + if output is None: + time.sleep(1) + return output + diff --git a/jetson_runtime/BYODR_utils/common/testing.py b/jetson_runtime/BYODR_utils/common/testing.py new file mode 100644 index 00000000..88e003b0 --- /dev/null +++ b/jetson_runtime/BYODR_utils/common/testing.py @@ -0,0 +1,140 @@ +from __future__ import absolute_import +import collections +from six.moves import map + + +class QueueReceiver(object): + def __init__(self, queue_max_size=100): + """ + A drop-in replacement for ipc ReceiverThread. + :param queue_max_size: Max length of the queue. + """ + self._queue = collections.deque(maxlen=queue_max_size) + self._listeners = [] + self._started = False + + def start(self): + self._started = True + + def is_started(self): + return self._started + + def add_listener(self, c): + self._listeners.append(c) + + def add(self, m): + self._queue.appendleft(m) + list(map(lambda x: x(m), self._listeners)) + + def get_latest(self): + return self._queue[0] if bool(self._queue) else None + + def pop_latest(self): + return self._queue.popleft() if bool(self._queue) else None + + def clear(self): + self._queue.clear() + + def quit(self): + self.clear() + self._listeners = [] + self._started = False + + +class QueueCamera(object): + def __init__(self, queue_max_size=100): + """ + A drop-in replacement for ipc CameraThread. + :param queue_max_size: Max length of the queue. 
+ """ + self._queue = collections.deque(maxlen=queue_max_size) + self._started = False + + def start(self): + self._started = True + + def is_started(self): + return self._started + + def add(self, meta_data, image): + self._queue.appendleft((meta_data, image)) + + def capture(self): + return self._queue[0] if bool(self._queue) else (None, None) + + def clear(self): + self._queue.clear() + + +class CollectPublisher(object): + def __init__(self, topic=""): + """ + A drop-in replacement for ipc JSONPublisher. + :param topic: The default topic. + """ + self._topic = topic + self._map = dict() + + def publish(self, data, topic=None): + _topic = self._topic if topic is None else topic + if _topic not in self._map: + self._map[_topic] = list() + self._map[_topic].append(data) + + def collect(self, topic=None): + _topic = self._topic if topic is None else topic + return self._map.get(_topic) + + def get_latest(self, topic=None): + return self.collect(topic=topic)[-1] + + def clear(self): + self._map.clear() + + +class CollectServer(object): + def __init__(self): + """ + A drop-in replacement for ipc LocalIPCServer. + """ + self._errors = [] + self._capabilities = [] + + def register_start(self, errors, capabilities=None): + capabilities = {} if capabilities is None else capabilities + self._errors.append(errors) + self._capabilities.append(capabilities) + + def collect(self): + return self._errors + + def get_latest(self): + return self._errors[-1] + + def clear(self): + self._errors = [] + + +class CollectJSONClient(object): + """ + A drop-in replacement for ipc JSONZmqClient. 
+ """ + + def __init__(self): + self._list = [] + + def call(self, message, ret=None): + self._list.append(message) + return ret + + def collect(self): + return self._list + + def get_latest(self): + return self._list[-1] + + def clear(self): + self._list = [] + + def quit(self): + self.clear() diff --git a/jetson_runtime/BYODR_utils/common/usbrelay.py b/jetson_runtime/BYODR_utils/common/usbrelay.py new file mode 100644 index 00000000..67ff5503 --- /dev/null +++ b/jetson_runtime/BYODR_utils/common/usbrelay.py @@ -0,0 +1,252 @@ +from __future__ import absolute_import + +import logging +import multiprocessing +import time + +import usb.core +import usb.util +from usb.util import CTRL_IN, CTRL_OUT, CTRL_TYPE_VENDOR + +logger = logging.getLogger(__name__) + + +class SingleChannelUsbRelay(object): + """ + HALJIA USB-relaismodule USB Smart Control Switch Intelligent Switch Control USB Relais module + """ + + def __init__(self, vendor=0x1A86, product=0x7523): + self._vendor = vendor + self._product = product + self._device = None + self._endpoint = None + + def attach(self): + self._device = usb.core.find(idVendor=self._vendor, idProduct=self._product) + if self._device is None: + logger.error("Device vendor={} product={} not found.".format(self._vendor, self._product)) + return + + try: + if self._device.is_kernel_driver_active(0): + self._device.detach_kernel_driver(0) + + _config = self._device.get_active_configuration() + _intf = _config[(0, 0)] + + self._endpoint = usb.util.find_descriptor( + _intf, + # match the first OUT endpoint + custom_match=(lambda _e: usb.util.endpoint_direction(_e.bEndpointAddress) == usb.util.ENDPOINT_OUT), + ) + + if self._endpoint is None: + logger.error("Endpoint not found.") + except Exception as e: + logger.error(e) + + def open(self): + if self._endpoint is not None: + self._endpoint.write([0xA0, 0x01, 0x00, 0xA1]) + + def close(self): + if self._endpoint is not None: + self._endpoint.write([0xA0, 0x01, 0x01, 0xA2]) + + +class 
DoubleChannelUsbRelay(object): + """ + ICQUANZX SRD-05VDC-SL-C 2-way + """ + + def __init__(self, vendor=0x16C0, product=0x05DF): + self._vendor = vendor + self._product = product + self._device_on = [[0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], [0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]] + self._device_off = [[0xFC, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], [0xFC, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]] + self._device = None + + def find(self): + return usb.core.find(idVendor=self._vendor, idProduct=self._product) + + def poll(self): + return self.find() is not None + + def attach(self): + self._device = self.find() + self._device = usb.core.find(idVendor=self._vendor, idProduct=self._product) + if self._device is None: + logger.error("Device vendor={} product={} not found.".format(self._vendor, self._product)) + return + + try: + if self._device.is_kernel_driver_active(0): + self._device.detach_kernel_driver(0) + self._device.set_configuration() + except Exception as e: + logger.error(e) + + def is_attached(self): + return self._device is not None + + def open(self, channel=0): + # assert self.is_attached(), "The device is not attached." + self._device.ctrl_transfer(0x21, 0x09, 0x0300, 0x0000, "".join(chr(n) for n in self._device_off[channel]), 1000) + + def close(self, channel=0): + # assert self.is_attached(), "The device is not attached." 
+ self._device.ctrl_transfer(0x21, 0x09, 0x0300, 0x0000, "".join(chr(n) for n in self._device_on[channel]), 1000) + + +class TransientMemoryRelay(object): + """Fake class, I suppose""" + + def __init__(self, num_channels=4): + self._state = [0] * num_channels + + def open(self, channel=0): + self._state[channel] = 0 + + def close(self, channel=0): + self._state[channel] = 1 + + def states(self): + return [bool(x) for x in self._state] + + +class FourChannelUsbRelay(object): + """ + Conrad Components 393905 Relay Module 5 V/DC + Conrad article 393905 + Conrad supplier 393905 + EAN: 4016138810585 + Type: CP210x + """ + + MAX_GPIO_INDEX = 4 + + CP210X_VENDOR_ID = 0x10C4 + CP210X_PRODUCT_ID = 0xEA60 + + CP210X_REQUEST_TYPE_READ = CTRL_IN | CTRL_TYPE_VENDOR + CP210X_REQUEST_TYPE_WRITE = CTRL_OUT | CTRL_TYPE_VENDOR + + CP210X_REQUEST_VENDOR = 0xFF + + CP210X_VALUE_READ_LATCH = 0x00C2 + CP210X_VALUE_WRITE_LATCH = 0x37E1 + + def __init__(self, vendor=CP210X_VENDOR_ID, product=CP210X_PRODUCT_ID): + """ + Adapted from https://github.com/jjongbloets/CP210xControl/blob/master/CP210xControl/model.py. + """ + self._vendor = vendor + self._product = product + self._device = None + + def find(self): + return usb.core.find(idVendor=self._vendor, idProduct=self._product) + + def poll(self): + return self.find() is not None + + def attach(self): + self._device = self.find() + if self._device is None: + logger.error("Device vendor={} product={} not found.".format(self._vendor, self._product)) + return + + try: + if self._device.is_kernel_driver_active(0): + self._device.detach_kernel_driver(0) + self._device.set_configuration() + except Exception as e: + logger.error(e) + + def is_attached(self): + return self._device is not None + + def _query(self, request, value, index, length): + # assert self.is_attached(), "The device is not attached." 
+ return self._device.ctrl_transfer(self.CP210X_REQUEST_TYPE_READ, request, value, index, length) + + def _write(self, request, value, index, data): + # assert self.is_attached(), "The device is not attached." + return self._device.ctrl_transfer(self.CP210X_REQUEST_TYPE_WRITE, request, value, index, data) + + def _set_gpio(self, index, value): + mask = 1 << index + values = (0 if value else 1) << index + msg = (values << 8) | mask + return self._write(self.CP210X_REQUEST_VENDOR, self.CP210X_VALUE_WRITE_LATCH, msg, 0) + + def _get_gpio_states(self): + results = [] + response = self._query(self.CP210X_REQUEST_VENDOR, self.CP210X_VALUE_READ_LATCH, 0, 1) + if len(response) > 0: + response = response[0] + for idx in range(self.MAX_GPIO_INDEX): + results.append((response & (1 << idx)) == 0) + return results + + def open(self, channel=0): + self._set_gpio(channel, 0) + + def close(self, channel=0): + self._set_gpio(channel, 1) + + def states(self): + return self._get_gpio_states() + + +class SearchUsbRelayFactory(object): + def __init__(self): + _relay = FourChannelUsbRelay() + # The others are not supported until they expose a read state method. 
+ # if not _relay.poll(): + # _relay = DoubleChannelUsbRelay() + _relay.attach() + self._relay = _relay + + def get_relay(self): + return self._relay + + +class StaticRelayHolder(object): + def __init__(self, relay, default_channels=(0,)): + self._relay = relay + self._default_channels = self._tup_or_li(default_channels) + self._pulse_channels = () + self._lock = multiprocessing.Lock() + + @staticmethod + def _tup_or_li(arg): + return arg if isinstance(arg, tuple) or isinstance(arg, list) else (arg,) + + def _arg_(self, ch=None): + return self._default_channels if ch is None else self._tup_or_li(ch) + + def set_pulse_channels(self, channels): + with self._lock: + self._pulse_channels = self._tup_or_li(channels) + + def open(self, channels=None): + with self._lock: + [self._relay.open() for ch in self._arg_(channels)] + + def close(self, channels=None): + with self._lock: + for ch in self._arg_(channels): + self._relay.close() + if ch in self._pulse_channels: + time.sleep(0.100) + self._relay.open() + + def states(self): + with self._lock: + return self._relay.states() + + def pulse_config(self): + with self._lock: + return [i in self._pulse_channels for i in range(len(self._relay.states()))] diff --git a/jetson_runtime/BYODR_utils/common/video.py b/jetson_runtime/BYODR_utils/common/video.py new file mode 100644 index 00000000..e1b4116b --- /dev/null +++ b/jetson_runtime/BYODR_utils/common/video.py @@ -0,0 +1,121 @@ +from __future__ import absolute_import + +import collections +import logging +import threading +import time + +import gi +import numpy as np + +gi.require_version("Gst", "1.0") +from gi.repository import Gst + +Gst.init(None) + +logger = logging.getLogger(__name__) + + +class RawGstSource(object): + def __init__(self, name="app", boot_time_seconds=20, command="videotestsrc ! decodebin ! videoconvert ! appsink"): + assert "appsink" in command, "Need the appsink present in the gst command." 
+ self.name = name + self.boot_time_seconds = boot_time_seconds + self.command = command.replace("appsink", "appsink name=sink emit-signals=true sync=false async=false max-buffers=1 drop=true") + self._listeners = collections.deque() + self._listeners_lock = threading.Lock() + self._sample_time = None + self.closed = True + self.video_pipe = None + + def _setup(self): + self.video_pipe = Gst.parse_launch(self.command) + self.closed = True + + # noinspection PyUnusedLocal + def _eos(self, bus, msg): + logger.info(msg) + self.close() + + # noinspection PyUnusedLocal + def _error(self, bus, msg): + logger.error(msg) + self.close() + + def _sample(self, sink): + buffer = sink.emit("pull-sample").get_buffer() + array = self.convert_buffer(buffer.extract_dup(0, buffer.get_size())) + with self._listeners_lock: + for listen in self._listeners: + listen(array) + self._sample_time = time.time() + return Gst.FlowReturn.OK + + def convert_buffer(self, buffer): + return buffer + + def add_listener(self, listener): + with self._listeners_lock: + self._listeners.append(listener) + + def remove_listener(self, listener): + with self._listeners_lock: + self._listeners.remove(listener) + + def open(self): + self._setup() + self.video_pipe.set_state(Gst.State.PLAYING) + video_sink = self.video_pipe.get_by_name("sink") + video_sink.connect("new-sample", self._sample) + bus = self.video_pipe.get_bus() + bus.add_signal_watch() + bus.connect("message::eos", self._eos) + bus.connect("message::error", self._error) + self.closed = False + self._sample_time = time.time() + self.boot_time_seconds + logger.info("Source {} opened.".format(self.name)) + + def is_healthy(self, patience): + return self._sample_time and time.time() - self._sample_time < patience + + def is_closed(self): + return self.closed + + def is_open(self): + return not self.is_closed() + + def check(self, patience=0.50): + if self.is_open() and not self.is_healthy(patience=patience): + self.close() + if self.is_closed(): + 
self.open() + + def close(self): + if self.video_pipe is not None: + self.video_pipe.set_state(Gst.State.NULL) + self.closed = True + logger.info("Source {} closed.".format(self.name)) + + +class GstStreamSource(RawGstSource): + def __init__(self, name, shape, command, fn_convert=(lambda x: x)): + super(GstStreamSource, self).__init__(name=name, command=command) + self._shape = shape + self._fn_convert = fn_convert + + def get_width(self): + return self._shape[1] + + def get_height(self): + return self._shape[0] + + def convert_buffer(self, buffer): + return self._fn_convert(buffer) + + +def create_image_source(name, shape, command): + return GstStreamSource(name, shape, command, fn_convert=(lambda buffer: np.fromstring(buffer, dtype=np.uint8).reshape(shape))) + + +def create_video_source(name, shape, command): + return GstStreamSource(name, shape, command) diff --git a/jetson_runtime/BYODR_utils/common/websocket.py b/jetson_runtime/BYODR_utils/common/websocket.py new file mode 100644 index 00000000..068ed145 --- /dev/null +++ b/jetson_runtime/BYODR_utils/common/websocket.py @@ -0,0 +1,99 @@ +from __future__ import absolute_import + +import json +import logging +import threading +import traceback + +import gi +from tornado import websocket + +gi.require_version("Gst", "1.0") +from gi.repository import Gst + +Gst.init(None) + +logger = logging.getLogger(__name__) + + +class HttpLivePlayerVideoSocket(websocket.WebSocketHandler): + def __init__(self, application, request, **kwargs): + super(HttpLivePlayerVideoSocket, self).__init__(application, request, **kwargs) + self._lock = threading.Lock() + self._streaming = False + + # noinspection PyAttributeOutsideInit + def initialize(self, **kwargs): + self._video = kwargs.get("video_source") + self._io_loop = kwargs.get("io_loop") + + def _push(self, _bytes): + with self._lock: + if self._streaming: + try: + self.write_message(_bytes, binary=True) + except websocket.WebSocketClosedError: + pass + + def _client(self, 
_bytes): + self._io_loop.add_callback(lambda: self._push(_bytes)) + + # noinspection PyUnusedLocal + @staticmethod + def check_origin(origin): + return True + + def data_received(self, chunk): + pass + + # noinspection PyUnusedLocal + def open(self, *args, **kwargs): + self._video.add_listener(self._client) + self.write_message(json.dumps(dict(action="init", width=self._video.get_width(), height=self._video.get_height()))) + + def on_close(self): + self._video.remove_listener(self._client) + + def on_message(self, message): + try: + with self._lock: + self._streaming = "REQUESTSTREAM" in message + logger.info("On message - streaming = {}.".format(self._streaming)) + except Exception as e: + logger.error("Stream socket@on_message: {} {}".format(e, traceback.format_exc())) + logger.error("Input message:---\n{}\n---".format(message)) + + +class JMuxerVideoStreamSocket(websocket.WebSocketHandler): + # noinspection PyAttributeOutsideInit + def initialize(self, **kwargs): + self._video = kwargs.get("video_source") + self._io_loop = kwargs.get("io_loop") + + def _push(self, _bytes): + try: + self.write_message(_bytes, binary=True) + except websocket.WebSocketClosedError: + pass + + def _client(self, _bytes): + self._io_loop.add_callback(lambda: self._push(_bytes)) + + # noinspection PyUnusedLocal + @staticmethod + def check_origin(origin): + return True + + def data_received(self, chunk): + pass + + # noinspection PyUnusedLocal + def open(self, *args, **kwargs): + self._video.add_listener(self._client) + + def on_close(self): + self._video.remove_listener(self._client) + + @staticmethod + def on_message(message): + logger.info("Unexpected message '{}' received.".format(message)) diff --git a/jetson_runtime/docker-compose.yml b/jetson_runtime/docker-compose.yml index 335881cd..bfdfb387 100644 --- a/jetson_runtime/docker-compose.yml +++ b/jetson_runtime/docker-compose.yml @@ -128,7 +128,6 @@ services: dockerfile: inference/runtime-cp36-jp441.dockerfile restart: always 
privileged: true - command: ['python3', '-m', 'inference.app', '--user', '/sessions/models', '--routes', '/sessions/routes'] environment: LD_PRELOAD: libgomp.so.1 OMP_PLACES: '{3}' diff --git a/jetson_runtime/following/app.py b/jetson_runtime/following/app.py index 5e7eda07..1a0b33f1 100644 --- a/jetson_runtime/following/app.py +++ b/jetson_runtime/following/app.py @@ -6,8 +6,8 @@ import os import signal -from byodr.utils import Application, hash_dict -from byodr.utils.ipc import json_collector +from BYODR_utils.common import Application, hash_dict +from BYODR_utils.common.ipc import json_collector from fol_utils import FollowingController logger = logging.getLogger(__name__) diff --git a/jetson_runtime/following/fol_utils.py b/jetson_runtime/following/fol_utils.py index 8d30d65e..3edaf9cb 100644 --- a/jetson_runtime/following/fol_utils.py +++ b/jetson_runtime/following/fol_utils.py @@ -5,8 +5,8 @@ import time import cv2 -from byodr.utils.ipc import JSONPublisher -from byodr.utils.option import parse_option +from BYODR_utils.common.ipc import JSONPublisher +from BYODR_utils.common.option import parse_option from ultralytics import YOLO logger = logging.getLogger(__name__) diff --git a/jetson_runtime/inference/inference/app.py b/jetson_runtime/inference/inference/app.py index e05bc423..c350d061 100644 --- a/jetson_runtime/inference/inference/app.py +++ b/jetson_runtime/inference/inference/app.py @@ -16,10 +16,10 @@ from six.moves import range from sklearn.metrics.pairwise import cosine_distances -from byodr.utils import timestamp, Configurable, Application -from byodr.utils.ipc import CameraThread, JSONPublisher, LocalIPCServer, json_collector -from byodr.utils.navigate import FileSystemRouteDataSource, ReloadableDataSource -from byodr.utils.option import parse_option, PropertyError +from BYODR_utils.common import timestamp, Configurable, Application +from BYODR_utils.common.ipc import CameraThread, JSONPublisher, LocalIPCServer, json_collector +from 
BYODR_utils.common.navigate import FileSystemRouteDataSource, ReloadableDataSource +from BYODR_utils.common.option import parse_option, PropertyError from .image import get_registered_function from .torched import DynamicMomentum, TRTDriver @@ -373,7 +373,7 @@ def finish(self): self._runner.quit() # def run(self): - # from byodr.utils import Profiler + # from BYODR_utils.common import Profiler # profiler = Profiler() # with profiler(): # super(InferenceApplication, self).run() diff --git a/jetson_runtime/inference/inference/image.py b/jetson_runtime/inference/inference/image.py index 1c905016..79f8cb7d 100644 --- a/jetson_runtime/inference/inference/image.py +++ b/jetson_runtime/inference/inference/image.py @@ -5,7 +5,7 @@ import cv2 import numpy as np -from byodr.utils.option import PropertyError +from BYODR_utils.common.option import PropertyError def hwc_bgr_to_yuv(img): diff --git a/jetson_runtime/inference/inference/tests.py b/jetson_runtime/inference/inference/tests.py index da2ddc37..ec540a9e 100644 --- a/jetson_runtime/inference/inference/tests.py +++ b/jetson_runtime/inference/inference/tests.py @@ -4,7 +4,7 @@ import sys from io import open -from byodr.utils.testing import CollectPublisher, QueueReceiver, CollectServer, QueueCamera +from BYODR_utils.common.testing import CollectPublisher, QueueReceiver, CollectServer, QueueCamera from .app import InferenceApplication, TFRunner if sys.version_info > (3,): diff --git a/jetson_runtime/inference/runtime-cp36-jp441.dockerfile b/jetson_runtime/inference/runtime-cp36-jp441.dockerfile index 9f4b2a67..faca5e68 100644 --- a/jetson_runtime/inference/runtime-cp36-jp441.dockerfile +++ b/jetson_runtime/inference/runtime-cp36-jp441.dockerfile @@ -11,3 +11,5 @@ ENV PYTHONPATH "/app:${PYTHONPATH}" WORKDIR /app/inference + +CMD ["python3", "-m", "inference.app", "--user", "/sessions/models", "--routes", "/sessions/routes", "--internal","/app/models"] diff --git a/jetson_runtime/pilot/pilot/app.py 
b/jetson_runtime/pilot/pilot/app.py index 8ab09d7b..9d5169ac 100644 --- a/jetson_runtime/pilot/pilot/app.py +++ b/jetson_runtime/pilot/pilot/app.py @@ -11,17 +11,18 @@ import threading import traceback -from byodr.utils import Application, ApplicationExit -from byodr.utils.gpio_relay import ThreadSafeGpioRelay -from byodr.utils.ipc import JSONPublisher, LocalIPCServer, json_collector -from byodr.utils.navigate import FileSystemRouteDataSource, ReloadableDataSource -from byodr.utils.option import parse_option -from byodr.utils.usbrelay import SearchUsbRelayFactory, StaticRelayHolder, TransientMemoryRelay from six.moves.configparser import SafeConfigParser from tornado import ioloop, web from tornado.httpserver import HTTPServer from tornado.platform.asyncio import AnyThreadEventLoopPolicy +from BYODR_utils.common import Application, ApplicationExit +from BYODR_utils.common.ipc import JSONPublisher, LocalIPCServer, json_collector +from BYODR_utils.common.navigate import FileSystemRouteDataSource, ReloadableDataSource +from BYODR_utils.common.option import parse_option +from BYODR_utils.common.usbrelay import SearchUsbRelayFactory, StaticRelayHolder, TransientMemoryRelay +from BYODR_utils.JETSON_specific.gpio_relay import ThreadSafeJetsonGpioRelay + from .core import CommandProcessor from .relay import NoopMonitoringRelay, RealMonitoringRelay from .web import RelayConfigRequestHandler, RelayControlRequestHandler @@ -64,7 +65,7 @@ def _check_relay_type(self): _cfg = self._config() gpio_relay = _cfg.get("driver.gpio_relay", "false").strip().lower() == "true" # in case it is saved in lower case from JS in TEL side if gpio_relay: - relay = ThreadSafeGpioRelay() + relay = ThreadSafeJetsonGpioRelay() logger.info("Initialized GPIO Relay") else: relay = SearchUsbRelayFactory().get_relay() diff --git a/jetson_runtime/pilot/pilot/core.py b/jetson_runtime/pilot/pilot/core.py index 5f213bc6..49d2016b 100644 --- a/jetson_runtime/pilot/pilot/core.py +++ 
b/jetson_runtime/pilot/pilot/core.py @@ -14,9 +14,9 @@ from simple_pid import PID as pid_control from six.moves import zip -from byodr.utils import timestamp, Configurable -from byodr.utils.navigate import NavigationCommand, NavigationInstructions -from byodr.utils.option import parse_option +from BYODR_utils.common import timestamp, Configurable +from BYODR_utils.common.navigate import NavigationCommand, NavigationInstructions +from BYODR_utils.common.option import parse_option logger = logging.getLogger(__name__) diff --git a/jetson_runtime/pilot/pilot/relay.py b/jetson_runtime/pilot/pilot/relay.py index a9ae1d1d..e781667b 100644 --- a/jetson_runtime/pilot/pilot/relay.py +++ b/jetson_runtime/pilot/pilot/relay.py @@ -9,10 +9,10 @@ import six from configparser import ConfigParser -from byodr.utils import timestamp -from byodr.utils.ipc import ReceiverThread, JSONZmqClient -from byodr.utils.option import parse_option, hash_dict -from byodr.utils.protocol import MessageStreamProtocol +from BYODR_utils.common import timestamp +from BYODR_utils.common.ipc import ReceiverThread, JSONZmqClient +from BYODR_utils.common.option import parse_option, hash_dict +from BYODR_utils.common.protocol import MessageStreamProtocol logger = logging.getLogger(__name__) log_format = "%(levelname)s: %(filename)s %(funcName)s %(message)s" diff --git a/jetson_runtime/rosnode/app.py b/jetson_runtime/rosnode/app.py index 5e52a82b..869b14ef 100644 --- a/jetson_runtime/rosnode/app.py +++ b/jetson_runtime/rosnode/app.py @@ -10,8 +10,8 @@ from rclpy.node import Node from std_msgs.msg import Float32 -from byodr.utils import Application, timestamp -from byodr.utils.ipc import json_collector, JSONPublisher +from BYODR_utils.common import Application, timestamp +from BYODR_utils.common.ipc import json_collector, JSONPublisher class Bridge(Node): diff --git a/jetson_runtime/teleop/logbox/app.py b/jetson_runtime/teleop/logbox/app.py index 19f1893d..927dd36e 100644 --- 
a/jetson_runtime/teleop/logbox/app.py +++ b/jetson_runtime/teleop/logbox/app.py @@ -7,7 +7,7 @@ import traceback from datetime import datetime -from byodr.utils import Application +from BYODR_utils.common import Application from .core import * from .store import Event, create_data_source diff --git a/jetson_runtime/teleop/logbox/core.py b/jetson_runtime/teleop/logbox/core.py index 366bea02..ea727401 100644 --- a/jetson_runtime/teleop/logbox/core.py +++ b/jetson_runtime/teleop/logbox/core.py @@ -7,7 +7,7 @@ from bson.binary import Binary from bson.objectid import ObjectId from pymongo import MongoClient -from byodr.utils import timestamp +from BYODR_utils.common import timestamp TRIGGER_SERVICE_START = 2**0 TRIGGER_SERVICE_END = 2**1 diff --git a/jetson_runtime/teleop/teleop/app.py b/jetson_runtime/teleop/teleop/app.py index 5d29a51f..6fd93167 100644 --- a/jetson_runtime/teleop/teleop/app.py +++ b/jetson_runtime/teleop/teleop/app.py @@ -11,19 +11,22 @@ import signal import threading from concurrent.futures import ThreadPoolExecutor -from byodr.utils import Application, ApplicationExit, hash_dict -from byodr.utils.ipc import CameraThread, JSONPublisher, JSONZmqClient, json_collector -from byodr.utils.navigate import FileSystemRouteDataSource, ReloadableDataSource -from byodr.utils.option import parse_option + from logbox.app import LogApplication, PackageApplication from logbox.core import MongoLogBox, SharedState, SharedUser from logbox.web import DataTableRequestHandler, JPEGImageRequestHandler +from pymongo import MongoClient from tornado import ioloop, web from tornado.httpserver import HTTPServer from tornado.platform.asyncio import AnyThreadEventLoopPolicy + +from BYODR_utils.common import Application, ApplicationExit, hash_dict +from BYODR_utils.common.ipc import CameraThread, JSONPublisher, JSONZmqClient, json_collector +from BYODR_utils.common.navigate import FileSystemRouteDataSource, ReloadableDataSource +from BYODR_utils.common.option import parse_option 
+ from .server import * -from .tel_utils import EndpointHandlers, ThrottleController, FollowingUtils -from pymongo import MongoClient +from .tel_utils import EndpointHandlers, FollowingUtils, ThrottleController logger = logging.getLogger(__name__) diff --git a/jetson_runtime/teleop/teleop/server.py b/jetson_runtime/teleop/teleop/server.py index bb8b5eff..3b2dc28c 100644 --- a/jetson_runtime/teleop/teleop/server.py +++ b/jetson_runtime/teleop/teleop/server.py @@ -17,8 +17,8 @@ import tornado import tornado.ioloop import tornado.web -from byodr.utils import timestamp -from byodr.utils.ssh import Router +from BYODR_utils.common import timestamp +from BYODR_utils.common.ssh import Router from six.moves import range from six.moves.configparser import SafeConfigParser from tornado import web, websocket diff --git a/jetson_runtime/teleop/teleop/tel_utils.py b/jetson_runtime/teleop/teleop/tel_utils.py index b329a62f..54cfef5f 100644 --- a/jetson_runtime/teleop/teleop/tel_utils.py +++ b/jetson_runtime/teleop/teleop/tel_utils.py @@ -11,7 +11,7 @@ import folium import pandas as pd import requests -from byodr.utils import timestamp +from BYODR_utils.common import timestamp # needs to be installed on the router from pysnmp.hlapi import * diff --git a/jetson_runtime/vehicles/carla09/app.py b/jetson_runtime/vehicles/carla09/app.py index d8dd8f07..e6d7bd99 100644 --- a/jetson_runtime/vehicles/carla09/app.py +++ b/jetson_runtime/vehicles/carla09/app.py @@ -11,11 +11,11 @@ from tornado import web, ioloop from tornado.httpserver import HTTPServer -from byodr.utils import Application -from byodr.utils import Configurable -from byodr.utils.ipc import JSONPublisher, ImagePublisher, LocalIPCServer, json_collector -from byodr.utils.option import parse_option -from byodr.utils.websocket import HttpLivePlayerVideoSocket +from BYODR_utils.common import Application +from BYODR_utils.common import Configurable +from BYODR_utils.common.ipc import JSONPublisher, ImagePublisher, LocalIPCServer, 
json_collector +from BYODR_utils.common.option import parse_option +from BYODR_utils.common.websocket import HttpLivePlayerVideoSocket from vehicle import CarlaHandler from video import NumpyImageVideoSource @@ -148,7 +148,7 @@ def finish(self): self._runner.quit() # def run(self): - # from byodr.utils import Profiler + # from BYODR_utils.common import Profiler # profiler = Profiler() # with profiler(): # super(CarlaApplication, self).run() diff --git a/jetson_runtime/vehicles/carla09/vehicle.py b/jetson_runtime/vehicles/carla09/vehicle.py index baede9a0..5f0b2d05 100644 --- a/jetson_runtime/vehicles/carla09/vehicle.py +++ b/jetson_runtime/vehicles/carla09/vehicle.py @@ -10,9 +10,9 @@ import numpy as np from carla import Transform, Location, Rotation -from byodr.utils import timestamp, Configurable -from byodr.utils.location import GeoTracker -from byodr.utils.option import parse_option +from BYODR_utils.common import timestamp, Configurable +from BYODR_utils.common.location import GeoTracker +from BYODR_utils.common.option import parse_option logger = logging.getLogger(__name__) diff --git a/jetson_runtime/vehicles/carla09/video.py b/jetson_runtime/vehicles/carla09/video.py index 328adce8..499a6d05 100644 --- a/jetson_runtime/vehicles/carla09/video.py +++ b/jetson_runtime/vehicles/carla09/video.py @@ -4,8 +4,8 @@ import gi -from byodr.utils import Configurable -from byodr.utils.option import parse_option +from BYODR_utils.common import Configurable +from BYODR_utils.common.option import parse_option gi.require_version("Gst", "1.0") diff --git a/jetson_runtime/vehicles/rover/app.py b/jetson_runtime/vehicles/rover/app.py index ef08b031..3950c0e7 100644 --- a/jetson_runtime/vehicles/rover/app.py +++ b/jetson_runtime/vehicles/rover/app.py @@ -7,10 +7,10 @@ import shutil import subprocess -from byodr.utils import Application, Configurable, PeriodicCallTrace, timestamp -from byodr.utils.ipc import ImagePublisher, JSONPublisher, LocalIPCServer, ReceiverThread, 
json_collector -from byodr.utils.location import GeoTracker -from byodr.utils.option import hash_dict, parse_option +from BYODR_utils.common import Application, Configurable, PeriodicCallTrace, timestamp +from BYODR_utils.common.ipc import ImagePublisher, JSONPublisher, LocalIPCServer, ReceiverThread, json_collector +from BYODR_utils.common.location import GeoTracker +from BYODR_utils.common.option import hash_dict, parse_option from configparser import ConfigParser as SafeConfigParser from core import ConfigurableImageGstSource, GpsPollerThreadSNMP, PTZCamera diff --git a/jetson_runtime/vehicles/rover/core.py b/jetson_runtime/vehicles/rover/core.py index e09f38db..e262dbed 100644 --- a/jetson_runtime/vehicles/rover/core.py +++ b/jetson_runtime/vehicles/rover/core.py @@ -6,9 +6,9 @@ import time import requests -from byodr.utils import Configurable -from byodr.utils.option import parse_option -from byodr.utils.video import create_image_source +from BYODR_utils.common import Configurable +from BYODR_utils.common.option import parse_option +from BYODR_utils.common.video import create_image_source # Needs to be installed on the router from pysnmp.hlapi import * diff --git a/pi4_runtime/stream/Dockerfile b/pi4_runtime/stream/Dockerfile index 495fb472..38c432ff 100644 --- a/pi4_runtime/stream/Dockerfile +++ b/pi4_runtime/stream/Dockerfile @@ -1,4 +1,15 @@ -FROM centipede2donald/raspbian-stretch:gst-omx-rpi-0.50.2 +FROM mmastrac/gst-omx-rpi + +ENV DEBIAN_FRONTEND noninteractive + +RUN apt-get --allow-releaseinfo-change update && apt-get install -y \ + nano \ + v4l-utils \ + python3-gi \ + python3-numpy \ + python3-six \ + python3-tornado \ + && apt-get -y clean && rm -rf /var/lib/apt/lists/* # Copy application files @@ -10,5 +21,4 @@ ENV PYTHONPATH "/app:${PYTHONPATH}" WORKDIR /app/stream - CMD ["sleep", "infinity"] \ No newline at end of file diff --git a/pi4_runtime/stream/camera.py b/pi4_runtime/stream/camera.py index 0ecbf155..dceec5be 100644 --- 
a/pi4_runtime/stream/camera.py +++ b/pi4_runtime/stream/camera.py @@ -19,10 +19,10 @@ from tornado import web, ioloop from tornado.platform.asyncio import AnyThreadEventLoopPolicy -from byodr.utils import Application -from byodr.utils.option import parse_option -from byodr.utils.video import create_video_source -from byodr.utils.websocket import HttpLivePlayerVideoSocket, JMuxerVideoStreamSocket +from BYODR_utils.common import Application +from BYODR_utils.common.option import parse_option +from BYODR_utils.common.video import create_video_source +from BYODR_utils.common.websocket import HttpLivePlayerVideoSocket, JMuxerVideoStreamSocket logger = logging.getLogger(__name__) From df172f1d7541ebe389d2f604470c56a7da68ac58 Mon Sep 17 00:00:00 2001 From: Ahmed Mahfouz Date: Fri, 11 Oct 2024 14:44:54 +0200 Subject: [PATCH 6/9] refactor update with new path --- .github/workflows/docker-compose-test.yml | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/.github/workflows/docker-compose-test.yml b/.github/workflows/docker-compose-test.yml index 52618a53..37b2a4b9 100644 --- a/.github/workflows/docker-compose-test.yml +++ b/.github/workflows/docker-compose-test.yml @@ -15,15 +15,15 @@ jobs: - name: Docker compose build run: | - services="zerotier wireguard httpd teleop pilot ftpd rosnode vehicle" - docker-compose -f docker-compose.yml -f docker-compose.override.yml build $services + cd jetson_runtime + docker-compose build - name: Docker compose up run: | - services="zerotier wireguard httpd teleop pilot ftpd rosnode vehicle" - docker-compose -f docker-compose.yml -f docker-compose.override.yml up -d $services + cd jetson_runtime + docker-compose up -d sleep 30 - docker-compose -f docker-compose.yml -f docker-compose.override.yml down + docker-compose down Pi_Docker: runs-on: ubuntu-20.04 @@ -33,10 +33,13 @@ jobs: uses: actions/checkout@v2 - name: Docker compose build (raspi) - run: docker-compose -f raspi/docker-compose.yml build + run: | 
+ cd pi4_runtime + docker-compose build - name: Docker compose up (raspi) run: | - docker-compose -f raspi/docker-compose.yml up -d + cd pi4_runtime + docker-compose up -d sleep 30 - docker-compose -f raspi/docker-compose.yml down + docker-compose down From 87a4cff273f59b39d8da0c4dd83d2646d06784c1 Mon Sep 17 00:00:00 2001 From: Ahmed Mahfouz Date: Fri, 11 Oct 2024 14:58:58 +0200 Subject: [PATCH 7/9] feat install docker compose --- jetson_runtime/docker-compose.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/jetson_runtime/docker-compose.yml b/jetson_runtime/docker-compose.yml index bfdfb387..43758b34 100644 --- a/jetson_runtime/docker-compose.yml +++ b/jetson_runtime/docker-compose.yml @@ -65,14 +65,14 @@ services: cpuset: '0' build: context: . - dockerfile: mongodb/Dockerfile + dockerfile: mongodb/Dockerfile restart: always command: ['python3', 'wrap.py'] network_mode: host stop_signal: SIGKILL environment: MONGO_INITDB_ROOT_USERNAME: admin - MONGO_INITDB_ROOT_PASSWORD: robot + MONGO_INITDB_ROOT_PASSWORD: robot volumes: - volume_mongodb_config:/config:rw - volume_mongodb_data:/data/db:rw From 49fc5f6159998d4e1a7492f6fa2123ebcfa713fa Mon Sep 17 00:00:00 2001 From: Ahmed Mahfouz Date: Fri, 11 Oct 2024 14:58:58 +0200 Subject: [PATCH 8/9] feat install docker buildx compose --- .github/workflows/docker-compose-test.yml | 21 ++++++++++++++++++--- jetson_runtime/docker-compose.yml | 4 ++-- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/.github/workflows/docker-compose-test.yml b/.github/workflows/docker-compose-test.yml index 37b2a4b9..134ba5ee 100644 --- a/.github/workflows/docker-compose-test.yml +++ b/.github/workflows/docker-compose-test.yml @@ -13,25 +13,40 @@ jobs: - name: Check out repository uses: actions/checkout@v2 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Install Docker Compose + run: | + sudo apt-get update + sudo apt-get install -y docker-compose + - name: Docker compose build 
run: | cd jetson_runtime - docker-compose build + docker-compose build - name: Docker compose up run: | cd jetson_runtime - docker-compose up -d + docker-compose up -d sleep 30 docker-compose down Pi_Docker: runs-on: ubuntu-20.04 - steps: - name: Check out repository uses: actions/checkout@v2 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Install Docker Compose + run: | + sudo apt-get update + sudo apt-get install -y docker-compose + - name: Docker compose build (raspi) run: | cd pi4_runtime diff --git a/jetson_runtime/docker-compose.yml b/jetson_runtime/docker-compose.yml index bfdfb387..43758b34 100644 --- a/jetson_runtime/docker-compose.yml +++ b/jetson_runtime/docker-compose.yml @@ -65,14 +65,14 @@ services: cpuset: '0' build: context: . - dockerfile: mongodb/Dockerfile + dockerfile: mongodb/Dockerfile restart: always command: ['python3', 'wrap.py'] network_mode: host stop_signal: SIGKILL environment: MONGO_INITDB_ROOT_USERNAME: admin - MONGO_INITDB_ROOT_PASSWORD: robot + MONGO_INITDB_ROOT_PASSWORD: robot volumes: - volume_mongodb_config:/config:rw - volume_mongodb_data:/data/db:rw From 1f9aaaa90de3a7f322585cf4593c65180a4736ba Mon Sep 17 00:00:00 2001 From: Ahmed Mahfouz Date: Fri, 11 Oct 2024 15:27:52 +0200 Subject: [PATCH 9/9] refactor and delete docker build test --- .github/workflows/docker-compose-test.yml | 60 --- Services_Documentation.txt | 197 --------- cli_wrapper.py | 2 +- .../BYODR_utils/JETSON_specific/gpio_relay.py | 41 -- .../BYODR_utils/JETSON_specific/utilities.py | 20 - .../BYODR_utils/PI_specific/gpio_relay.py | 48 --- .../BYODR_utils/PI_specific/utilities.py | 20 - jetson_runtime/BYODR_utils/common/__init__.py | 200 ---------- jetson_runtime/BYODR_utils/common/ipc.py | 309 --------------- jetson_runtime/BYODR_utils/common/location.py | 106 ----- jetson_runtime/BYODR_utils/common/navigate.py | 374 ------------------ jetson_runtime/BYODR_utils/common/option.py | 63 --- 
jetson_runtime/BYODR_utils/common/protocol.py | 59 --- jetson_runtime/BYODR_utils/common/ssh.py | 128 ------ jetson_runtime/BYODR_utils/common/testing.py | 140 ------- jetson_runtime/BYODR_utils/common/usbrelay.py | 252 ------------ jetson_runtime/BYODR_utils/common/video.py | 121 ------ .../BYODR_utils/common/websocket.py | 99 ----- 18 files changed, 1 insertion(+), 2238 deletions(-) delete mode 100644 .github/workflows/docker-compose-test.yml delete mode 100644 Services_Documentation.txt delete mode 100644 jetson_runtime/BYODR_utils/JETSON_specific/gpio_relay.py delete mode 100644 jetson_runtime/BYODR_utils/JETSON_specific/utilities.py delete mode 100644 jetson_runtime/BYODR_utils/PI_specific/gpio_relay.py delete mode 100644 jetson_runtime/BYODR_utils/PI_specific/utilities.py delete mode 100644 jetson_runtime/BYODR_utils/common/__init__.py delete mode 100644 jetson_runtime/BYODR_utils/common/ipc.py delete mode 100644 jetson_runtime/BYODR_utils/common/location.py delete mode 100644 jetson_runtime/BYODR_utils/common/navigate.py delete mode 100644 jetson_runtime/BYODR_utils/common/option.py delete mode 100644 jetson_runtime/BYODR_utils/common/protocol.py delete mode 100644 jetson_runtime/BYODR_utils/common/ssh.py delete mode 100644 jetson_runtime/BYODR_utils/common/testing.py delete mode 100644 jetson_runtime/BYODR_utils/common/usbrelay.py delete mode 100644 jetson_runtime/BYODR_utils/common/video.py delete mode 100644 jetson_runtime/BYODR_utils/common/websocket.py diff --git a/.github/workflows/docker-compose-test.yml b/.github/workflows/docker-compose-test.yml deleted file mode 100644 index 134ba5ee..00000000 --- a/.github/workflows/docker-compose-test.yml +++ /dev/null @@ -1,60 +0,0 @@ -name: Docker Compose Test - -on: - push: - branches: - - main - - '*' - -jobs: - Nano_Docker: - runs-on: ubuntu-20.04 - steps: - - name: Check out repository - uses: actions/checkout@v2 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - - name: Install 
Docker Compose - run: | - sudo apt-get update - sudo apt-get install -y docker-compose - - - name: Docker compose build - run: | - cd jetson_runtime - docker-compose build - - - name: Docker compose up - run: | - cd jetson_runtime - docker-compose up -d - sleep 30 - docker-compose down - - Pi_Docker: - runs-on: ubuntu-20.04 - steps: - - name: Check out repository - uses: actions/checkout@v2 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - - name: Install Docker Compose - run: | - sudo apt-get update - sudo apt-get install -y docker-compose - - - name: Docker compose build (raspi) - run: | - cd pi4_runtime - docker-compose build - - - name: Docker compose up (raspi) - run: | - cd pi4_runtime - docker-compose up -d - sleep 30 - docker-compose down diff --git a/Services_Documentation.txt b/Services_Documentation.txt deleted file mode 100644 index 75dffcb7..00000000 --- a/Services_Documentation.txt +++ /dev/null @@ -1,197 +0,0 @@ -Service Architecture -watchdog function -Smart Segment -A smart segment is part of the robot that houses computer systems inside that allow it to move autonomously. Connected segments make the robot act like a caterpillar. -This computer system includes a Raspberry Pi, a Jetson Nano, 2 cameras, a router and 2 motor controllers. - -Hardware -1) AI Camera (camera0) -A smart segment uses Hikvision PTZ Dome Network camera for its AI Camera. -IP: 192.168.1.64 --Input: Footage from its surroundings --Output: Sends H264 encoded video ouput to the Pi’s Docker container service called “Stream0”. - -2) Operator Camera (camera1) -The smart segment also uses a 2nd Hikvision PTZ Dome Network camera for an Operator Camera. -IP: 192.168.1.65 --Input: Footage from its surroundings --Output: Sends H264 encoded video output to the Pi’s Docker container service called “Stream1”. - -3) Raspberry Pi 4B -OS: balena-cloud-byodr- pi-raspberrypi4-64-2.99.27-v14.0.8 -IP: 192.168.1.32 -This OS allows it to communicate with Balena Cloud. 
Inside the Pi, there are 5 processes running, 4 of which run in their own separate Docker containers. - -4) Nvidia Jetson Nano -OS: balena-cloud-byodr-nano-jetson-nano-2.88.4+rev1-v12.11.0 -IP: 192.168.1.100 -This OS allows it to communicate with Balena Cloud. Inside the Nano, there are 10 processes running, all of which run in their own separate Docker containers. - -5) RUT-955 -IP: 192.168.1.1 -The router inside the segment is called RUT955 from Teltonika. The router has LAN, WAN, 4G, 5G and LTE capabilities. It’s ethernet connectivity is extended with a switch. The router is responsible for all internet connectivity between the segment and the rest of the Internet. -This router also includes an internal relay that works as a switch that lets the battery power the rest of the segment. Only when the router is booted up and the relay switch closes, will the segment receive power to the rest of its internal components. - -6) Motor Controller 1 -The segment uses the Mini FSESC6.7 from Flipsky. It is connected via USB to the ttyACM0 serial port of the Pi. --Input: Commands from the Pi. --Output: Sends power to its respective motor wheel in order to turn it according to its commands. - -7) Motor Controller 2 -The segment uses the Mini FSESC6.7 from Flipsky. It is connected via USB to the ttyACM1 serial port of the Pi. --Input: Commands from the Pi. --Output: Sends power to its respective motor wheel in order to turn it according to its commands. - -Software stack -1) Balena -From Balena, we use their Balena Cloud services, and also use BalenaOS on the Raspberry Pi and Jetson Nano, to make them compatible with Balena Cloud. From the Balena Cloud we can upload new versions of software, update segments OTA, reboot, connect via SSH, and manage segments remotely. - -2) Docker -Docker is a platform for building, shipping, and running applications in containers. 
Containers are lightweight, portable, and self-sufficient units that contain all the necessary software, libraries, and dependencies to run an application. Docker enables developers to package their applications into containers, which can be easily deployed and run on any platform that supports Docker. With Docker, developers can ensure that their applications run consistently across different environments, from development to production. -The BYODR project includes dockerfiles that can be used to build a Docker image for each service as well as instructions on how to deploy the image onto a robot using Balena Cloud. By using this approach, users can ensure that the software stack is consistent and reproducible across multiple robots, and can easily deploy updates and manage their fleet of cars from a central location. - -3) Zerotier -Zerotier is a “freemium” P2P (Peer to Peer) VPN service that allows devices with internet capabilities to securely connect to P2P virtual software-defined networks. -The Pi has a Zerotier instance running inside it. This means that it is equipped to work with a Zerotier client that is running on our devices, so that we can add the Pi to our VPN network. -Similarly to the Pi, the Nano also has the same functionality regarding Zerotier, although arguably more important here, since it allows the User to connect to the Nano, and by extension the Web server, via a secure zerotier network. - -4) Wireguard -Similarly to Zerotier, Wireguard is also a VPN. The difference here is that Wireguard is used by the Nano in every network procedure it has to go through for safety. Since the Nano has plenty more processes that require a network connection, compared to the Pi, Wireguard is an extra layer of security against attacks. This process is running inside a docker container. --Q: Why do we use Wireguard if we have ZT? --A: Since ZeroTier and WireGuard look similar, the project uses both ZeroTier and WireGuard for different purposes. 
ZeroTier is used to create a secure network connection between the robot and the user's computer, while WireGuard is used to encrypt the data that is transmitted over that connection. Together, these technologies provide a secure and reliable way for users to remotely control the robots. - -Raspberry Pi docker service descriptions: -1) Stream0 --Input: Receives video stream from the AI camera. --Function: Creates a high quality H264 video output stream --Output: Sends the stream via RTSP to the web server located in Teleop. --Q1: Why does the Pi create the streams, and not just send them from the cameras directly to the nano, bypassing the Pi? - - - -2) Stream1 --Input: Receives video stream from the Operator camera. --Purpose: Similarly to the process above, it creates a high quality H264 video output stream. --Output: Sends the stream via RTSP to the web server located in Teleop. --Q1: How does the AI get the images for itself? From the H264 stream, or someplace else? - -3) Zerotier --Input: Receives input from the user, using the built-in command line. --Function: We can add the Pi to our VPN network. --Output: The Pi can communicate with the software-defined virtual networks that the user has built, via the internet. --Q1: Why does the Pi need the zerotier? - -4) Servos --Input: Receives commands in JSON format from Teleop, Inference, Pilot that request movement from the motors. --Function: Sets up a JSON server that listens on 0.0.0.0:5555 for commands from other processes. Listening to 0.0.0.0 means listening from anywhere that has network access to this device. It also sets up a JSON Publisher so that this service can send JSON data to any services that are listening to this service. Decodes commands received from the other services are decoded and stored in a deque. -This service also initiates an http server listening to port 9101 (default option). --Output: The commands are sent to the Motor controllers via the serial USB connection. 
--Q1: BalenaCloud lists another service called “pigpiod”. This service and the “servos” service both use the same image in their docker container. What does the pigpiod service do? --Q2: Why does this service have a JSON publisher? Who does it send data to? - -5) Battery Management System (BMS) [The only service that does not run in a docker container] --Input: Receives data from the BMS inside the battery itself. --Function: The Pi uses an I2C Pi Hat to communicate with the special battery that the segment uses. From here, the Pi can give a “pulse” to the battery in order to “reset” it. This system also allows for seamless use of 2 or more of the same battery, on the same segment. This process is exclusively hardware based, so it is not running in a container. --Output: Sends data to the BMS inside the battery. - -Jetson Nano docker service descriptions: -1) HTTPD --Input: Listens for data requests from Teleop, Pilot, Stream1, Stream0. The sources are listed in the configuration file (called haproxy.conf) that the proxy server uses. --Function: This service sets up a proxy server (Intermediate server between the client and the actual HTTP server) using HAProxy, for load balancing and request forwarding. --Output: Forwards requests to the same services as above, but taking into account load balancing. The destinations are listed in the configuration files that the proxy server uses. - -2) Inference --Input 1: Receives stream from AI camera with the socket url being 'ipc:///byodr/camera_0.sock' --Input 2: Receives routes from teleop with the socket url being 'ipc:///byodr/teleop.sock' --Input 3: Receives timestamps from teleop with the socket url being 'ipc:///byodr/teleop_c.sock' --Function: This service is responsible for an interface for generating steering angles and making predictions based on images. These actions are based on a trained neural network model. If this service has input from Teleop, they override the self-driving directions of the model. 
-This service also initiates an IPC server with url 'ipc:///byodr/inference_c.sock', and a JSON publisher with url 'ipc:///byodr/inference.sock' --Output: Sends data to the Servos service for proper motor control, but I cannot spot where and how this happens. --Q1: How do Inference, Pilot and Teleop work together, if they work together? --Q2: How does Inference send its data to the Pi for proper movement? - -3) Zerotier --Input: Receives input from the user, using the built-in command line. --Function: The Nano can be added into a virtual network. --Output: Can communicate securely with nodes of the same network. - -4) WireGuard --Input: Receives data from the Nano and the Router. --Function: Encrypts the data of the Nano. --Output: The data sent by the Nano towards the internet is encrypted. --Q: Why do we use WireGuard if we have ZT? --A: Since ZeroTier and WireGuard look similar, the project uses both ZeroTier and WireGuard for different purposes. ZeroTier is used to create a secure network connection between the robot and the user's computer, while WireGuard is used to encrypt the data that is transmitted over that connection. Together, these technologies provide a secure and reliable way for users to remotely control the robots. - -5) Teleop --Input 1: Receives stream from Stream0 service of the Pi --Input 2: Receives stream from Stream1 service of the Pi --Input 3: Receives data in a JSON format from the Pilot service --Input 4: Receives data in a JSON format from the Vehicle service --Input 5: Receives data in a JSON format from the Inference service --Input 6: Receives input from the Operator’s method of control --Function: This service includes a web server that listens for inputs from multiple sources that are later used to move the robot. The key presses from the operator are registered and reflected upon the robot using this service. -This service includes a logger that logs information regarding the manual control of the robot. 
-In addition, there is a function in this server that encodes the streams from the cameras to MJPEG. -It also hosts the site design files necessary to draw the Web App. --Output 1: Robot movement according to user’s commands --Output 2: Live video feed on the web app --Output 3: MJPEG stream capability --Output 4: Logs and messages produced during operation are stored in MongoDB. --Q1: How does “Teleop” translate user input into robot movement? --Q2: How does it communicate with the cameras, “Pilot”, “Inference” and “Vehicle”? --Q3: What data does it receive and send to the Pilot, Vehicle and Inference services? --Q4: From where does it receive its navigation images? --Q5: What does it do with the images? - -6) Vehicle --Input 1: Receives data in a JSON format from the Pilot service --Input 2: Receives data in a JSON format from the Teleop service --Function: This process sets up a server that connects to a CARLA simulator and communicates with it to control a self-driving car. Carla is an open-source simulator for autonomous driving research. It is used to simulate the robot’s behavior in a virtual environment. --Output 1: Sends the data to a server running an instance of CARLA. The data sent will properly represent a segment inside the simulation. --Output 2: Sends the data to a server running an instance of CARLA. The data sent will properly represent a segment inside the simulation. --Q1: Is this process exclusively used to send data to the CARLA simulator, and nothing else regarding the driving of the robot? --Q2: Where is the CARLA simulation hosted? --Q3: What do the video streams created in the server do exactly? - -7) ROS Node --Input 1: Receives data in a JSON format from the Pilot service --Input 2: Receives data in a JSON format from the Teleop service --Function: This service defines a ROS2 node which connects to a teleop node and a pilot node, and switches the driving mode to Autopilot or Manual, depending on user input. 
It also sets a max speed for the segment. --Output: Sends ROS commands in JSON format to the Pilot service --Q1: Why exactly do we need this service? --Q2: Does the communication with other services imply the existence of multiple nodes? --Q3: Why does it publish json data only to the Pilot, and not to both Pilot and Teleop? - -8) Pilot --Input 1: Receives data in a JSON format from the Teleop service --Input 2: Receives data in a JSON format from the Rosnode service --Input 3: Receives data in a JSON format from the Vehicle service --Input 4: Receives data in a JSON format from the Inference service --Input 5: Receives IPC chatter in a JSON format from the Teleop service --Function: This process sets up a JSON publisher and a local IPC server to send data to other services that have JSON collectors. It also is responsible for controlling the segment’s autonomous movement by using a pre-trained AI model. --Output: Sends JSON commands to the Servos service to enable the robot to drive autonomously. --Q1: Is this process exclusively used by the robot for its autonomous driving? --Q2: How does this service cooperate with “Inference”? --Q3: Why does this service start an HTTP server? --Q4: What is an IPC chatter json receiver? --Q5: What is a local ipc server? (It uses _c in its name => c = chatter?) - -9) MongoDB --Input: Receives data from the segment, and stores any and all logs produced by the other services (?) --Function: This service creates a default MongoDB user and starts a configurable MongoDB server on the local machine. --Output: Stores logs in its built-in database --Q1: What does the DB store inside it? --Q2: How does the DB get the data that it stores? - -10) FTPD -Input: Receives the newly trained model from the training server. --Function: This service creates a Pure-FTPd Server with a predefined set of commands. This server is used to send its training data to the server, and similarly, receive the trained model from the server. 
--Output: Sends data to the AI training server with parameters for its specific training. --Q1: Is this the code that connects the Nano to the Firezilla FTP server? (Mentioned in the readthedocs) --Q2: How does this ftp server store, send and receive data from the training server? - - -General questions -Q1: How json receivers and publishers work? Because this is used to send data one to another. -Q2: If all segments are in a zerotier network, does any data sent between the segments encrypted? - diff --git a/cli_wrapper.py b/cli_wrapper.py index 72cd6984..cb4d3dcb 100644 --- a/cli_wrapper.py +++ b/cli_wrapper.py @@ -164,11 +164,11 @@ def execute(self): if __name__ == "__main__": - # How to run: python cli_wrapper.py jetson_runtime push 192.168.1.100 --debug # Command-line parsing if len(sys.argv) < 2: print("Usage: python cli_wrapper.py [balena_command] [balena_args]") + # Example: python cli_wrapper.py jetson_runtime push 192.168.1.100 --debug sys.exit(1) target_dir_name = sys.argv[1] diff --git a/jetson_runtime/BYODR_utils/JETSON_specific/gpio_relay.py b/jetson_runtime/BYODR_utils/JETSON_specific/gpio_relay.py deleted file mode 100644 index 722a8776..00000000 --- a/jetson_runtime/BYODR_utils/JETSON_specific/gpio_relay.py +++ /dev/null @@ -1,41 +0,0 @@ -from __future__ import absolute_import - -import threading - -import Jetson.GPIO as GPIO # type: ignore - - -class ThreadSafeJetsonGpioRelay(object): - """ - Thread-safe class for managing a GPIO relay on a Jetson Nano. 
- """ - - def __init__(self, pin=15): - self.pin = pin - self.state = False # False for OFF, True for ON - self.lock = threading.Lock() - GPIO.setmode(GPIO.BOARD) # Set the pin numbering system to BOARD - GPIO.setup(self.pin, GPIO.OUT, initial=GPIO.LOW) - - def open(self): - """Turns the relay ON (sets the GPIO pin LOW).""" - with self.lock: - GPIO.output(self.pin, GPIO.LOW) - self.state = False - - def close(self): - """Turns the relay OFF (sets the GPIO pin HIGH).""" - with self.lock: - GPIO.output(self.pin, GPIO.HIGH) - self.state = True - - def toggle(self): - """Toggles the relay state.""" - with self.lock: - self.state = not self.state - GPIO.output(self.pin, GPIO.LOW if self.state else GPIO.HIGH) - - def states(self): - """Returns the current state of the relay.""" - with self.lock: - return self.state diff --git a/jetson_runtime/BYODR_utils/JETSON_specific/utilities.py b/jetson_runtime/BYODR_utils/JETSON_specific/utilities.py deleted file mode 100644 index 730993ea..00000000 --- a/jetson_runtime/BYODR_utils/JETSON_specific/utilities.py +++ /dev/null @@ -1,20 +0,0 @@ -import subprocess - - -class Nano: - @staticmethod - def get_ip_address(): - try: - ip_addresses = ( - subprocess.check_output( - "hostname -I | awk '{for (i=1; i<=NF; i++) if ($i ~ /^192\\.168\\./) print $i}'", - shell=True, - ) - .decode() - .strip() - ) - # Split in case there are multiple local IP addresses - return ip_addresses - except subprocess.CalledProcessError as e: - print(f"An error occurred: {e}") - return None diff --git a/jetson_runtime/BYODR_utils/PI_specific/gpio_relay.py b/jetson_runtime/BYODR_utils/PI_specific/gpio_relay.py deleted file mode 100644 index 27593b41..00000000 --- a/jetson_runtime/BYODR_utils/PI_specific/gpio_relay.py +++ /dev/null @@ -1,48 +0,0 @@ -from __future__ import absolute_import - -import threading - -import RPi.GPIO as GPIO # type: ignore - - -class ThreadSafePi4GpioRelay: - """Thread-safe class for managing a GPIO relay on a Raspberry Pi.""" - - def 
__init__(self, pin=15): - self.pin = pin - self.state = False # False for OFF, True for ON - self.lock = threading.Lock() - GPIO.setmode(GPIO.BOARD) # Set the pin numbering system to BOARD - GPIO.setwarnings(False) - GPIO.setup(self.pin, GPIO.OUT, initial=GPIO.LOW) - - def open(self): - """Turns the relay ON (sets the GPIO pin LOW).""" - with self.lock: - print("opened the relay") - GPIO.output(self.pin, GPIO.LOW) - self.state = False - - def close(self): - """Turns the relay OFF (sets the GPIO pin HIGH).""" - with self.lock: - GPIO.output(self.pin, GPIO.HIGH) - current_state = GPIO.input(self.pin) - print(f"closed the relay, current state: {current_state}") - self.state = True - - def toggle(self): - """Toggles the relay state.""" - with self.lock: - self.state = not self.state - GPIO.output(self.pin, GPIO.LOW if self.state else GPIO.HIGH) - - def get_state(self): - """Returns the current state of the relay.""" - with self.lock: - return self.state - - def cleanup(self): - """Cleans up the GPIO state.""" - GPIO.cleanup(self.pin) # Reset the specific pin before setup - GPIO.setup(self.pin, GPIO.OUT, initial=GPIO.LOW) diff --git a/jetson_runtime/BYODR_utils/PI_specific/utilities.py b/jetson_runtime/BYODR_utils/PI_specific/utilities.py deleted file mode 100644 index e5e89eab..00000000 --- a/jetson_runtime/BYODR_utils/PI_specific/utilities.py +++ /dev/null @@ -1,20 +0,0 @@ -import subprocess - - -class RaspberryPi: - @staticmethod - def get_ip_address(): - try: - ip_addresses = ( - subprocess.check_output( - "hostname -I | awk '{for (i=1; i<=NF; i++) if ($i ~ /^192\\.168\\./) print $i}'", - shell=True, - ) - .decode() - .strip() - ) - # Split in case there are multiple local IP addresses - return ip_addresses - except subprocess.CalledProcessError as e: - print(f"An error occurred: {e}") - return None diff --git a/jetson_runtime/BYODR_utils/common/__init__.py b/jetson_runtime/BYODR_utils/common/__init__.py deleted file mode 100644 index b9d7d751..00000000 --- 
a/jetson_runtime/BYODR_utils/common/__init__.py +++ /dev/null @@ -1,200 +0,0 @@ -from __future__ import absolute_import - -import collections -import logging -import multiprocessing -import signal -import time -import traceback -from abc import ABCMeta, abstractmethod -from cProfile import Profile -from contextlib import contextmanager - -import numpy as np -import six - -from BYODR_utils.common.option import hash_dict - -logger = logging.getLogger(__name__) - - -def timestamp(value=None): - """ - Timestamp as integer to retain precision e.g. when serializing to string. - """ - ts = time.time() if value is None else value - return int(ts * 1e6) - - -def entropy(x, eps=1e-20): - return abs(-np.sum(x * np.log(np.clip(x, eps, 1.0)))) - - -class Profiler(Profile): - """ - Custom Profile class with a __call__() context manager method to enable profiling. - Use: - profiler = Profiler() - with profiler(): - - profiler.dump_stats('prof.stats') - -- - python -c "import pstats; p = pstats.Stats('prof.stats'); p.sort_stats('time').print_stats(50)" - python -c "import pstats; p = pstats.Stats('prof.stats'); p.sort_stats('cumulative').print_stats(50)" - """ - - def __init__(self, *args, **kwargs): - super(Profile, self).__init__(*args, **kwargs) - self.disable() # Profiling initially off. - - @contextmanager - def __call__(self): - self.enable() - yield # Execute code to be profiled. 
- self.disable() - - -class Configurable(six.with_metaclass(ABCMeta, object)): - def __init__(self): - self._lock = multiprocessing.Lock() - self._errors = [] - self._hash = -1 - self._num_starts = 0 - - # noinspection PyUnusedLocal - @abstractmethod - def internal_start(self, **kwargs): - return [] - - @abstractmethod - def internal_quit(self, restarting=False): - pass - - def get_errors(self): - return self._errors - - def get_num_starts(self): - return self._num_starts - - def is_reconfigured(self, **kwargs): - return self._hash != hash_dict(**kwargs) - - def start(self, **kwargs): - with self._lock: - self._errors = self.internal_start(**kwargs) - self._hash = hash_dict(**kwargs) - self._num_starts += 1 - - def quit(self, restarting=False): - with self._lock: - self.internal_quit(restarting) - - def join(self): - self.quit() - - def restart(self, **kwargs): - _reconfigured = self.is_reconfigured(**kwargs) - if _reconfigured: - if self._num_starts > 0: - self.quit(restarting=True) - self.start(**kwargs) - return _reconfigured - - -class Application(object): - def __init__(self, run_hz=10, quit_event=None): - self.logger = logging.getLogger(__name__) - self._hz = run_hz - self._sleep = 0.100 - self.set_hz(run_hz) - if quit_event is None: - self.quit_event = multiprocessing.Event() - signal.signal(signal.SIGINT, lambda sig, frame: self._interrupt()) - signal.signal(signal.SIGTERM, lambda sig, frame: self._interrupt()) - else: - self.quit_event = quit_event - # Recent window to calculate the actual processing frequency. 
- self._rt_queue = collections.deque(maxlen=50) - - def _interrupt(self): - self.logger.info("Received interrupt, quitting.") - self.quit() - - @staticmethod - def _latest_or_none(receiver, patience): - candidate = receiver() - _time = candidate.get("time", 0) if candidate is not None else 0 - _on_time = (timestamp() - _time) < patience - return candidate if _on_time else None - - def get_hz(self): - return self._hz - - def get_actual_hz(self): - return (1.0 / np.mean(self._rt_queue)) if self._rt_queue else 0 - - def set_hz(self, hz): - self._hz = hz - self._sleep = 1.0 / hz - - def active(self): - return not self.quit_event.is_set() - - def quit(self): - self.quit_event.set() - - def setup(self): - pass - - def step(self): - pass - - def finish(self): - pass - - def run(self): - try: - self.setup() - while self.active(): - _start = time.time() - self.step() - _duration = time.time() - _start - time.sleep(max(0.0, self._sleep - _duration)) - # Report the actual clock frequency which includes the user specified wait time. - self._rt_queue.append(time.time() - _start) - except Exception as e: - # Quit first to be sure - the traceback may in some cases raise another exception. 
- self.quit() - self.logger.error(e) - self.logger.error(traceback.format_exc()) - except KeyboardInterrupt: - self.quit() - finally: - self.finish() - - -class ApplicationExit(object): - def __init__(self, event, cb): - self._event = event - self._cb = cb - - def __call__(self, *args, **kwargs): - if self._event.is_set(): - try: - self._cb() - except Exception as e: - logger.info(e) - logger.info(traceback.format_exc()) - - -class PeriodicCallTrace(object): - def __init__(self, seconds=1.0): - self._seconds_micro = seconds * 1e6 - self._last = timestamp() - - def __call__(self, *args, **kwargs): - _callback = args[0] - _now = timestamp() - if _now - self._last > self._seconds_micro: - _callback() - self._last = _now diff --git a/jetson_runtime/BYODR_utils/common/ipc.py b/jetson_runtime/BYODR_utils/common/ipc.py deleted file mode 100644 index 78600c85..00000000 --- a/jetson_runtime/BYODR_utils/common/ipc.py +++ /dev/null @@ -1,309 +0,0 @@ -from __future__ import absolute_import - -import collections -import datetime -import json -import logging -import multiprocessing -import os -import sys -import threading -import time - -import numpy as np -import zmq - -from BYODR_utils.common import timestamp - -if sys.version_info > (3,): - # noinspection PyShadowingBuiltins - buffer = memoryview - - def receive_string(subscriber): - return subscriber.recv_string() - - def send_string(sender, val, flags=0): - return sender.send_string(val, flags) - -else: - - def receive_string(subscriber): - return subscriber.recv() - - def send_string(sender, val, flags=0): - return sender.send(val, flags) - - -logger = logging.getLogger(__name__) - - -class JSONPublisher(object): - def __init__(self, url, topic="", hwm=1, clean_start=True): - if clean_start and url.startswith("ipc://") and os.path.exists(url[6:]): - os.remove(url[6:]) - publisher = zmq.Context().socket(zmq.PUB) - publisher.set_hwm(hwm) - publisher.bind(url) - self._publisher = publisher - self._topic = topic - - def 
publish(self, data, topic=None): - _topic = self._topic if topic is None else topic - if data is not None: - data = dict((k, v) for k, v in data.items() if v is not None) - send_string(self._publisher, "{}:{}".format(_topic, json.dumps(data)), zmq.NOBLOCK) - - -class ImagePublisher(object): - def __init__(self, url, topic="", hwm=1, clean_start=True): - if clean_start and url.startswith("ipc://") and os.path.exists(url[6:]): - os.remove(url[6:]) - publisher = zmq.Context().socket(zmq.PUB) - publisher.set_hwm(hwm) - publisher.bind(url) - self._publisher = publisher - self._topic = topic.encode("utf-8") # Encode the topic to bytes at initialization - - def publish(self, _img, topic=None): - _topic = self._topic if topic is None else topic.encode("utf-8") - # json.dumps(...) returns a string, it needs to be encoded into bytes. - self._publisher.send_multipart( - [ - _topic, - json.dumps(dict(time=timestamp(), shape=_img.shape)).encode("utf-8"), - np.ascontiguousarray(_img, dtype=np.uint8), - ], - flags=zmq.NOBLOCK, - ) - - -class JSONReceiver(object): - def __init__(self, url, topic=b"", hwm=1, receive_timeout_ms=2, pop=False): - subscriber = zmq.Context().socket(zmq.SUB) - subscriber.set_hwm(hwm) - subscriber.setsockopt(zmq.RCVTIMEO, receive_timeout_ms) - subscriber.setsockopt(zmq.LINGER, 0) - subscriber.connect(url) - subscriber.setsockopt(zmq.SUBSCRIBE, topic) - self._pop = pop - self._unpack = hwm == 1 - self._subscriber = subscriber - self._lock = threading.Lock() - self._queue = collections.deque(maxlen=hwm) - - def consume(self): - with self._lock: - try: - # Does not replace local queue messages when none are available. 
- self._queue.appendleft(json.loads(receive_string(self._subscriber).split(":", 1)[1])) - except zmq.Again: - pass - - def get(self): - _view = self._queue[0] if (self._queue and self._unpack) else list(self._queue) if self._queue else None - if self._pop: - self._queue.clear() - return _view - - def peek(self): - return self._queue[0] if self._queue else None - - -class CollectorThread(threading.Thread): - def __init__(self, receivers, event=None, hz=1000): - super(CollectorThread, self).__init__() - _list = isinstance(receivers, tuple) or isinstance(receivers, list) - self._receivers = receivers if _list else [receivers] - self._quit_event = multiprocessing.Event() if event is None else event - self._sleep = 1.0 / hz - - def get(self, index=0): - # Get the latest message without blocking. - # _receiver.consume() -- blocks; perform at thread.run() - return self._receivers[index].get() - - def peek(self, index=0): - return self._receivers[index].peek() - - def quit(self): - self._quit_event.set() - - def run(self): - while not self._quit_event.is_set(): - # Empty the receiver queues to not block upstream senders. 
- list(map(lambda receiver: receiver.consume(), self._receivers)) - time.sleep(self._sleep) - - -def json_collector(url, topic, event, receive_timeout_ms=1000, hwm=1, pop=False): - return CollectorThread(JSONReceiver(url, topic, hwm=hwm, receive_timeout_ms=receive_timeout_ms, pop=pop), event=event) - - -class ReceiverThread(threading.Thread): - def __init__(self, url, event=None, topic=b"", hwm=1, receive_timeout_ms=1): - super(ReceiverThread, self).__init__() - subscriber = zmq.Context().socket(zmq.SUB) - subscriber.set_hwm(hwm) - subscriber.setsockopt(zmq.RCVTIMEO, receive_timeout_ms) - subscriber.setsockopt(zmq.LINGER, 0) - subscriber.connect(url) - subscriber.setsockopt(zmq.SUBSCRIBE, topic) - self._subscriber = subscriber - self._quit_event = multiprocessing.Event() if event is None else event - self._queue = collections.deque(maxlen=1) - self._listeners = [] - - def add_listener(self, c): - self._listeners.append(c) - - def get_latest(self): - return self._queue[0] if bool(self._queue) else None - - def pop_latest(self): - return self._queue.popleft() if bool(self._queue) else None - - def quit(self): - self._quit_event.set() - - def run(self): - while not self._quit_event.is_set(): - try: - _latest = json.loads(receive_string(self._subscriber).split(":", 1)[1]) - self._queue.appendleft(_latest) - list(map(lambda x: x(_latest), self._listeners)) - except zmq.Again: - pass - - -class CameraThread(threading.Thread): - def __init__(self, url, event, topic=b"", hwm=1, receive_timeout_ms=25): - super(CameraThread, self).__init__() - subscriber = zmq.Context().socket(zmq.SUB) - subscriber.set_hwm(hwm) - subscriber.setsockopt(zmq.RCVTIMEO, receive_timeout_ms) - subscriber.setsockopt(zmq.LINGER, 0) - subscriber.connect(url) - subscriber.setsockopt(zmq.SUBSCRIBE, topic) - self._subscriber = subscriber - self._quit_event = event - self._images = collections.deque(maxlen=1) - - def capture(self): - return self._images[0] if bool(self._images) else (None, None) - - def 
run(self): - while not self._quit_event.is_set(): - try: - [_, md, data] = self._subscriber.recv_multipart() - md = json.loads(md) - height, width, channels = md["shape"] - img = np.frombuffer(buffer(data), dtype=np.uint8) - img = img.reshape((height, width, channels)) - self._images.appendleft((md, img)) - except ValueError as e: - logger.warning(e) - except zmq.Again: - pass - - -class JSONServerThread(threading.Thread): - def __init__(self, url, event, hwm=1, receive_timeout_ms=50): - super(JSONServerThread, self).__init__() - server = zmq.Context().socket(zmq.REP) - server.set_hwm(hwm) - server.setsockopt(zmq.RCVTIMEO, receive_timeout_ms) - server.setsockopt(zmq.LINGER, 0) - server.bind(url) - self._server = server - self._quit_event = event - self._queue = collections.deque(maxlen=1) - self._listeners = [] - - def add_listener(self, c): - self._listeners.append(c) - - def on_message(self, message): - self._queue.appendleft(message) - list(map(lambda x: x(message), self._listeners)) - - def get_latest(self): - return self._queue[0] if bool(self._queue) else None - - def pop_latest(self): - return self._queue.popleft() if bool(self._queue) else None - - def serve(self, request): - return {} - - def run(self): - while not self._quit_event.is_set(): - try: - message = json.loads(receive_string(self._server)) - self.on_message(message) - send_string(self._server, json.dumps(self.serve(message))) - except zmq.Again: - pass - - -class LocalIPCServer(JSONServerThread): - def __init__(self, name, url, event, receive_timeout_ms=50): - super(LocalIPCServer, self).__init__(url, event, receive_timeout_ms) - self._name = name - self._m_startup = collections.deque(maxlen=1) - self._m_capabilities = collections.deque(maxlen=1) - - def register_start(self, errors, capabilities=None): - capabilities = {} if capabilities is None else capabilities - self._m_startup.append((datetime.datetime.utcnow().strftime("%b %d %H:%M:%S.%s UTC"), errors)) - 
self._m_capabilities.append(capabilities) - - def serve(self, message): - try: - if message.get("request") == "system/startup/list" and self._m_startup: - ts, errors = self._m_startup[-1] - messages = ["No errors"] - if errors: - d_errors = dict() # Merge to obtain distinct keys. - [d_errors.update({error.key: error.message}) for error in errors] - messages = ["{} - {}".format(k, d_errors[k]) for k in d_errors.keys()] - return {self._name: {ts: messages}} - elif message.get("request") == "system/service/capabilities" and self._m_capabilities: - return {self._name: self._m_capabilities[-1]} - except IndexError: - pass - return {} - - -class JSONZmqClient(object): - def __init__(self, urls, hwm=1, receive_timeout_ms=200): - self._urls = urls if isinstance(urls, list) else [urls] - self._receive_timeout = receive_timeout_ms - self._context = None - self._socket = None - self._hwm = hwm - self._create(self._urls) - - def _create(self, locations): - context = zmq.Context() - socket = context.socket(zmq.REQ) - socket.set_hwm(self._hwm) - socket.setsockopt(zmq.RCVTIMEO, self._receive_timeout) - socket.setsockopt(zmq.LINGER, 0) - [socket.connect(location) for location in locations] - self._context = context - self._socket = socket - - def quit(self): - if self._context is not None: - self._context.destroy() - - def call(self, message): - ret = {} - for i in range(len(self._urls)): - try: - send_string(self._socket, json.dumps(message), zmq.NOBLOCK) - ret.update(json.loads(receive_string(self._socket))) - except zmq.ZMQError: - j = i + 1 - self._create(self._urls[j:] + self._urls[:j]) - return ret diff --git a/jetson_runtime/BYODR_utils/common/location.py b/jetson_runtime/BYODR_utils/common/location.py deleted file mode 100644 index 953cdbee..00000000 --- a/jetson_runtime/BYODR_utils/common/location.py +++ /dev/null @@ -1,106 +0,0 @@ -import collections - -import cachetools -from geographiclib.geodesic import Geodesic - - -def _distance_bearing(from_position, to_position): 
- c_latitude, c_longitude = to_position - p_latitude, p_longitude = from_position - # noinspection PyUnresolvedReferences - _g = Geodesic.WGS84.Inverse(p_latitude, p_longitude, c_latitude, c_longitude) - # Distance in meters. - _distance = _g["s12"] - # The azimuth is the heading measured clockwise from north. - # azi2 is the "forward" azimuth, i.e., the heading that takes you beyond point 2 not back to point 1. - _bearing = _g["azi2"] - return _distance, _bearing - - -class GeoTracker(object): - """ - A class for tracking geographical positions with the ability to calculate bearing between positions - based on a minimum distance criterion. - - Attributes: - _min_distance (float): Minimum distance in meters required to consider positions distinct for bearing calculations. - _positions (collections.deque): A deque to store the sequence of recent geographic positions. - _cache (cachetools.TTLCache): Cache for storing recent position calculations to reduce computation. - """ - - def __init__(self, cache_ttl=10.0, min_distance_meters=0.10): - self._min_distance = min_distance_meters - self._positions = collections.deque(maxlen=8) - self._cache = cachetools.TTLCache(maxsize=100, ttl=cache_ttl) - - def _begin(self, current): - """ - Handles the initialization or reset of tracking when there are no or insufficient previous positions. - - Parameters: - current (tuple or None): The current geographic position as a tuple (latitude, longitude) or None. - - Returns: - tuple: The current or last known latitude, longitude, and None for bearing (since bearing cannot be calculated). 
- """ - n_positions = len(self._positions) - if n_positions == 0: - if current is None: - return None, None, None - else: - self._positions.append(current) - return current[0], current[1], None - if current is None: - current = self._positions[-1] - return current[0], current[1], None - else: - distance, bearing = _distance_bearing(self._positions[0], (current[0], current[1])) - if distance >= self._min_distance: - self._positions.append(current) - return current[0], current[1], None - - def _track(self, current): - """ - Tracks the current position and calculates the bearing if possible based on the stored positions. - - Parameters: - current (tuple or None): The current geographic position as a tuple (latitude, longitude) or None. - - Returns: - tuple: Latitude, longitude, and bearing (if calculable); otherwise, None for the bearing. - """ - n_positions = len(self._positions) - if n_positions < 2: - return self._begin(current) - if current is None: - current = self._positions[-1] - distance, bearing = _distance_bearing(self._positions[0], (current[0], current[1])) - return current[0], current[1], bearing - else: - distance, bearing = _distance_bearing(self._positions[0], (current[0], current[1])) - if distance >= self._min_distance: - self._positions.append(current) - return current[0], current[1], bearing - - def clear(self): - """ - Clears the stored positions and cache, resetting the tracker to an initial state. - """ - self._positions.clear() - - def track(self, position): - """ - Retrieves or calculates the latitude, longitude, and bearing of a given position, utilizing caching to optimize. - - Parameters: - position (tuple): The current geographic position as a tuple (latitude, longitude). - - Returns: - tuple: Latitude, longitude, and bearing (if calculable based on movement and distance); otherwise, None for the bearing. 
- """ - _key = position - res = self._cache.get(_key) - if res is None: - res = self._track(position) - self._cache[_key] = res - return res diff --git a/jetson_runtime/BYODR_utils/common/navigate.py b/jetson_runtime/BYODR_utils/common/navigate.py deleted file mode 100644 index 028811ba..00000000 --- a/jetson_runtime/BYODR_utils/common/navigate.py +++ /dev/null @@ -1,374 +0,0 @@ -from __future__ import absolute_import - -import glob -import json -import logging -import multiprocessing -import os -import threading -from abc import ABCMeta, abstractmethod - -from BYODR_utils.common import timestamp - - -logger = logging.getLogger(__name__) - - -def _translate_navigation_direction(value): - if value is not None: - value = value.lower() - if value == "left": - return NavigationCommand.LEFT - elif value == "right": - return NavigationCommand.RIGHT - elif value == "ahead": - return NavigationCommand.AHEAD - elif value == "default": - return NavigationCommand.DEFAULT - # No change in direction. 
- return None - - -class NavigationCommand(object): - DEFAULT, LEFT, AHEAD, RIGHT = (0, 1, 2, 3) - - def __init__(self, sleep=None, direction=None, speed=None): - self._time = None - self._sleep = sleep - self._direction = direction - self._speed = speed - - def get_time(self): - return self._time - - def set_time(self, value): - self._time = value - return self - - def get_sleep(self): - return self._sleep - - def get_direction(self): - return self._direction - - def get_speed(self): - return self._speed - - -class NavigationInstructions(object): - def __init__(self, version=1, commands=None): - self._version = version - commands = commands or [] - if not isinstance(commands, tuple) and not isinstance(commands, list): - commands = [commands] - self._commands = commands - - def get_commands(self): - return self._commands - - -def _parse_navigation_instructions(m): - """ - { - "version": 1, - "pilot": {"direction": "ahead" } - } - - { - "version": 1, - "pilot": [{"speed": 0}, {"sleep": 30, "direction": "left", "speed": 1}] - } - """ - - version = m.get("version", 1) - commands = [] - pilot = m.get("pilot") - if pilot is not None: - nodes = pilot if isinstance(pilot, list) else [pilot] - for node in nodes: - commands.append( - NavigationCommand( - sleep=None if node.get("sleep") is None else float(node.get("sleep")), - direction=_translate_navigation_direction(node.get("direction")), - speed=None if node.get("speed") is None else float(node.get("speed")), - ) - ) - return NavigationInstructions(version, commands) - - -class AbstractRouteDataSource(object): - __metaclass__ = ABCMeta - - @abstractmethod - def __len__(self): - raise NotImplementedError() - - @abstractmethod - def load_routes(self): - raise NotImplementedError() - - @abstractmethod - def list_routes(self): - raise NotImplementedError() - - @abstractmethod - def get_selected_route(self): - raise NotImplementedError() - - @abstractmethod - def open(self, route_name=None): - raise NotImplementedError() - - 
@abstractmethod - def is_open(self): - raise NotImplementedError() - - @abstractmethod - def close(self): - raise NotImplementedError() - - @abstractmethod - def quit(self): - raise NotImplementedError() - - @abstractmethod - def list_navigation_points(self): - raise NotImplementedError() - - @abstractmethod - def has_navigation_point(self, route, point): - raise NotImplementedError() - - @abstractmethod - def list_all_images(self): - raise NotImplementedError() - - @abstractmethod - def get_image(self, image_id): - raise NotImplementedError() - - @abstractmethod - def get_image_navigation_point(self, idx): - raise NotImplementedError() - - @abstractmethod - def get_image_navigation_point_id(self, idx): - raise NotImplementedError() - - @abstractmethod - def get_instructions(self, point): - raise NotImplementedError() - - -class FileSystemRouteDataSource(AbstractRouteDataSource): - - def __init__(self, directory, fn_load_image=(lambda x: x), load_instructions=True): - self.directory = directory - self.fn_load_image = fn_load_image - self.load_instructions = load_instructions - self.quit_event = multiprocessing.Event() - self._load_timestamp = 0 - self.routes = [] - self.selected_route = None - # Route specific data follows. - self.points = [] - self.all_images = [] - self.image_index_to_point = {} - self.image_index_to_point_id = {} - self.point_to_instructions = {} - self._check_exists() - - def _check_exists(self): - directory = self.directory - self._exists = directory is not None and os.path.exists(directory) and os.path.isdir(directory) - - def _reset(self): - self.selected_route = None - self.points = [] - self.all_images = [] - self.image_index_to_point = {} - self.image_index_to_point_id = {} - self.point_to_instructions = {} - self.quit_event.clear() - - def load_routes(self): - self._check_exists() - if not self._exists: - self._reset() - else: - _now = timestamp() # In micro seconds. 
- if _now - self._load_timestamp > 1e6: - # Each route is a sub-directory of the base folder. - self.routes = [d for d in os.listdir(self.directory) if not d.startswith(".")] - self._load_timestamp = _now - logger.info("Directory '{}' contains the following routes {}.".format(self.directory, self.routes)) - - @staticmethod - def _get_command(fname): - try: - with open(fname) as f: - return json.load(f) - except IOError: - return {} - - def __len__(self): - # Zero when no route selected. - return len(self.points) - - def list_routes(self): - return self.routes - - def get_selected_route(self): - return self.selected_route - - def open(self, route_name=None): - # Reopening the selected route constitutes a reload of the disk state. - self._reset() - if self._exists and route_name in self.routes: - try: - # Load the route navigation points. - _route_directory = os.path.join(self.directory, route_name) - if os.path.exists(_route_directory) and os.path.isdir(_route_directory): - np_dirs = sorted([d for d in os.listdir(_route_directory) if not d.startswith(".")]) - logger.info("{} -> {}".format(route_name, np_dirs)) - # Take the existing sort-order. - image_id = 0 - point_id = 0 # Cannot enumerate as points without images must be skipped. - for point_name in np_dirs: - if self.quit_event.is_set(): - break - np_dir = os.path.join(self.directory, route_name, point_name) - _pattern = np_dir + os.path.sep - im_files = sorted([f for f_ in [glob.glob(_pattern + e) for e in ("*.jpg", "*.jpeg")] for f in f_]) - if len(im_files) < 1: - logger.info("Skipping point '{}' as there are no images for it.".format(point_name)) - continue - if self.load_instructions: - contents = self._get_command(os.path.join(np_dir, "command.json")) - contents = contents if contents else self._get_command(os.path.join(np_dir, point_name + ".json")) - self.point_to_instructions[point_name] = _parse_navigation_instructions(contents) - # Collect images by navigation point. 
- for im_file in im_files: - self.all_images.append(self.fn_load_image(im_file)) - self.image_index_to_point[image_id] = point_name - self.image_index_to_point_id[image_id] = point_id - image_id += 1 - # Accept the point. - self.points.append(point_name) - point_id += 1 - self.selected_route = route_name - except OSError as e: - logger.info(e) - - def is_open(self): - return self.selected_route in self.routes - - def close(self): - self._reset() - - def quit(self): - self.quit_event.set() - - def list_navigation_points(self): - return self.points - - def has_navigation_point(self, route, point): - _dir = os.path.join(self.directory, route, point) - return os.path.exists(_dir) and os.path.isdir(_dir) - - def list_all_images(self): - return self.all_images - - def get_image(self, image_id): - image_id = -1 if image_id is None else image_id - images = self.list_all_images() - return images[image_id] if len(images) > image_id >= 0 else None - - def get_image_navigation_point(self, idx): - return self.image_index_to_point[idx] - - def get_image_navigation_point_id(self, idx): - return self.image_index_to_point_id[idx] - - def get_instructions(self, point): - return self.point_to_instructions.get(point) - - -class ReloadableDataSource(AbstractRouteDataSource): - def __init__(self, delegate): - self._delegate = delegate - self._lock = threading.Lock() - # Cache the most recent selected route. 
- self._last_listed_routes = [] - self._last_selected_route = None - - def _do_safe(self, fn): - _acquired = self._lock.acquire(False) - try: - return fn(_acquired) - finally: - if _acquired: - self._lock.release() - - def __len__(self): - return self._do_safe(lambda acquired: len(self._delegate) if acquired else 0) - - def load_routes(self): - with self._lock: - self._delegate.load_routes() - - def list_routes(self): - _acquired = self._lock.acquire(False) - try: - if _acquired: - self._last_listed_routes = self._delegate.list_routes() - return self._last_listed_routes - finally: - if _acquired: - self._lock.release() - - def get_selected_route(self): - _acquired = self._lock.acquire(False) - try: - if _acquired: - self._last_selected_route = self._delegate.get_selected_route() - return self._last_selected_route - finally: - if _acquired: - self._lock.release() - - def open(self, route_name=None): - with self._lock: - self._delegate.open(route_name) - - def is_open(self): - return self._do_safe(lambda acquired: self._delegate.is_open() if acquired else False) - - def close(self): - with self._lock: - self._delegate.close() - - def quit(self): - with self._lock: - self._delegate.quit() - - def list_navigation_points(self): - return self._do_safe(lambda acquired: self._delegate.list_navigation_points() if acquired else []) - - def has_navigation_point(self, route, point): - return self._do_safe(lambda acquired: self._delegate.has_navigation_point(route, point) if acquired else False) - - def list_all_images(self): - return self._do_safe(lambda acquired: self._delegate.list_all_images() if acquired else []) - - def get_image(self, image_id): - return self._do_safe(lambda acquired: self._delegate.get_image(image_id) if acquired else None) - - def get_image_navigation_point(self, idx): - return self._do_safe(lambda acquired: self._delegate.get_image_navigation_point(idx) if acquired else None) - - def get_image_navigation_point_id(self, idx): - return 
self._do_safe(lambda acquired: self._delegate.get_image_navigation_point_id(idx) if acquired else None) - - def get_instructions(self, point): - return self._do_safe(lambda acquired: self._delegate.get_instructions(point) if acquired else None) diff --git a/jetson_runtime/BYODR_utils/common/option.py b/jetson_runtime/BYODR_utils/common/option.py deleted file mode 100644 index 166f07fc..00000000 --- a/jetson_runtime/BYODR_utils/common/option.py +++ /dev/null @@ -1,63 +0,0 @@ -class PropertyError(ValueError): - def __init__(self, key, msg, suggestions=None): - if suggestions is None: - suggestions = list() - self.key = key - self.message = msg - self.suggestions = suggestions - - def __str__(self): - return "{} - {}".format(self.key, self.message) - - -def str_to_bool(value): - if value.lower() in ("true", "1", "t", "y", "yes"): - return True - elif value.lower() in ("false", "0", "f", "n", "no"): - return False - else: - raise ValueError(f"Cannot convert {value} to a boolean.") - - -def _parse(key, fn_type=(lambda x: x), **kwargs): - try: - return fn_type(kwargs[key]) - except (ValueError, TypeError) as e: - raise PropertyError(key, str(e)) - - -def parse_option(key, fn_type=(lambda x: x), default_value=None, errors=None, **kwargs): - """ - Attempts to parse an option from the given keyword arguments based on the specified key. - - If the key is missing and a default value is provided, the default value is used instead. - Parameters: - - key (str): The key to look for in the keyword arguments. - - fn_type (callable, optional): A function to apply to the value of the found key. Defaults to a no-op lambda that returns the value unchanged. - - default_value (any, optional): The default value to use if the key is not found in the keyword arguments. Defaults to None. - - errors (list, optional): A list to which any encountered PropertyErrors will be appended. If None, a new list is created. Defaults to None. 
- - **kwargs: Additional keyword arguments among which the function will look for the specified key. - - Returns: - - The value associated with 'key' in the keyword arguments after applying 'fn_type', the default value if the key is missing, or raises a KeyError if the key is missing and no default value is provided. - - Raises: - - KeyError: If the key is not found in the keyword arguments and no default value is provided. - - PropertyError: If there is a ValueError or TypeError when applying 'fn_type' to the value associated with 'key'. - """ - errors = [] if errors is None else errors - try: - if fn_type is bool: - # Use custom boolean parser - return str_to_bool(kwargs[key]) - else: - return _parse(key, fn_type=fn_type, **kwargs) - except KeyError: - if default_value is None: - errors.append(PropertyError(key, "The key is missing and no default value has been set")) - else: - return fn_type(default_value) - - -def hash_dict(**m): - return hash("".join(str(k) + str(m.get(k)) for k in sorted(m.keys()))) diff --git a/jetson_runtime/BYODR_utils/common/protocol.py b/jetson_runtime/BYODR_utils/common/protocol.py deleted file mode 100644 index 672a0d92..00000000 --- a/jetson_runtime/BYODR_utils/common/protocol.py +++ /dev/null @@ -1,59 +0,0 @@ -from __future__ import absolute_import - -from BYODR_utils.common import timestamp - - - -class MessageStreamProtocol(object): - """ - Safety: - Protocol uses 2 timestamps, remote and local, and does not require the clocks to be synced. - Local means receiver side so incoming messages. - Because the clocks are not synced remote and local timestamps are not directly comparable. - Timestamps: - 1. remote as reported by the sender - 2. local as recorded by the receiver - - - The protocol can be validated or invalidated. - There is a warm-up period with invalidated protocol, after system reboot. - - - The incoming stream needs to be continuous (or uninterrupted) and recent (timely). 
- Continuity violation - Age violation - """ - - def __init__(self, max_age_ms=200, max_delay_ms=250): - self._max_age_micro = max_age_ms * 1000.0 - self._max_delay_micro = max_delay_ms * 1000.0 - # There is currently no distinction in violation types. - self._n_violations = 0 - self._last_message_time = 0 - self._last_protocol_time = 0 - - def _violation(self): - self._n_violations = 1 if self._n_violations < 1 else min(1e4, self._n_violations + 1) - - def _success(self): - self._n_violations = 0 if self._n_violations > 0 else max(-1e4, self._n_violations - 1) - - def reset(self): - self._n_violations = 0 - self._last_message_time = 0 - self._last_protocol_time = 0 - - def on_message(self, message_timestamp_micro): - # This is our time in microseconds. - local_time = timestamp() - if local_time - self._last_protocol_time > self._max_delay_micro: - self._violation() - elif message_timestamp_micro - self._last_message_time > self._max_age_micro: - self._violation() - else: - self._success() - self._last_message_time = message_timestamp_micro - self._last_protocol_time = local_time - - def check(self): - if timestamp() - self._last_protocol_time > self._max_delay_micro: - self._violation() - return self._n_violations diff --git a/jetson_runtime/BYODR_utils/common/ssh.py b/jetson_runtime/BYODR_utils/common/ssh.py deleted file mode 100644 index 521c7ef9..00000000 --- a/jetson_runtime/BYODR_utils/common/ssh.py +++ /dev/null @@ -1,128 +0,0 @@ -# TESTED AND WORKING ON -# Firmware version :RUT9_R_00.07.06.1 -# Firmware build date: 2024-01-02 11:11:13 -# Internal modem firmware version: SLM750_4.0.6_EQ101 -# Kernel version: 5.4.259 - - -import logging -import subprocess -import time -import traceback - -import paramiko - -# Declaring the logger -logging.basicConfig(format="%(levelname)s: %(asctime)s %(filename)s %(funcName)s %(lineno)d %(message)s", datefmt="%Y-%m-%d %H:%M:%S %p") - -logging.getLogger().setLevel(logging.INFO) -logger = logging.getLogger(__name__) - 
-paramiko_logger = logging.getLogger("paramiko") -paramiko_logger.setLevel(logging.CRITICAL) - - -class Router: - def __init__(self, ip=None, username="root", password="Modem001", port=22): - self.ip = ip if ip is not None else self.__get_nano_third_octet() - self.username = username - self.password = password - self.port = int(port) # Default value for SSH port - self.client = None - self.__open_ssh_connection() - - def __get_nano_third_octet(self): - try: - # Fetch the IP address - ip_address = subprocess.check_output("hostname -I | awk '{for (i=1; i<=NF; i++) if ($i ~ /^192\\.168\\./) print $i}'", shell=True).decode().strip().split()[0] - - # Trim off the last segment of the IP address - parts = ip_address.split(".") - network_prefix = ".".join(parts[:3]) + "." - router_ip = f"{network_prefix}1" - return router_ip - except subprocess.CalledProcessError as e: - print(f"An error occurred: {e}") - return None - - def __open_ssh_connection(self): - """ - Opens an SSH connection to the router. - """ - try: - self.client = paramiko.SSHClient() - self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - self.client.connect(self.ip, self.port, self.username, self.password) - except Exception as e: - logger.error(f"Failed to open SSH connection: {e}") - self.client = None - - def _execute_ssh_command(self, command, ip=None, file_path=None, file_contents=None, suppress_error_log=False): - """ - Executes a command on the router via SSH and returns the result. - Optionally, can write to a file on the router using SFTP. 
- """ - router_ip = ip if ip is not None else self.ip - temp_client = None - - try: - if router_ip != self.ip: - # Establish a temporary connection for a different router - temp_client = paramiko.SSHClient() - temp_client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) - temp_client.connect(router_ip, self.port, self.username, self.password) - client = temp_client - else: - # Check and use the persistent connection for the primary router - if not self.client or not self.client.get_transport() or not self.client.get_transport().is_active(): - self.__open_ssh_connection() - client = self.client - - if file_path and file_contents is not None: - # Handle SFTP file write operation - with client.open_sftp() as sftp: - with sftp.file(file_path, "w") as file: - file.write(file_contents) - # No command output in case of SFTP operation - return None - - # Execute the SSH command - stdin, stdout, stderr = client.exec_command(command) - result = stdout.read().decode().strip() - error = stderr.read().decode().strip() - - if error: - raise Exception(error) - - return result - - except Exception as e: - if not suppress_error_log: - # Log the error - caller = traceback.extract_stack(None, 2)[0][2] - logger.info(f"Error occurred in {caller}: {e}") - return None - - finally: - # Close the temporary client if it was used - if router_ip != self.ip and temp_client: - temp_client.close() - - def __close_ssh_connection(self): - """ - Closes the SSH connection to the router. 
- """ - if self.client: - self.client.close() - self.client = None - - def fetch_ssid(self): - """Get SSID of current segment""" - output = None - # The loop is to keep calling the ssh function until it returns a value - while output is None: - output = self._execute_ssh_command("uci get wireless.@wifi-iface[0].ssid", suppress_error_log=True) - if output is None: - time.sleep(1) - return output - diff --git a/jetson_runtime/BYODR_utils/common/testing.py b/jetson_runtime/BYODR_utils/common/testing.py deleted file mode 100644 index 88e003b0..00000000 --- a/jetson_runtime/BYODR_utils/common/testing.py +++ /dev/null @@ -1,140 +0,0 @@ -from __future__ import absolute_import -import collections -from six.moves import map - - -class QueueReceiver(object): - def __init__(self, queue_max_size=100): - """ - A drop-in replacement for ipc ReceiverThread. - :param queue_max_size: Max length of the queue. - """ - self._queue = collections.deque(maxlen=queue_max_size) - self._listeners = [] - self._started = False - - def start(self): - self._started = True - - def is_started(self): - return self._started - - def add_listener(self, c): - self._listeners.append(c) - - def add(self, m): - self._queue.appendleft(m) - list(map(lambda x: x(m), self._listeners)) - - def get_latest(self): - return self._queue[0] if bool(self._queue) else None - - def pop_latest(self): - return self._queue.popleft() if bool(self._queue) else None - - def clear(self): - self._queue.clear() - - def quit(self): - self.clear() - self._listeners = [] - self._started = False - - -class QueueCamera(object): - def __init__(self, queue_max_size=100): - """ - A drop-in replacement for ipc CameraThread. - :param queue_max_size: Max length of the queue. 
- """ - self._queue = collections.deque(maxlen=queue_max_size) - self._started = False - - def start(self): - self._started = True - - def is_started(self): - return self._started - - def add(self, meta_data, image): - self._queue.appendleft((meta_data, image)) - - def capture(self): - return self._queue[0] if bool(self._queue) else (None, None) - - def clear(self): - self._queue.clear() - - -class CollectPublisher(object): - def __init__(self, topic=""): - """ - A drop-in replacement for ipc JSONPublisher. - :param topic: The default topic. - """ - self._topic = topic - self._map = dict() - - def publish(self, data, topic=None): - _topic = self._topic if topic is None else topic - if _topic not in self._map: - self._map[_topic] = list() - self._map[_topic].append(data) - - def collect(self, topic=None): - _topic = self._topic if topic is None else topic - return self._map.get(_topic) - - def get_latest(self, topic=None): - return self.collect(topic=topic)[-1] - - def clear(self): - self._map.clear() - - -class CollectServer(object): - def __init__(self): - """ - A drop-in replacement for ipc LocalIPCServer. - """ - self._errors = [] - self._capabilities = [] - - def register_start(self, errors, capabilities=None): - capabilities = {} if capabilities is None else capabilities - self._errors.append(errors) - self._capabilities.append(capabilities) - - def collect(self): - return self._errors - - def get_latest(self): - return self._errors[-1] - - def clear(self): - self._errors = [] - - -class CollectJSONClient(object): - """ - A drop-in replacement for ipc JSONZmqClient. 
- """ - - def __init__(self): - self._list = [] - - def call(self, message, ret=None): - self._list.append(message) - return ret - - def collect(self): - return self._list - - def get_latest(self): - return self._list[-1] - - def clear(self): - self._list = [] - - def quit(self): - self.clear() diff --git a/jetson_runtime/BYODR_utils/common/usbrelay.py b/jetson_runtime/BYODR_utils/common/usbrelay.py deleted file mode 100644 index 67ff5503..00000000 --- a/jetson_runtime/BYODR_utils/common/usbrelay.py +++ /dev/null @@ -1,252 +0,0 @@ -from __future__ import absolute_import - -import logging -import multiprocessing -import time - -import usb.core -import usb.util -from usb.util import CTRL_IN, CTRL_OUT, CTRL_TYPE_VENDOR - -logger = logging.getLogger(__name__) - - -class SingleChannelUsbRelay(object): - """ - HALJIA USB-relaismodule USB Smart Control Switch Intelligent Switch Control USB Relais module - """ - - def __init__(self, vendor=0x1A86, product=0x7523): - self._vendor = vendor - self._product = product - self._device = None - self._endpoint = None - - def attach(self): - self._device = usb.core.find(idVendor=self._vendor, idProduct=self._product) - if self._device is None: - logger.error("Device vendor={} product={} not found.".format(self._vendor, self._product)) - return - - try: - if self._device.is_kernel_driver_active(0): - self._device.detach_kernel_driver(0) - - _config = self._device.get_active_configuration() - _intf = _config[(0, 0)] - - self._endpoint = usb.util.find_descriptor( - _intf, - # match the first OUT endpoint - custom_match=(lambda _e: usb.util.endpoint_direction(_e.bEndpointAddress) == usb.util.ENDPOINT_OUT), - ) - - if self._endpoint is None: - logger.error("Endpoint not found.") - except Exception as e: - logger.error(e) - - def open(self): - if self._endpoint is not None: - self._endpoint.write([0xA0, 0x01, 0x00, 0xA1]) - - def close(self): - if self._endpoint is not None: - self._endpoint.write([0xA0, 0x01, 0x01, 0xA2]) - - -class 
DoubleChannelUsbRelay(object): - """ - ICQUANZX SRD-05VDC-SL-C 2-way - """ - - def __init__(self, vendor=0x16C0, product=0x05DF): - self._vendor = vendor - self._product = product - self._device_on = [[0xFF, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], [0xFF, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]] - self._device_off = [[0xFC, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00], [0xFC, 0x02, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]] - self._device = None - - def find(self): - return usb.core.find(idVendor=self._vendor, idProduct=self._product) - - def poll(self): - return self.find() is not None - - def attach(self): - self._device = self.find() - self._device = usb.core.find(idVendor=self._vendor, idProduct=self._product) - if self._device is None: - logger.error("Device vendor={} product={} not found.".format(self._vendor, self._product)) - return - - try: - if self._device.is_kernel_driver_active(0): - self._device.detach_kernel_driver(0) - self._device.set_configuration() - except Exception as e: - logger.error(e) - - def is_attached(self): - return self._device is not None - - def open(self, channel=0): - # assert self.is_attached(), "The device is not attached." - self._device.ctrl_transfer(0x21, 0x09, 0x0300, 0x0000, "".join(chr(n) for n in self._device_off[channel]), 1000) - - def close(self, channel=0): - # assert self.is_attached(), "The device is not attached." 
- self._device.ctrl_transfer(0x21, 0x09, 0x0300, 0x0000, "".join(chr(n) for n in self._device_on[channel]), 1000) - - -class TransientMemoryRelay(object): - """Fake class, I suppose""" - - def __init__(self, num_channels=4): - self._state = [0] * num_channels - - def open(self, channel=0): - self._state[channel] = 0 - - def close(self, channel=0): - self._state[channel] = 1 - - def states(self): - return [bool(x) for x in self._state] - - -class FourChannelUsbRelay(object): - """ - Conrad Components 393905 Relay Module 5 V/DC - Conrad article 393905 - Conrad supplier 393905 - EAN: 4016138810585 - Type: CP210x - """ - - MAX_GPIO_INDEX = 4 - - CP210X_VENDOR_ID = 0x10C4 - CP210X_PRODUCT_ID = 0xEA60 - - CP210X_REQUEST_TYPE_READ = CTRL_IN | CTRL_TYPE_VENDOR - CP210X_REQUEST_TYPE_WRITE = CTRL_OUT | CTRL_TYPE_VENDOR - - CP210X_REQUEST_VENDOR = 0xFF - - CP210X_VALUE_READ_LATCH = 0x00C2 - CP210X_VALUE_WRITE_LATCH = 0x37E1 - - def __init__(self, vendor=CP210X_VENDOR_ID, product=CP210X_PRODUCT_ID): - """ - Adapted from https://github.com/jjongbloets/CP210xControl/blob/master/CP210xControl/model.py. - """ - self._vendor = vendor - self._product = product - self._device = None - - def find(self): - return usb.core.find(idVendor=self._vendor, idProduct=self._product) - - def poll(self): - return self.find() is not None - - def attach(self): - self._device = self.find() - if self._device is None: - logger.error("Device vendor={} product={} not found.".format(self._vendor, self._product)) - return - - try: - if self._device.is_kernel_driver_active(0): - self._device.detach_kernel_driver(0) - self._device.set_configuration() - except Exception as e: - logger.error(e) - - def is_attached(self): - return self._device is not None - - def _query(self, request, value, index, length): - # assert self.is_attached(), "The device is not attached." 
- return self._device.ctrl_transfer(self.CP210X_REQUEST_TYPE_READ, request, value, index, length) - - def _write(self, request, value, index, data): - # assert self.is_attached(), "The device is not attached." - return self._device.ctrl_transfer(self.CP210X_REQUEST_TYPE_WRITE, request, value, index, data) - - def _set_gpio(self, index, value): - mask = 1 << index - values = (0 if value else 1) << index - msg = (values << 8) | mask - return self._write(self.CP210X_REQUEST_VENDOR, self.CP210X_VALUE_WRITE_LATCH, msg, 0) - - def _get_gpio_states(self): - results = [] - response = self._query(self.CP210X_REQUEST_VENDOR, self.CP210X_VALUE_READ_LATCH, 0, 1) - if len(response) > 0: - response = response[0] - for idx in range(self.MAX_GPIO_INDEX): - results.append((response & (1 << idx)) == 0) - return results - - def open(self, channel=0): - self._set_gpio(channel, 0) - - def close(self, channel=0): - self._set_gpio(channel, 1) - - def states(self): - return self._get_gpio_states() - - -class SearchUsbRelayFactory(object): - def __init__(self): - _relay = FourChannelUsbRelay() - # The others are not supported until they expose a read state method. 
- # if not _relay.poll(): - # _relay = DoubleChannelUsbRelay() - _relay.attach() - self._relay = _relay - - def get_relay(self): - return self._relay - - -class StaticRelayHolder(object): - def __init__(self, relay, default_channels=(0,)): - self._relay = relay - self._default_channels = self._tup_or_li(default_channels) - self._pulse_channels = () - self._lock = multiprocessing.Lock() - - @staticmethod - def _tup_or_li(arg): - return arg if isinstance(arg, tuple) or isinstance(arg, list) else (arg,) - - def _arg_(self, ch=None): - return self._default_channels if ch is None else self._tup_or_li(ch) - - def set_pulse_channels(self, channels): - with self._lock: - self._pulse_channels = self._tup_or_li(channels) - - def open(self, channels=None): - with self._lock: - [self._relay.open() for ch in self._arg_(channels)] - - def close(self, channels=None): - with self._lock: - for ch in self._arg_(channels): - self._relay.close() - if ch in self._pulse_channels: - time.sleep(0.100) - self._relay.open() - - def states(self): - with self._lock: - return self._relay.states() - - def pulse_config(self): - with self._lock: - return [i in self._pulse_channels for i in range(len(self._relay.states()))] diff --git a/jetson_runtime/BYODR_utils/common/video.py b/jetson_runtime/BYODR_utils/common/video.py deleted file mode 100644 index e1b4116b..00000000 --- a/jetson_runtime/BYODR_utils/common/video.py +++ /dev/null @@ -1,121 +0,0 @@ -from __future__ import absolute_import - -import collections -import logging -import threading -import time - -import gi -import numpy as np - -gi.require_version("Gst", "1.0") -from gi.repository import Gst - -Gst.init(None) - -logger = logging.getLogger(__name__) - - -class RawGstSource(object): - def __init__(self, name="app", boot_time_seconds=20, command="videotestsrc ! decodebin ! videoconvert ! appsink"): - assert "appsink" in command, "Need the appsink present in the gst command." 
- self.name = name - self.boot_time_seconds = boot_time_seconds - self.command = command.replace("appsink", "appsink name=sink emit-signals=true sync=false async=false max-buffers=1 drop=true") - self._listeners = collections.deque() - self._listeners_lock = threading.Lock() - self._sample_time = None - self.closed = True - self.video_pipe = None - - def _setup(self): - self.video_pipe = Gst.parse_launch(self.command) - self.closed = True - - # noinspection PyUnusedLocal - def _eos(self, bus, msg): - logger.info(msg) - self.close() - - # noinspection PyUnusedLocal - def _error(self, bus, msg): - logger.error(msg) - self.close() - - def _sample(self, sink): - buffer = sink.emit("pull-sample").get_buffer() - array = self.convert_buffer(buffer.extract_dup(0, buffer.get_size())) - with self._listeners_lock: - for listen in self._listeners: - listen(array) - self._sample_time = time.time() - return Gst.FlowReturn.OK - - def convert_buffer(self, buffer): - return buffer - - def add_listener(self, listener): - with self._listeners_lock: - self._listeners.append(listener) - - def remove_listener(self, listener): - with self._listeners_lock: - self._listeners.remove(listener) - - def open(self): - self._setup() - self.video_pipe.set_state(Gst.State.PLAYING) - video_sink = self.video_pipe.get_by_name("sink") - video_sink.connect("new-sample", self._sample) - bus = self.video_pipe.get_bus() - bus.add_signal_watch() - bus.connect("message::eos", self._eos) - bus.connect("message::error", self._error) - self.closed = False - self._sample_time = time.time() + self.boot_time_seconds - logger.info("Source {} opened.".format(self.name)) - - def is_healthy(self, patience): - return self._sample_time and time.time() - self._sample_time < patience - - def is_closed(self): - return self.closed - - def is_open(self): - return not self.is_closed() - - def check(self, patience=0.50): - if self.is_open() and not self.is_healthy(patience=patience): - self.close() - if self.is_closed(): - 
self.open() - - def close(self): - if self.video_pipe is not None: - self.video_pipe.set_state(Gst.State.NULL) - self.closed = True - logger.info("Source {} closed.".format(self.name)) - - -class GstStreamSource(RawGstSource): - def __init__(self, name, shape, command, fn_convert=(lambda x: x)): - super(GstStreamSource, self).__init__(name=name, command=command) - self._shape = shape - self._fn_convert = fn_convert - - def get_width(self): - return self._shape[1] - - def get_height(self): - return self._shape[0] - - def convert_buffer(self, buffer): - return self._fn_convert(buffer) - - -def create_image_source(name, shape, command): - return GstStreamSource(name, shape, command, fn_convert=(lambda buffer: np.fromstring(buffer, dtype=np.uint8).reshape(shape))) - - -def create_video_source(name, shape, command): - return GstStreamSource(name, shape, command) diff --git a/jetson_runtime/BYODR_utils/common/websocket.py b/jetson_runtime/BYODR_utils/common/websocket.py deleted file mode 100644 index 068ed145..00000000 --- a/jetson_runtime/BYODR_utils/common/websocket.py +++ /dev/null @@ -1,99 +0,0 @@ -from __future__ import absolute_import - -import json -import logging -import threading -import traceback - -import gi -from tornado import websocket - -gi.require_version("Gst", "1.0") -from gi.repository import Gst - -Gst.init(None) - -logger = logging.getLogger(__name__) - - -class HttpLivePlayerVideoSocket(websocket.WebSocketHandler): - def __init__(self, application, request, **kwargs): - super(HttpLivePlayerVideoSocket, self).__init__(application, request, **kwargs) - self._lock = threading.Lock() - self._streaming = False - - # noinspection PyAttributeOutsideInit - def initialize(self, **kwargs): - self._video = kwargs.get("video_source") - self._io_loop = kwargs.get("io_loop") - - def _push(self, _bytes): - with self._lock: - if self._streaming: - try: - self.write_message(_bytes, binary=True) - except websocket.WebSocketClosedError: - pass - - def _client(self, 
_bytes): - self._io_loop.add_callback(lambda: self._push(_bytes)) - - # noinspection PyUnusedLocal - @staticmethod - def check_origin(origin): - return True - - def data_received(self, chunk): - pass - - # noinspection PyUnusedLocal - def open(self, *args, **kwargs): - self._video.add_listener(self._client) - self.write_message(json.dumps(dict(action="init", width=self._video.get_width(), height=self._video.get_height()))) - - def on_close(self): - self._video.remove_listener(self._client) - - def on_message(self, message): - try: - with self._lock: - self._streaming = "REQUESTSTREAM" in message - logger.info("On message - streaming = {}.".format(self._streaming)) - except Exception as e: - logger.error("Stream socket@on_message: {} {}".format(e, traceback.format_exc())) - logger.error("Input message:---\n{}\n---".format(message)) - - -class JMuxerVideoStreamSocket(websocket.WebSocketHandler): - # noinspection PyAttributeOutsideInit - def initialize(self, **kwargs): - self._video = kwargs.get("video_source") - self._io_loop = kwargs.get("io_loop") - - def _push(self, _bytes): - try: - self.write_message(_bytes, binary=True) - except websocket.WebSocketClosedError: - pass - - def _client(self, _bytes): - self._io_loop.add_callback(lambda: self._push(_bytes)) - - # noinspection PyUnusedLocal - @staticmethod - def check_origin(origin): - return True - - def data_received(self, chunk): - pass - - # noinspection PyUnusedLocal - def open(self, *args, **kwargs): - self._video.add_listener(self._client) - - def on_close(self): - self._video.remove_listener(self._client) - - @staticmethod - def on_message(message): - logger.info("Unexpected message '{}' received.".format(message))