From 53364e5a1309fd2fd6924c64838031d61cb6131a Mon Sep 17 00:00:00 2001 From: Edgar Riba Date: Thu, 2 Nov 2023 20:32:40 +0100 Subject: [PATCH] remove kivy apps docu (#234) --- .../docs/dashboard/dashboard_user_guide.md | 4 +- website/docs/examples/examples_index.md | 70 ----- .../tutorials/camera_streamer/00_intro.md | 68 ---- .../camera_streamer/01_template_starter.md | 78 ----- .../camera_streamer/02_kivy_definition.md | 77 ----- .../camera_streamer/03_camera_stream.md | 210 ------------- .../introduction/00_tutorial_intro.md | 70 ----- .../introduction/01_background_knowledge.md | 88 ------ .../introduction/02_template_overview.md | 295 ------------------ .../tutorials/virtual_joystick/00_overview.md | 92 ------ .../virtual_joystick/01_template_starter.md | 80 ----- .../virtual_joystick/02_device_streams.md | 290 ----------------- .../03_virtual_joystick_widget.md | 142 --------- .../virtual_joystick/04_auto_control.md | 100 ------ .../virtual_joystick/05_further_exercises.md | 41 --- website/sidebars.js | 23 +- 16 files changed, 2 insertions(+), 1726 deletions(-) delete mode 100644 website/docs/tutorials/camera_streamer/00_intro.md delete mode 100644 website/docs/tutorials/camera_streamer/01_template_starter.md delete mode 100644 website/docs/tutorials/camera_streamer/02_kivy_definition.md delete mode 100644 website/docs/tutorials/camera_streamer/03_camera_stream.md delete mode 100644 website/docs/tutorials/introduction/00_tutorial_intro.md delete mode 100644 website/docs/tutorials/introduction/01_background_knowledge.md delete mode 100644 website/docs/tutorials/introduction/02_template_overview.md delete mode 100644 website/docs/tutorials/virtual_joystick/00_overview.md delete mode 100644 website/docs/tutorials/virtual_joystick/01_template_starter.md delete mode 100644 website/docs/tutorials/virtual_joystick/02_device_streams.md delete mode 100644 website/docs/tutorials/virtual_joystick/03_virtual_joystick_widget.md delete mode 100644 website/docs/tutorials/virtual_joystick/04_auto_control.md delete mode 100644 website/docs/tutorials/virtual_joystick/05_further_exercises.md diff --git a/website/docs/dashboard/dashboard_user_guide.md b/website/docs/dashboard/dashboard_user_guide.md index 0c81a3146..96b118e82 100644 --- a/website/docs/dashboard/dashboard_user_guide.md +++ b/website/docs/dashboard/dashboard_user_guide.md @@ -83,9 +83,7 @@ This screen is where the autonomous control of the amiga comes to life. Here you If you are controlling the Amiga with a brain or other micro-controller, here is where you will enable & monitor the auto control commands sent to your Amiga. -Auto control examples include the -[Virtual Joystick Example](/docs/tutorials/virtual_joystick/virtual-joystick-overview) (brain) -and the [FPV example](/docs/examples/FPV/) (micro-controller). +Auto control examples include the [FPV example](/docs/examples/FPV/) (micro-controller). ## General Settings Tab diff --git a/website/docs/examples/examples_index.md b/website/docs/examples/examples_index.md index b0227ad1b..662a25bbd 100644 --- a/website/docs/examples/examples_index.md +++ b/website/docs/examples/examples_index.md @@ -135,76 +135,6 @@ out. This example acts as an `OakCameraClient` in a standalone Python script. -## Brain App Tutorials - -These are the examples for ready-to-run apps and learning how you -can develop your own custom applications for the [**farm-ng Brain**](/docs/brain/). - -:::tip -The tutorials build off of one another and are better when -followed in order. 
-::: - -### [00 - Tutorial Introduction](/docs/tutorials/introduction/tutorial-introduction) - -:::caution deprecation warning -This is out-of-date for brains running `v2.x` Amiga OS software.
-This tutorial only applies to brains running Amiga OS `v1.x` versions.
-Please check back for an updated tutorial for brains running `v2.x` Amiga OS software. -::: - -This tutorial introduces necessary background knowledge and walks -you through the -[**`amiga-app-template`**](https://github.com/farm-ng/amiga-app-template). - -The topics covered in this tutorial include: - -- Creating kivy applications -- GRPC / asyncio application development - -### [01 - Camera Streamer Tutorial](/docs/tutorials/camera_streamer/camera-streamer-overview) - -:::caution deprecation warning -This is out-of-date for brains running `v2.x` Amiga OS software.
-This tutorial only applies to brains running Amiga OS `v1.x` versions.
-Please check back for an updated tutorial for brains running `v2.x` Amiga OS software. -::: - -This tutorial is designed to teach you to implement the -`OakCameraClient` in a GUI application using -[**Kivy**](https://kivy.org/) following along the -[**`camera-streamer`**](https://github.com/farm-ng/camera-streamer) example application built using the -[**`amiga-app-template`**](https://github.com/farm-ng/amiga-app-template). - -The topics covered in this tutorial include: - -- Creating kivy applications -- GRPC / asyncio application development -- Streaming an Oak camera with the camera client - -### [02 - Virtual Joystick Tutorial](/docs/tutorials/virtual_joystick/virtual-joystick-overview) - -:::caution deprecation warning -This is out-of-date for brains running `v2.x` Amiga OS software.
-This tutorial only applies to brains running Amiga OS `v1.x` versions.
-Please check back for an updated tutorial for brains running `v2.x` Amiga OS software. -::: - -This tutorial is designed to enable you to develop your own -custom applications that uses camera streams and controls your -Amiga over the CAN bus following along the -[**`virtual-joystick`**](https://github.com/farm-ng/virtual-joystick) -example application built using the -[**`amiga-app-template`**](https://github.com/farm-ng/amiga-app-template). - -The topics covered in this tutorial include: - -- Creating kivy applications -- GRPC / asyncio application development -- Streaming an Oak camera with the camera client -- Streaming Amiga state information with the canbus client -- Auto control mode of Amiga robot with the canbus client - ### [Developing Custom Applications](/docs/brain/custom-applications.mdx) :::caution deprecation warning diff --git a/website/docs/tutorials/camera_streamer/00_intro.md b/website/docs/tutorials/camera_streamer/00_intro.md deleted file mode 100644 index ddd0ddc75..000000000 --- a/website/docs/tutorials/camera_streamer/00_intro.md +++ /dev/null @@ -1,68 +0,0 @@ ---- -id: camera-streamer-overview -title: 00 - Camera Streamer Overview ---- - -# Camera Streamer Overview - -:::caution deprecation warning -This is out-of-date for brains running `v2.x` Amiga OS software.
-This tutorial only applies to brains running Amiga OS `v1.x` versions.
-Please check back for an updated tutorial for brains running `v2.x` Amiga OS software. -::: - -:::tip -This tutorial builds off of the -[**Tutorial Introduction**](/docs/tutorials/introduction/tutorial-introduction), -so please check that out if you have not already. -::: - -## [Link to `camera-streamer`](https://github.com/farm-ng/camera-streamer) - -This example application and tutorial is designed to get you -started developing your own basic applications and deploying them -to the Amiga brain. - -On the brain, there are multiple gRPC services running in the -background, including an oak camera service per camera device on -your Amiga. -You will see how to interact with one of these services through -the camera client in a basic kivy application, -using gRPC and asyncio in that application. - -The topics covered in this tutorial include: - -- Creating kivy applications -- GRPC / asyncio application development -- Streaming an Oak camera with the camera client - -## Block diagram - -```mermaid - flowchart BT; - - subgraph kivy_window - direction LR - ImageTexture - end - - subgraph AmigaOS - OakCameraServices - CanbusService - end - - subgraph CameraStreamerApp - OakCameraClient -- decoded jpeg --> ImageTexture - end - - subgraph OakCameraServices - direction LR - Oak0 - Oak1 - Oak2 - Oak3 - end - - Oak0 -- streamFrames rpc --> OakCameraClient - -``` diff --git a/website/docs/tutorials/camera_streamer/01_template_starter.md b/website/docs/tutorials/camera_streamer/01_template_starter.md deleted file mode 100644 index 58d24f4c5..000000000 --- a/website/docs/tutorials/camera_streamer/01_template_starter.md +++ /dev/null @@ -1,78 +0,0 @@ ---- -id: template-starter -title: 01 - Template Starter ---- -# Template Starter - -:::caution deprecation warning -This is out-of-date for brains running `v2.x` Amiga OS software.
-This tutorial only applies to brains running Amiga OS `v1.x` versions.
-Please check back for an updated tutorial for brains running `v2.x` Amiga OS software. -::: - -## Set up your Amiga brain template - -Follow the instructions in [**Developing Custom Applications**](/docs/brain/custom-applications.mdx) -to set up an application from the [**amiga-app-template**](https://github.com/farm-ng/amiga-app-template). - -Be sure not to skip the [**Customizing an app**](/docs/brain/brain-apps#customizing-an-app) -required to properly configure -the library and app you are creating. - -:::tip -Feel free to choose any name, but it's recommended to go with -something like `camera_tutorial` or `streamer_tutorial`for the -repository. -That difference will make it easier to differentiate between the [**`farm-ng/camera-streamer`**](https://github.com/farm-ng/camera-streamer) -example and your own implementation if they both get cloned onto -the same machine. - -We will assume you went with `camera_tutorial` in this tutorial. -::: - -## Rename the `TemplateApp` - -Now that you've created the template, it's time to rename it for -your app. -In this tutorial, we'll go with `CameraApp`. - -Navigate to `main.py` in your app and open it with a text editor. - -:::tip -We recommend [**Visual Studio Code**](https://code.visualstudio.com/)! -::: - -There's 3 places to change the templated name: - -```Python -# 1. Rename the class -class CameraApp(App): - def __init__(self) -> None: - super().__init__() - -... - -# 2. Rename the program -if __name__ == "__main__": - parser = argparse.ArgumentParser(prog="camera-tutorial") - -... - -# 3. Run with the new class name -try: - loop.run_until_complete(CameraApp().app_func()) -except asyncio.CancelledError: - pass -``` - -### Run the app - template - -Now sync the app to the Brain and launch it with the -[**Deploy Instructions**](/docs/brain/brain-apps#develop-and-test-in-the-robot) -for syncing the app onto the Amiga Brain! - -After the requirements install, you should see the template kivy -app pop up. -Try out the `Back` button to exit the app. - -![app-template](https://user-images.githubusercontent.com/53625197/217021857-aede9e9b-0f85-4b15-971f-c45944a3813c.png) diff --git a/website/docs/tutorials/camera_streamer/02_kivy_definition.md b/website/docs/tutorials/camera_streamer/02_kivy_definition.md deleted file mode 100644 index 5ff0b2dee..000000000 --- a/website/docs/tutorials/camera_streamer/02_kivy_definition.md +++ /dev/null @@ -1,77 +0,0 @@ ---- -id: kivy-definition -title: 02 - Kivy Definition ---- -# Kivy Definition - -:::caution deprecation warning -This is out-of-date for brains running `v2.x` Amiga OS software.
-This tutorial only applies to brains running Amiga OS `v1.x` versions.
-Please check back for an updated tutorial for brains running `v2.x` Amiga OS software. -::: - -:::info -In the [**`src/res/main.kv`**](https://github.com/farm-ng/camera-streamer/blob/main/src/res/main.kv) -file of the -[**camera-streamer**](https://github.com/farm-ng/camera-streamer) -app we define the kivy app. -You should open that file for reference as you follow along. -::: - -Our app will have two components: - -1. A [**`TabbedPanel`**](https://kivy.org/doc/stable/api-kivy.uix.tabbedpanel.html) of -[**`Image`**](https://kivy.org/doc/stable/api-kivy.uix.image.html) -widgets for displaying the camera streams -2. A [**`Button`**](https://kivy.org/doc/stable/api-kivy.uix.button.html) -for exiting the app - -## Starting with the template - -The [**`src/res/main.kv`**](https://github.com/farm-ng/amiga-app-template/blob/main/src/res/main.kv) -file of the -[**amiga-app-template**](https://github.com/farm-ng/amiga-app-template) -defines a root of a `RelativeLayout`, with a -`Button`, and a `Label`, -as explained in [**kivy app definition**](/docs/tutorials/introduction/template-overview#kivy-app-definition). - -We will keep the `RelativeLayout` root and the `Button` for -exiting the app, but remove the `Label` widget. -Instead, we will add a `TabbedPanel` for displaying the image -streams from our Oak device. - -The `RelativeLayout` allows for overlapping widgets, -which is nice as we can allow the image streams to take up the -full size of our window with the button drawn on top. -Notice we draw the `Button` after the `TabbedPannel` so it is -visible and usable. - -## TabbedPanel of Image widgets - -To conveniently package the 4 image streams from the oak camera -in the kivy `Window`, we will add the `Image` widgets as a -`TabbedPanel`. -The `TabbedPanel` is used to select between different pages, -which in our case is 4 `Image` Widgets. - -For the `TabbedPanel`, we do not need a `default_tab` and we -assign the `text` each tab will display. - -We assign each `Image` widget an `id` so it can be easily -referenced in the Python `App`. -This will allow us to set the correct image stream to the correct -tab, so the `rgb` image stream is shown on the `rgb` panel and -the `left` stereo camera stream is displayed on the `left` tab. - -In the Python app, we can access a `Widget` directly using the -widget `id` in two ways: - -```Python -# As a dictionary -FOO_WIDGET = self.root.ids['FOO_WIDGET_ID'] -# As an attribute -FOO_WIDGET = self.root.ids.FOO_WIDGET_ID -``` - -- Reference: [**TabbedPanel**](https://kivy.org/doc/stable/api-kivy.uix.tabbedpanel.html) -- Reference: [**Image**](https://kivy.org/doc/stable/api-kivy.uix.image.html) diff --git a/website/docs/tutorials/camera_streamer/03_camera_stream.md b/website/docs/tutorials/camera_streamer/03_camera_stream.md deleted file mode 100644 index c890e0c83..000000000 --- a/website/docs/tutorials/camera_streamer/03_camera_stream.md +++ /dev/null @@ -1,210 +0,0 @@ ---- -id: camera-stream -title: 03 - Python Implementation ---- -# Python Implementation - -:::caution deprecation warning -This is out-of-date for brains running `v2.x` Amiga OS software.
-This tutorial only applies to brains running Amiga OS `v1.x` versions.
-Please check back for an updated tutorial for brains running `v2.x` Amiga OS software. -::: - -:::info -The Python implementation of the -[**camera-streamer**](https://github.com/farm-ng/camera-streamer) -app can be found at -[**`src/main.py`**](https://github.com/farm-ng/camera-streamer/blob/main/src/main.py). -You should open that file for reference as you follow along. -::: - -## Add a camera stream - -The main method we'll add to our app is a camera stream. -This will: - -- Use the [**`OakCameraClient`**](https://github.com/farm-ng/farm-ng-amiga/blob/main/py/farm_ng/oak/camera_client.py) -- Display images as kivy [**`Image`**](https://kivy.org/doc/stable/api-kivy.uix.label.html) - widgets in our `TabbedPanel`. - -This task listens to the camera client's stream and populates the -tabbed panel with all 4 image streams from the oak camera. -In this task we connect to a "server streaming" RPC, as described -in [**gRPC core concepts**](https://grpc.io/docs/what-is-grpc/core-concepts/). - -### Setup - -Once the `root` of the kivy `App` is created, we loop "forever" -(until the app is closed). - -First we check the state of the `OakCameraClient`, which forwards -the state of the Oak camera service. -When the service is in the `IDLE` state it is available, but no -client has yet connected to it. -When the service is in the `RUNNING` state it is available and -has a client connected to it. -In this case, that's your `OakCameraClient`! - -### Connection logic - -If the service is in one of these available states (`IDLE` or -`RUNNING`), you want to create a stream with your client. - -If the service is not in one of these available states (`IDLE` or -`RUNNING`), you want to cancel the stream (if it exists) and -re-create it once it is again available. - -When creating the `response_stream` we use the -[**`stream_frames()`**](https://github.com/farm-ng/farm-ng-amiga/blob/main/py/farm_ng/oak/camera_client.py) -call. -This wraps the GRPC service stub `StreamFramesRequest`, which -takes the `every_n` argument used to throttle the rate of images -in the stream. - -### Read the stream - -The asyncio grpc stream allows your client to wait, in a -non-blocking way, for a new message from the service to be put on -the stream queue. - -If a service crashes unexpectedly, it is ideal to handle this -gracefully with the client. - -We receive a `StreamFramesReply` and access the `OakSyncFrame` -proto message it contains, both defined in -[**oak.proto**](https://github.com/farm-ng/farm-ng-amiga/blob/main/protos/farm_ng/oak/oak.proto), -from the response. -This contains all of the available camera streams from the Oak -device you are connected to. -Remember, the Oak camera devices have 3 cameras and, in this -case, send 4 image streams (rgb, left, right, & disparity). - -### Decode and display - -Finally, we can decode and display the images received from the -stream. - -For each of the image streams, we update the `Image` widget -`Texture` in the `TabbedPanel` with the corresponding decoded -image. -The `Image` widgets in the `TabbedPanel` accessed by their kivy -id. - -## Other notes - -### `farm_ng` Imports - -We import the necessary `farm_ng` libraries for creating the -camera client and interacting with the camera service. - -### Image decoding - -We will use `TurboJPEG` as the image decoder (it is much faster -than kivy's default image decoder), so we add that as an import -in our `main.py` file. 
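To make the decode-and-display step described above concrete, here is a minimal sketch. This is not the exact `camera-streamer` code: the `display_frame` helper, keeping the decoder as a module-level object, and the example tab ids are illustrative choices; `TurboJPEG`, `Texture`, and the kivy `ids` lookup are the real APIs involved.

```Python
import numpy as np
from kivy.graphics.texture import Texture
from turbojpeg import TurboJPEG

# Build the decoder once and reuse it for every frame (in the app it is a class variable).
image_decoder = TurboJPEG()

def display_frame(app, tab_id: str, jpeg_bytes: bytes) -> None:
    """Decode one JPEG payload and show it on the Image widget with the given kivy id."""
    img: np.ndarray = image_decoder.decode(jpeg_bytes)  # BGR numpy array
    texture = Texture.create(size=(img.shape[1], img.shape[0]), colorfmt="bgr")
    texture.flip_vertical()  # kivy textures are bottom-up by default
    texture.blit_buffer(img.tobytes(), colorfmt="bgr", bufferfmt="ubyte")
    # Look up the target Image widget by its kivy id, e.g. "rgb", "left", "right", "disparity".
    app.root.ids[tab_id].texture = texture
```

Reusing a single decoder instance avoids re-allocating TurboJPEG state on every frame, which is why the app keeps it as a class variable.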
- -In order to import this, we must add the library `PyTurboJPEG` to -the -[**`setup.cfg`**](https://github.com/farm-ng/camera-streamer/blob/main/setup.cfg) -file so the dependency installs. - -We also construct an instance of this image decoder and assign it -as a class variable of our `CameraApp` so it is not created every -time we decode an image. - -### Command line Arguments - -We add a few command line arguments used by the `OakCameraClient` -at the bottom of the app and pass these to the `CameraApp` class -through the constructor. - -These include the `address` and `port` of the oak device we will -stream and the `stream_every_n` argument that allows you to -throttle the stream rate of your camera, if you wish to save -computational resources. - -### entry.sh - -There are **required arguments** that must be set in the -[**`entry.sh`**](https://github.com/farm-ng/camera-streamer/blob/main/entry.sh) -file and **optional arguments** that take on a default value, -unless overridden in the command line. - -Since `port` is required, we add `--port 50051` to the `python` -call in [**`entry.sh`**](https://github.com/farm-ng/camera-streamer/blob/main/entry.sh) -to set the script to use the `Oak0` device (`Oak1` would be on `50052`, `Oak2` on `50053`, -and so on). - -When launching your app on the Brain with the button, any -required args being passed to `main.py` must already be specified -in `entry.sh`. - -When launching your app on your computer, or on the brain but -from an SSH terminal, you can add additional arguments to change -the default value of the optional arguments. -The `$@` in `python` call in `entry.sh` is what allows for this. - -For example, to run the app from your computer, while the camera -runs on the brain nearby: - -```Python -cd camera_tutorial/ -./entry.sh --address -``` - -To run the app on the amiga, with changing a default command line -arg: - -```Python -ssh amiga - # Password: amiga -# Now in an ssh terminal -cd ~/apps/ -./camera_tutorial/entry.sh --stream-every-n 2 -``` - -:::caution -If you launch the `Camera Streamer` app with the command line, -it is currently possible to have touch interactions with the launcher behind. -This will cause other installed apps to unexpectedly launch over the app you are trying to use. - -Please see instructions for a workaround under: -[**FAQ - Brain App Development**](/docs/reference/faq#brain-app-development) -::: - -### App icon - -We replace the `app_logo.png` with an icon from -. -When developing your own app, you can: - -1. Choose a suitable symbol or icon for your app -2. Tweak the appearance parameters, including moving to the -largest 'Optical Size' available -3. Export it as a `.png` file - -For following along with this tutorial, feel free to download the -image from -[**src/assets/app_logo.png**](https://github.com/farm-ng/camera-streamer/blob/main/src/assets/app_logo.png). - -:::info note -The brain may not display the app icon immediately when it is -cloned onto your machine. -You can trigger a `Refresh App Buttons` on the settings screen to -apply the newly downloaded app icon. -This also is applicable if you change the app icon and want to -display the new icon. -::: - -### `app_func()` - -Here we create the `OakCameraClient` and add the `stream_camera` -asyncio task to our tasks list. - -## Run it - -Run the app on the brain by launching with the app button or run -it through a terminal as described in -[**Command line arguments**](#command-line-arguments). 
- -![camera-streamer](https://user-images.githubusercontent.com/53625197/216075393-6e578a01-677e-4279-b224-70fd3f73ce5f.png) diff --git a/website/docs/tutorials/introduction/00_tutorial_intro.md b/website/docs/tutorials/introduction/00_tutorial_intro.md deleted file mode 100644 index 52c5f9e38..000000000 --- a/website/docs/tutorials/introduction/00_tutorial_intro.md +++ /dev/null @@ -1,70 +0,0 @@ ---- -id: tutorial-introduction -title: 00 - Introduction ---- -# Tutorial Introduction - -:::caution deprecation warning -This is out-of-date for brains running `v2.x` Amiga OS software.
-This tutorial only applies to brains running Amiga OS `v1.x` versions.
-Please check back for an updated tutorial for brains running `v2.x` Amiga OS software. -::: - -This first tutorial is designed to provide you with: - -1. References and the basic background knowledge that your Amiga -app development will be based on. -2. An understanding of the -[**`amiga-app-template`**](https://github.com/farm-ng/amiga-app-template) -you will use as the base of your custom apps. - -## Your system - -The app development process works best on Ubuntu 20.04, but we -also support Windows and Mac systems. - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - - - -The instructions should work as written on native Linux machines! - - - - -You should run this with windows Subsystem for Linux (WSL). -This is a well supported and documented environment. - -[**Install WSL**](https://learn.microsoft.com/en-us/windows/wsl/install) - -You should install **WSL2**, using the **Ubuntu 20.04** -distribution. - -One option: -[**Ubuntu 20.04 from Microsoft store**](https://apps.microsoft.com/store/detail/ubuntu-2004/9N6SVWS3RX71) - -For the most part, you should be able to use WSL to run the -native Linux commands. - - - - -Everything should work as with Linux, though there may be some -unmet dependencies you can install with `brew`. -E.g., - -```bash -brew install wget -``` - -Some of the scripts may fail, and we're working through that. -For instance, if you come into an `md5sum` issue, you'll need to -change `md5sum` to `md5`. - -We are actively working on this support, so please reach out with -an issues you encounter so we can help you through them and -resolve it promptly! - - - diff --git a/website/docs/tutorials/introduction/01_background_knowledge.md b/website/docs/tutorials/introduction/01_background_knowledge.md deleted file mode 100644 index d66b84881..000000000 --- a/website/docs/tutorials/introduction/01_background_knowledge.md +++ /dev/null @@ -1,88 +0,0 @@ ---- -id: background-knowledge -title: 01 - Background Knowledge ---- -# Background knowledge - -:::caution deprecation warning -This is out-of-date for brains running `v2.x` Amiga OS software.
-This tutorial only applies to brains running Amiga OS `v1.x` versions.
-Please check back for an updated tutorial for brains running `v2.x` Amiga OS software. -::: - -The Amiga brain app development meets at the intersection of three key libraries, -as well as a few publicly available libraries developed by the **farm-ng** team: - -1. [**gRPC**](https://grpc.io/) -2. [**asyncio**](https://docs.python.org/3/library/asyncio.html) -3. [**kivy**](https://kivy.org/) -4. [**farm-ng libraries**](#farm-ng-libraries) - -:::info -Currently we are only supporting Python app development, but our -infrastructure allows for C++ app development support in the near -future. -::: - -## gRPC - -The best place to start to gain an understanding of gRPC is the -[**gRPC introduction**](https://grpc.io/docs/what-is-grpc/introduction/), -followed by the [**gRPC core concepts**](https://grpc.io/docs/what-is-grpc/core-concepts/). - -gRPC is used as our communication protocol between services -(devices like cameras running in the background) and clients (how -you connect to services in your app). -The communication is done through Protocol Buffers, defined in `*. -proto` files in our [**farm-ng libraries**](#farm-ng-libraries). - -## asyncio - -The best place to start to gain an understanding of asyncio is -the [**asyncio docs**](https://docs.python.org/3/library/asyncio.html). - -We use asyncio in order to run multiple concurrent tasks in our -applications. -This is crucial to the system design to prevent high rate robotic -control from being blocked by time consuming processes, such as -image processing. - -In the virtual joystick example, we have multiple, concurrent -`while` loops running that: - -- Receive the camera stream (from the camera service) -- Receive the canbus stream (from the canbus service) -- Draw the joystick (in [**kivy**](#kivy)) -- Send canbus commands (to the canbus service) - -## kivy - -The best place to start to gain an understanding of kivy is the -[**kivy Getting Started >> Introduction**](https://kivy.org/doc/stable/gettingstarted/intro.html). - -We use kivy to draw our apps and handle touch screen interactions -for our interactive apps. -kivy can be coded in its own language -([**the Kv language**](https://kivy.org/doc/stable/guide/lang.html)), in Python, or in -some combination of both! - -We tend to define our apps in the kv language, and add -interaction in Python code. -In the example, however, we also demonstrate creating a custom -kivy `Widget` in Python! - -## farm-ng libraries - -We have some libraries that are imported by the brain -infrastructure and are used in our apps. -They are: - -- defined as python packages (installed with `pip` by pointing to -the repo) -- contain the `.proto` definitions used in our gRPC communications -- contain the gRPC clients you can use to interact with the Amiga -brain services - -See: [**farm_ng_core**](https://github.com/farm-ng/farm-ng-core) - -See: [**farm_ng_amiga**](https://github.com/farm-ng/farm-ng-amiga) diff --git a/website/docs/tutorials/introduction/02_template_overview.md b/website/docs/tutorials/introduction/02_template_overview.md deleted file mode 100644 index 58a138174..000000000 --- a/website/docs/tutorials/introduction/02_template_overview.md +++ /dev/null @@ -1,295 +0,0 @@ ---- -id: template-overview -title: 02 - Template Overview ---- -# Template Overview - -:::caution deprecation warning -This is out-of-date for brains running `v2.x` Amiga OS software.
-This tutorial only applies to brains running Amiga OS `v1.x` versions.
-Please check back for an updated tutorial for brains running `v2.x` Amiga OS software. -::: - -This section explains all of the Python and kivy code in the -[**`amiga-app-template`**](https://github.com/farm-ng/amiga-app-template), -to help understand the base before you add anything custom. - -:::info -This corresponds to the state of the `amiga-app-template` on -January 31, 2023. -Some details of the implementation may have changed slightly by -the time you are reading. -::: - -## Imports - -```Python -# Copyright (c) farm-ng, inc. Amiga Development Kit License, -# Version 0.1 -import argparse -import asyncio -import os -from typing import List - -from amiga_package import ops - -# Must come before kivy imports -os.environ["KIVY_NO_ARGS"] = "1" - -# gui configs must go before any other kivy import -from kivy.config import Config # noreorder # noqa: E402 - -Config.set("graphics", "resizable", False) -Config.set("graphics", "width", "1280") -Config.set("graphics", "height", "800") -Config.set("graphics", "fullscreen", "false") -Config.set("input", "mouse", "mouse,disable_on_activity") -Config.set("kivy", "keyboard_mode", "systemanddock") - -# kivy imports -from kivy.app import App # noqa: E402 -from kivy.lang.builder import Builder # noqa: E402 -``` - -The template starts with generic Python imports that are used in -the app, followed by the custom lib imports, then kivy imports -and configuration. - -Before any kivy imports, we must explicitly state that the -command line args for the app are to be used, rather than the -default kivy command line args, with -`os.environ["KIVY_NO_ARGS"] = "1"`. - -Notice we import kivy `Config` and define the config parameters -we recommend for running kivy applications on the brain. -This should come before importing any other Kivy modules, as -stated in -[**kivy - Configuration object**](https://kivy.org/doc/stable/api-kivy.config.html). - -Finally we import the remaining kivy modules with the -`# noqa: E402` flag, so any `pre-commit` formatters don't move -these imports above the kivy configuration setting. - -## kivy app definition - -Contents of `res/main.kv` - -```Python -RelativeLayout: - Button: - id: back_btn_layout - pos_hint: {"x": 0.0, "top": 1.0} - background_color: 0, 0, 0, 0 - size_hint: 0.1, 0.1 - background_normal: "assets/back_button.png" - on_release: app.on_exit_btn() - Image: - source: "assets/back_button_normal.png" if self. - parent.state == "normal" else "assets/ - back_button_down.png" - pos: self.parent.pos - size: self.parent.size - Label: - id: counter_label - text: "Tic: 0" - font_size: 40 -``` - -Next we define our application in the Kv language. -This definition can be a `"""` string at the top of a `.py` file -or can be defined in a separate `.kv` file. -Either can be imported by the -[**kivy Builder**](https://kivy.org/doc/stable/api-kivy.lang.builder.html). -Here we use a separate .kv file -[**`res/main.py`**](https://github.com/farm-ng/amiga-app-template/blob/main/src/res/main.kv). - -:::tip -Throughout this tutorial we'll explain the kivy app created in -this example, but this is not intended as a thorough introduction -to using kivy. Try the [**kivy tutorials**](https://kivy.org/doc/stable/tutorials-index.html) -and use the [**kivy API**](https://kivy.org/doc/stable/api-index.html) -for more information on creating custom applications with kivy. 
-::: - -### RelativeLayout - -Two key components of kivy are -[**`Layouts`**](https://kivy.org/doc/stable/gettingstarted/layouts.html#) and -[**`Widgets`**](https://kivy.org/doc/stable/api-kivy.uix.html). -The root of our template app is a `RelativeLayout`, which -contains a `Button` and a `Label` widget. -The `RelativeLayout` allows us to position the -[**Back button**](#back-button) (and any widgets or nested -layouts we may add in the future) in relative coordinates. - -- Reference: [**Relative Layout**](https://kivy.org/doc/stable/api-kivy.uix.relativelayout.html) - -### Back button - -This `Button` is used to exit the app when it is pressed, by -calling the [**`TemplateApp.on_exit_btn()`**](#on_exit_button) -method. - -:::info -To be precise it's actually when the button is released due to -using the `on_release:` keyword. -::: - -Since the `TemplateApp` inherits from the kivy `App` class, -methods and variables of the `TemplateApp` can be linked with the -`app.foo_variable` or `app.bar_method()` -We define the `Button` with two images, one that shows most of -the time, and another that shows while the button is pressed down. -You can also define a button with a string, if you want to -quickly add buttons without finding an icon. - -:::tip -[**Material Icons**](https://github.com/google/material-design-icons) -is a nice place to find symbols to use for app buttons / UI -features. -::: - -- Reference: [**Button**](https://kivy.org/doc/stable/api-kivy.uix.button.html) - -## TemplateApp - -```Python -class TemplateApp(App): - """Base class for the main Kivy app.""" - - def __init__(self) -> None: - super().__init__() - - self.counter: int = 0 - - self.async_tasks: List[asyncio.Task] = [] -``` - -We define the `TemplateApp` to inherit from the kivy `App` class, -so it has all the features of a generic `App`, plus anything we -add to it. - -All we add here is a placeholder for the `TemplateApp` class -methods that will each be added as an `asyncio.Task`. - -### build - -```Python -def build(self): - return Builder.load_file("res/main.kv") - -``` - -`build` is a default kivy `App` method that we must overwrite -with our app's details. - -To load the `.kv` definition of our app, we use the built-in -method: - -```Python -Builder.load_file(KV_FILE) -``` - -### on_exit_button - -```Python -def on_exit_btn(self) -> None: - """Kills the running kivy application.""" - App.get_running_app().stop() -``` - -This simple method stops the running kivy app. -When an app was launched on the Amiga Brain through the Launcher -app, this will return the Brain state to the Launcher app. - -### app_func - -```Python -async def app_func(self): - async def run_wrapper() -> None: - # we don't actually need to set asyncio as the lib - # because it is - # the default, but it doesn't hurt to be explicit - await self.async_run(async_lib="asyncio") - for task in self.async_tasks: - task.cancel() - - # Placeholder task - self.async_tasks.append(asyncio.ensure_future(self. - template_function())) - - return await asyncio.gather(run_wrapper(), *self.async_tasks) -``` - -We use the `app_func` pattern, with the nested `run_wrapper`, to -build, run, and manage the list of long duration, asynchronous -tasks required by the app. - -Here we build the list of `async` methods that will run -simultaneously for the life of our app. -Currently this list only consists of a placeholder method called -[**`template_function()`**](#template_function) that we will -later replace with tasks that actually do something. 
- -Each method is added as an `asyncio.Task` following the pattern -used to add `self.template_function()`. - -### template_function - -```Python -async def template_function(self) -> None: - """Placeholder forever loop.""" - while self.root is None: - await asyncio.sleep(0.01) - - while True: - await asyncio.sleep(1.0) - - # increment the counter using internal libs and update - # the gui - self.counter = ops.add(self.counter, 1) - self.root.ids.counter_label.text = ( - f"{'Tic' if self.counter % 2 == 0 else 'Tac'}: {self. - counter}" - ) -``` - -In all of our `async` functions, we should wait for the root of -the kivy App to be initialized before doing anything in the -function. -Often these functions will rely on the kivy app, so this prevents -unexpected crashes. - -In this placeholder, the `while` loop doesn't do anything besides -update the text of the `Label` widget to alternate between -`Tic` & `Tac` every second. - -:::tip -The custom defined async functions must be defined with the -`async` decorator and any blocking tasks with the `await` keyword. -::: - -## Command line args and execution - -```Python -if __name__ == "__main__": - parser = argparse.ArgumentParser(prog="template-app") - - # Add additional command line arguments here - - args = parser.parse_args() - - loop = asyncio.get_event_loop() - try: - loop.run_until_complete(TemplateApp().app_func()) - except asyncio.CancelledError: - pass - loop.close() -``` - -Finally we run the app! -There is infrastructure in place for defining command line args, -which you'll likely want in your apps so you don't have to hard -code configurations. -The last six lines are a useful pattern for cleanly running your -app with `asyncio`. diff --git a/website/docs/tutorials/virtual_joystick/00_overview.md b/website/docs/tutorials/virtual_joystick/00_overview.md deleted file mode 100644 index 09a5ad13e..000000000 --- a/website/docs/tutorials/virtual_joystick/00_overview.md +++ /dev/null @@ -1,92 +0,0 @@ ---- -id: virtual-joystick-overview -title: 00 - Virtual Joystick Overview ---- - -# Virtual Joystick Overview - -:::caution deprecation warning -This is out-of-date for brains running `v2.x` Amiga OS software.
-This tutorial only applies to brains running Amiga OS `v1.x` versions.
-Please check back for an updated tutorial for brains running `v2.x` Amiga OS software. -::: - -:::tip -This tutorial builds off of the -[**Tutorial Introduction**](/docs/tutorials/introduction/tutorial-introduction) and the -[**Camera Streamer Tutorial**](/docs/tutorials/camera_streamer/camera-streamer-overview), -so please check those out if you have not already. -::: - -## [Link to `virtual-joystick`](https://github.com/farm-ng/virtual-joystick) - -This example application and tutorial is designed to enable you -to develop your own custom applications and deploy them to the -Amiga brain. -The goal of this tutorial is to take you step-by-step from the -template repository to the full -[**`virtual-joystick`**](https://github.com/farm-ng/virtual-joystick) example. -Then you can mirror what you've done here in your own custom app -development! - -On the brain, there are multiple gRPC services running in the -background, including the oak camera service and the canbus -service. -We will teach you how to interact with these two services through -the camera client and canbus client, respectively. -We will also show you how to create a basic kivy application, a -custom kivy widget, and use gRPC and asyncio in that application. - -The topics covered in this tutorial include: - -- Creating kivy applications -- Custom kivy widgets -- GRPC / asyncio application development -- Streaming an Oak camera with the camera client -- Streaming Amiga state information with the canbus client -- Auto control mode of Amiga robot with the canbus client - -:::info -We hope that after completing this tutorial, and the -predecessors, you are prepared to develop your own custom Amiga -brain applications on the Amiga brain. -If you feel we missed any key details, please let us know at -[**discourse.farm-ng.com**](https://discourse.farm-ng.com/) so we -can help you through it and add it to the tutorial for everyone -else to benefit from! -::: - -## Block diagram - -```mermaid - flowchart BT; - subgraph kivy_window - direction LR - VirtualJoystickWidget - ImageTexture - Displayed_Labels - end - - subgraph AmigaOS - OakCameraServices - CanbusService - end - - subgraph VirtualJoystickApp - VirtualJoystickWidget -- commands --> CanbusClient - CanbusClient -- measured rates --> Displayed_Labels - OakCameraClient -- decoded jpeg --> ImageTexture - end - - subgraph OakCameraServices - direction LR - Oak0 - Oak1 - Oak2 - Oak3 - end - - Oak0 -- streamFrames rpc --> OakCameraClient - CanbusService -- streamCanbusMessages rpc --> CanbusClient - CanbusService <-- sendCanbusMessage rpc --> CanbusClient -``` diff --git a/website/docs/tutorials/virtual_joystick/01_template_starter.md b/website/docs/tutorials/virtual_joystick/01_template_starter.md deleted file mode 100644 index 68b14e4b4..000000000 --- a/website/docs/tutorials/virtual_joystick/01_template_starter.md +++ /dev/null @@ -1,80 +0,0 @@ ---- -id: template-starter -title: 01 - Template Starter ---- -# Template Starter - -:::caution deprecation warning -This is out-of-date for brains running `v2.x` Amiga OS software.
-This tutorial only applies to brains running Amiga OS `v1.x` versions.
-Please check back for an updated tutorial for brains running `v2.x` Amiga OS software. -::: - -## Set up your Amiga brain template - -Follow the instructions in [**Developing Custom Applications**](/docs/brain/custom-applications.mdx) -to clone and set up an application from the -[**amiga-app-template**](https://github.com/farm-ng/amiga-app-template). - -Be sure not to skip the -[**Customizing an app**](/docs/brain/brain-apps#customizing-an-app) -required to properly configure -the library and app you are creating. - -:::tip -Feel free to choose any repository name, but it's recommended to -go with something like `joystick_tutorial` for the repository. -That difference will make it easier to differentiate between the -[**`farm-ng/virtual-joystick`**](https://github.com/farm-ng/virtual-joystick) -example and your own implementation if they both get cloned onto -the same machine. - -We will assume you went with `joystick_tutorial` in this tutorial. -::: - -## Rename the `TemplateApp` - -Now that you've created the template, it's time to rename it for -your app. -In this tutorial, we'll go with `VirtualJoystickApp`. - -Navigate to `main.py` in your app and open it with a text editor. - -:::tip -We recommend [**Visual Studio Code**](https://code.visualstudio.com/)! -::: - -There's 3 places to change the templated name: - -```Python -# 1. Rename the class -class VirtualJoystickApp(App): - def __init__(self) -> None: - super().__init__() - -... - -# 2. Rename the program -if __name__ == "__main__": - parser = argparse.ArgumentParser(prog="joystick-tutorial") - -... - -# 3. Run with the new class name -try: - loop.run_until_complete(VirtualJoystickApp().app_func()) -except asyncio.CancelledError: - pass -``` - -### Run the app - template - -Now sync the app to the Brain and launch it with the -[**Deploy Instructions**](/docs/brain/brain-apps#develop-and-test-in-the-robot) -for syncing the app onto the Amiga Brain! - -After the requirements install, you should see the template kivy -app pop up. -Try out the `Back` button to exit the app. - -![app-template](https://user-images.githubusercontent.com/53625197/217021857-aede9e9b-0f85-4b15-971f-c45944a3813c.png) diff --git a/website/docs/tutorials/virtual_joystick/02_device_streams.md b/website/docs/tutorials/virtual_joystick/02_device_streams.md deleted file mode 100644 index f7da579fb..000000000 --- a/website/docs/tutorials/virtual_joystick/02_device_streams.md +++ /dev/null @@ -1,290 +0,0 @@ ---- -id: device-streams -title: 02 - Device Streams ---- -# Device Streams - -:::caution deprecation warning -This is out-of-date for brains running `v2.x` Amiga OS software.
-This tutorial only applies to brains running Amiga OS `v1.x` versions.
-Please check back for an updated tutorial for brains running `v2.x` Amiga OS software. -::: - -:::info -In the -[**`src/res/main.kv`**](https://github.com/farm-ng/virtual-joystick/blob/main/src/res/main.kv) -and [**`src/main.py`**](https://github.com/farm-ng/virtual-joystick/blob/main/src/main.py) -files of the -[**virtual-joystick**](https://github.com/farm-ng/virtual-joystick) -app we define the kivy app and Python implementation of the -`VirtualJoystickApp`. - -You should open these files for reference as you follow along. -::: - -## Camera stream - -:::info -You should have already gone through the -[**Camera Streamer Tutorial**](/docs/tutorials/camera_streamer/camera-streamer-overview) -based on the -[**camera-streamer**](https://github.com/farm-ng/camera-streamer) -example app. -Understanding these instructions will rely on understanding those! -::: - -You can see we define the camera stream in the same as in the -[**Camera Streamer Tutorial**](/docs/tutorials/camera_streamer/camera-streamer-overview). -This time however, we nest the `TabbedPanel` of `Image` widgets -in a -[**`BoxLayout`**](https://kivy.org/doc/stable/api-kivy.uix.boxlayout.html) -(with `orientation: horizontal`) so we can arrange some other -widgets next to our `TabbedPanel`. - -## Canbus stream - -### Kivy definition - -The first `Widget` we will arrange next to the `TabbedPanel` is -another `BoxLayout` (with `orientation: vertical`), used for -displaying real time data streamed by the canbus client. - -This `BoxLayout` will contain multiple widgets displaying -information streamed from the canbus service, through the -[**`CanbusClient`**](https://github.com/farm-ng/farm-ng-amiga/blob/main/py/farm_ng/canbus/canbus_client.py). -One of the easiest widgets to add for conveying information is -the [**`Label`**](https://kivy.org/doc/stable/api-kivy.uix.label.html) -widget, so we arrange a few of these (and potentially some empty -placeholder widgets) in the box layout. -Unlike with a `RelativeLayout`, where you can position each -widget precisely, a `BoxLayout` requires empty widgets if you -want to leave some blank space. - -You can see the use of -[**`size_hint_x`**](https://kivy.org/doc/stable/api-kivy.uix.widget.html#kivy.uix.widget.Widget.size_hint_x) -& -[**`size_hint_y`**](https://kivy.org/doc/stable/api-kivy.uix.widget.html#kivy.uix.widget.Widget.size_hint_y) -to adjust the ***relative*** size of the widgets to their parent. -For us, this means shrinking the relative size of the `BoxLayout` -of `Label` widgets displaying the streamed canbus values (in the -x direction). - -- Reference: -[**Box Layout**](https://kivy.org/doc/stable/api-kivy.uix.boxlayout.html) -- Reference: -[**Label**](https://kivy.org/doc/stable/api-kivy.uix.label.html) - -### Python canbus stream - -You will notice in [`src/main.py`](https://github.com/farm-ng/virtual-joystick/blob/main/src/main.py) -that there is a lot of similarity between the `stream_camera` and -`stream_canbus` methods of the `VirtualJoystickApp`. - -Both methods handle connecting to a "server streaming" RPC, as -described in [**gRPC core concepts**](https://grpc.io/docs/what-is-grpc/core-concepts/). -They only differ in the client used to connect -([**`OakCameraClient`**](https://github.com/farm-ng/farm-ng-amiga/blob/main/py/farm_ng/oak/camera_client.py) -vs -[**`CanbusClient`**](https://github.com/farm-ng/farm-ng-amiga/blob/main/py/farm_ng/canbus/canbus_client.py)) -and what is done with the received message. 
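Stripped of the farm-ng specifics, that shared control flow looks roughly like the sketch below. The `client.get_state()` / `client.stream()` calls, the state strings, and `handle_reply` are placeholders standing in for the real client methods, proto enums, and per-stream handling, so treat this as the shape of the loop rather than the actual farm-ng API.

```Python
import asyncio

async def stream_service(app, client, handle_reply) -> None:
    """Generic shape shared by stream_camera and stream_canbus (placeholder client API)."""
    while app.root is None:
        await asyncio.sleep(0.01)  # wait until the kivy root widget exists

    response_stream = None
    while True:
        state = await client.get_state()  # placeholder for the real service-state query
        if state not in ("IDLE", "RUNNING"):
            # Service unavailable: tear the stream down and retry once it comes back.
            if response_stream is not None:
                response_stream.cancel()
                response_stream = None
            await asyncio.sleep(0.1)
            continue
        if response_stream is None:
            response_stream = client.stream()  # placeholder for stream_frames() / stream()
        reply = await response_stream.read()  # non-blocking wait for the next message
        handle_reply(reply)  # camera: decode and display; canbus: parse AmigaTpdo1
```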
- -#### Setup - -This is just like -[**Camera Streamer - Camera Stream - Setup**](/docs/tutorials/camera_streamer/camera-stream#setup) -section, except we use the `CanbusClient` to connect to the -canbus service rather than the `OakCameraClient` connecting to -the oak camera service. - -#### Connection logic - -This is just like -[**Camera Streamer - Camera Stream - Connection Logic**](/docs/tutorials/camera_streamer/camera-stream#connection-logic) -section, except we use the -[**`stream`**](https://github.com/farm-ng/farm-ng-amiga/blob/main/py/farm_ng/canbus/canbus_client.py) -method of the `CanbusClient` to read the response stream. -This wraps the GRPC service stub `streamCanbusMessages`. - -#### Read the stream - -This is just like -[**Camera Streamer - Camera Stream - Read the Stream**](/docs/tutorials/camera_streamer/camera-stream#read-the-stream) -section, except we receive a `StreamCanbusReply` proto message, -defined in -[**canbus.proto**](https://github.com/farm-ng/farm-ng-amiga/blob/main/protos/farm_ng/canbus/canbus.proto), -from our canbus service. - -This ultimately contains (in a nested proto definition) an -iterable container where each message is a proto defined -`RawCanbusMessage`, also defined in -[**canbus.proto**](https://github.com/farm-ng/farm-ng-amiga/blob/main/protos/farm_ng/canbus/canbus.proto) - -#### Decode and display - -We parse every proto defined `RawCanbusMessage` to extract the -[**`AmigaTpdo1`**](https://github.com/farm-ng/farm-ng-amiga/blob/main/py/farm_ng/canbus/packet.py) -(Amiga state) message, if the `RawCanbusMessage` contains an -`AmigaTpdo1` message. - -:::tip -The [**`AmigaTpdo1`**](https://github.com/farm-ng/farm-ng-amiga/blob/main/py/farm_ng/canbus/packet.py) -message comes from the dashboard and contains the: - -- state of the Amiga (AmigaControlState) -- measured speed (forward positive) -- measured angular rate (left positive) - -This is the information you'll use for closed loop control! -::: - -The canbus service reformats and forwards all CAN messages to the -canbus client, so there are a lot of messages to filter out. -The [**`parse_amiga_tpdo1_proto`**](https://github.com/farm-ng/farm-ng-amiga/blob/main/py/farm_ng/canbus/packet.py) -returns `None` if the `RawCanbusMessage` does not contain an `AmigaTpdo1` message. - -:::info -If you're curious to learn more about CAN bus in general, see -[**CSS Electronics - CAN Bus Explained**](https://www.csselectronics.com/pages/can-bus-simple-intro-tutorial). -In this virtual joystick tutorial, we are only teaching you to -interact with the canbus client through Amiga state messages. -::: - -To display the values in the `Label` widgets we use a kivy -[**`StringProperty`**](https://kivy.org/doc/stable/api-kivy.properties.html#kivy.properties.StringProperty) -for each value. -These are bound to the corresponding `Label` widget text fields, -so we only need to update the value of the `StringProperty` and -we do not need to update the text field of the `Label` explicitly. - -### Other notes - -#### `farm_ng` Imports - -We import the necessary `farm_ng` libraries for creating the -camera and canbus clients and interacting with both services. - -#### Command line Arguments - -We now have two device services to connect to, an oak camera and -the canbus, running on different ports. -We name them accordingly and set them both as required. 
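As a sketch, declaring those arguments might look roughly like this; the flag names match the ones used in `entry.sh` below, while the defaults and help strings are illustrative assumptions.

```Python
import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser(prog="joystick-tutorial")
    parser.add_argument("--address", type=str, default="localhost",
                        help="Address of the brain running the services.")
    parser.add_argument("--camera-port", type=int, required=True,
                        help="Port of the oak camera service (Oak0 is on 50051).")
    parser.add_argument("--canbus-port", type=int, required=True,
                        help="Port of the canbus service (50060).")
    parser.add_argument("--stream-every-n", type=int, default=1,
                        help="Only stream every n-th camera frame.")
    args = parser.parse_args()
    # args.address, args.camera_port, args.canbus_port, and args.stream_every_n
    # are then passed to the VirtualJoystickApp constructor.
```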
- -Similar to the [**Camera Streamer - Camera Stream - Command line Arguments**](/docs/tutorials/camera_streamer/camera-stream#command-line-arguments), -we add a few command line arguments used by the `OakCameraClient` -and the `CanbusClient` at the bottom of the app and pass these to -the `VirtualJoystickApp` class through the constructor. - -These include the `address` of the brain (common to all devices) -and the `port` of both devices we will stream, as well as the -`stream_every_n` argument for the oak device. - -#### entry.sh - -As in the [**Camera Streamer - Camera Stream - entry.sh**](/docs/tutorials/camera_streamer/camera-stream#entrysh), -the required arguments are added to the `entry.sh` file. -Since `camera_port` and `canbus_port` are required, we add -`--camera-port 50051` and `--canbus-port 50060` to the `python` -call in -[`entry.sh`](https://github.com/farm-ng/virtual-joystick/blob/main/entry.sh) -to set the script to use the `Oak0` device and the canbus. - -:::info -If you want to use a different oak device than `Oak0`, hard code -the corresponding `camera-port` value. -`Oak1` would be on `50052`, `Oak2` on `50053`, and so on... -::: - -When launching your app on the Brain with the button, any -required args being passed to `main.py` must already be specified -in `entry.sh`. - -When launching your app on your computer, or on the brain but -from an SSH terminal, you can add additional arguments to change -the default value of the optional arguments. -The `$@` in `python` call in `entry.sh` is what allows for this. - -For example, to run the app from your computer, while the camera -runs on the brain nearby: - -```Python -cd joystick_tutorial/ -./entry.sh --address -``` - -To run the app on the amiga, with changing a default command line -arg: - -```Python -ssh amiga - # Password : amiga -# Now in an ssh terminal -cd ~/apps/ -./joystick_tutorial/entry.sh --stream-every-n 2 -``` - -:::caution -If you launch the `Virtual Joystick` app with the command line, -it is currently possible to have touch interactions with the launcher behind. -This will cause other installed apps to unexpectedly launch over the app you are trying to use. - -Please see instructions for a workaround under: -[**FAQ - Brain App Development**](/docs/reference/faq#brain-app-development) -::: - -#### App icon - -We replace the `app_logo.png` with an icon from . -When developing your own app, you can: - -1. Choose a suitable symbol or icon for your app -2. Tweak the appearance parameters, including moving to the -largest 'Optical Size' available -3. Export it as a `.png` file - -For following along with this tutorial, feel free to download the -image from [src/assets/app_logo.png](https://github.com/farm-ng/virtual-joystick/blob/main/src/assets/app_logo.png). - -:::info note -The brain may not display the app icon immediately when it is -cloned onto your machine. -You can trigger a `Refresh App Buttons` on the settings screen to -apply the newly downloaded app icon. -This also is applicable if you change the app icon and want to -display the new icon. -::: - -#### `app_func()` - -Here we create the `OakCameraClient` and `CanbusClient` and add -the `stream_camera` `stream_canbus` asyncio tasks to our tasks -list. - -### Run it - -Run the app on the brain by launching with the app button or run -it through a terminal as described in -[Command line arguments](#command-line-arguments). - -:::caution -Make sure all of your cables are disconnected from the Amiga -before driving around! 
-::: - -You should see the `AmigaTpdo1` values update in realtime as you -drive the amiga and change between various command states. See -[**Amiga Control States**](../../dashboard/control_states.mdx) -and [**`AmigaControlState`**](https://github.com/farm-ng/farm-ng-amiga/blob/main/py/farm_ng/canbus/packet.py) -for more information on the `state` parameter. - -You should also see camera stream to the right of the -`AmigaTpdo1` values from the canbus. -Check all four tabs to investigate the different camera streams -provided by the oak camera. - -![camera_stream](https://user-images.githubusercontent.com/53625197/200481937-5fc317bc-614d-4446-89f5-9df70471c3f6.png) - -### References - -- [**farm_ng.canbus.canbus_client**](https://github.com/farm-ng/farm-ng-amiga/blob/main/py/farm_ng/canbus/canbus_client.py) -- [**farm_ng.canbus.packet**](https://github.com/farm-ng/farm-ng-amiga/blob/main/py/farm_ng/canbus/packet.py) -- [**canbus.proto**](https://github.com/farm-ng/farm-ng-amiga/blob/main/protos/farm_ng/canbus/canbus.proto) diff --git a/website/docs/tutorials/virtual_joystick/03_virtual_joystick_widget.md b/website/docs/tutorials/virtual_joystick/03_virtual_joystick_widget.md deleted file mode 100644 index 0004101ff..000000000 --- a/website/docs/tutorials/virtual_joystick/03_virtual_joystick_widget.md +++ /dev/null @@ -1,142 +0,0 @@ ---- -id: virtual-joystick-widget -title: 03 - Virtual Joystick Widget ---- -# Virtual Joystick Widget - -:::caution deprecation warning -This is out-of-date for brains running `v2.x` Amiga OS software.
-This tutorial only applies to brains running Amiga OS `v1.x` versions.
-Please check back for an updated tutorial for brains running `v2.x` Amiga OS software. -::: - -We will now define a custom widget, the `VirtualJoystickWidget`, -in kivy and Python to give an introduction to kivy drawing. -We define the custom widget such that it can be imported just -like a kivy API widget! - -This widget will be used to drive the robot by moving the virtual -joystick on the Brain screen. -The driving behavior is modelled after the behavior of driving -with the joystick on the pendant. - -:::info -In the [**`libs/virtual_joystick/res/joystick.kv`**](https://github.com/farm-ng/virtual-joystick/blob/main/libs/virtual_joystick/res/joystick.kv) -and [**`libs/virtual_joystick/joystick.py`**](https://github.com/farm-ng/virtual-joystick/blob/main/libs/virtual_joystick/joystick.py) -files of the -[**virtual-joystick**](https://github.com/farm-ng/virtual-joystick) -app we define the custom widget in kivy and Python. - -You should open these files for reference as you follow along. -::: - -## kivy Definition - -We first define a few custom arguments for defining the drawn -`joystick` that are linked to the -[**`Ellipse`**](https://kivy.org/doc/stable/api-kivy.graphics.html#kivy.graphics.Ellipse) -widget used to draw the joystick circle (the one with `id: joystick`). -Because these values are linked, they can be updated on the -Python side of the `VirtualJoystickWidget` and the kivy drawing -will update accordingly. - -An important component to understand is the kivy -[**`Canvas`**](https://kivy.org/doc/stable/api-kivy.graphics.instructions.html), -which is the root object used for drawing by a `Widget`. -As you can see, both the background `Ellipse` and the joystick -`Ellipse` are drawn within the scope of the canvas. -Also note how the -[**`Color`**](https://kivy.org/doc/stable/api-kivy.graphics.html#kivy.graphics.Color) - is set before drawing each widget in `rgba` format, allowing - color and alpha adjustments. - -## Python Implementation - -### Builder - -By building the `.kv` definition of the `VirtualJoystickWidget` -in the Python constructor, -the widget can be imported just like a kivy API widget. -That means you can import it into your Python definition of your -`App` (i.e., `main.py`), and reference it both there and in your -kivy app definition (i.e., `main.kv`) - -Explore -[**kivy `Builder`**](https://kivy.org/doc/stable/api-kivy.lang.builder.html) -for more details. - -### kivy `Clock` - -We schedule regular updates to the linked variables containing -the pose of the drawn joystick using the [**kivy Clock**](https://kivy.org/doc/stable/api-kivy.clock.html). -kivy provides multiple options for scheduling tasks, which you -can explore in their API. - -Updating these linked values will cause the drawn Widget to -automatically update. -You could alternatively update these values as they are -calculated in the touch handling methods if you don't want to use -the kivy `Clock`. - -:::caution -Do not schedule long running, blocking tasks with the kivy clock -or you will freeze the app while the task executes. -The kivy clock (which runs on the main loop) should only be used -to schedule very quick actions. - -Blocking tasks should be scheduled as an `asyncio` task! 
- -### Touch handling - -The [**`on_touch_down()`**](https://kivy.org/doc/stable/api-kivy.uix.widget.html#kivy.uix.widget.Widget.on_touch_down), -[**`on_touch_move()`**](https://kivy.org/doc/stable/api-kivy.uix.widget.html#kivy.uix.widget.Widget.on_touch_move), -and -[**`on_touch_up()`**](https://kivy.org/doc/stable/api-kivy.uix.widget.html#kivy.uix.widget.Widget.on_touch_up) -calls are triggered for all `Widget`s within a kivy `App` whenever there is a touch interaction (by default). - -We override the default behavior of these methods to update the -pose of the joystick whenever we touch and/or move within the -`VirtualJoystickWidget`, -and recenter the joystick upon release. - -From the [**kivy `Widget` class docs**](https://kivy.org/doc/stable/api-kivy.uix.widget.html), -which all widgets inherit from: - -> `on_touch_down()`, `on_touch_move()`, `on_touch_up()` don’t do -any sort of collisions. -> If you want to know if the touch is inside your widget, use -`collide_point()`. - -So we filter `on_touch_down()` & `on_touch_move()` with -[**`collide_point()`**](https://kivy.org/doc/stable/api-kivy.uix.widget.html#kivy.uix.widget.Widget.collide_point) -to only perform our custom behavior when the touch occurred within our `VirtualJoystickWidget`. - -Because we want to recenter the joystick regardless of which -widget the `touch_up` occurred in, we do not filter -`on_touch_up()` with `collide_point()`. - -### Vec2 - -We also define a simple container called `Vec2` for handling the -`x` & `y` values of the joystick coordinates in -[**libs/virtual_joystick/utils.py**](https://github.com/farm-ng/virtual-joystick/blob/main/libs/virtual_joystick/utils.py) -and import this into `joystick.py`. - -## Add it to the app - -- Import this widget in [**`src/main.py`**](https://github.com/farm-ng/virtual-joystick/blob/main/src/main.py) -- Add the `VirtualJoystickWidget` next to the `TabbedPanel` in -the `BoxLayout` of the app's kivy definition in -[**`src/res/main.kv`**](https://github.com/farm-ng/virtual-joystick/blob/main/src/res/main.kv) - -## Run it - -Now sync the app to the Brain and launch it. - -You should now see the virtual joystick between the camera stream -(far right) and the `AmigaTpdo1` values from the canbus (left). -Try moving the joystick around with your finger and releasing it, -but note: it won't drive yet! - -![joystick](https://user-images.githubusercontent.com/53625197/200641720-c722fa9f-f6a3-4918-a4f0-d7270b73fd43.png) diff --git a/website/docs/tutorials/virtual_joystick/04_auto_control.md b/website/docs/tutorials/virtual_joystick/04_auto_control.md deleted file mode 100644 index d4f673a5f..000000000 --- a/website/docs/tutorials/virtual_joystick/04_auto_control.md +++ /dev/null @@ -1,100 +0,0 @@ ---- -id: auto-control -title: 04 - Auto Control ---- -# Auto Control - -:::caution deprecation warning -This is out-of-date for brains running `v2.x` Amiga OS software.&#13;
-This tutorial only applies to brains running Amiga OS `v1.x` versions.
-Please check back for an updated tutorial for brains running `v2.x` Amiga OS software. -::: - -Finally, we will use this virtual joystick and the canbus -client / service connection to control the Amiga to complete the -full Virtual Joystick example. - -## Control the Amiga - -This is done with a third forever-running `asyncio` task for -sending CAN messages. - -This task waits for a `RUNNING` canbus client state, -to ensure there is feedback on the measured speeds before sending -any commands. -This is a must for closed-loop control! - -Once the canbus client is in a full `RUNNING` state, -the task initializes the -[**Bidirectional streaming RPC**](https://grpc.io/docs/what-is-grpc/core-concepts/#bidirectional-streaming-rpc) -called -[**sendCanbusMessage**](https://github.com/farm-ng/farm-ng-amiga/blob/main/protos/farm_ng/canbus/canbus.proto). - -This passes a -[**`Python Generator`**](https://wiki.python.org/moin/Generators) -that constructs and yields a `SendCanbusMessageRequest` -containing a `RawCanbusMessage`, -both proto definitions from [**canbus.proto**](https://github.com/farm-ng/farm-ng-amiga/blob/main/protos/farm_ng/canbus/canbus.proto). - -The `RawCanbusMessage` encodes an -[**`AmigaRpdo1`**](https://github.com/farm-ng/farm-ng-amiga/blob/main/py/farm_ng/canbus/packet.py) -auto control request that is forwarded by the canbus service to the -Amiga dashboard. -This includes the requested state, speed, and angular rate of the -Amiga. -As you can see, the requested speed and angular rate are based on -the position of the `VirtualJoystickWidget`. - -:::tip -The `AmigaRpdo1` message is only a request. The vehicle control -unit (VCU) in the Amiga dashboard has safety-critical logic that -prevents unsafe auto control. -::: - -Because this is a bi-directional stream, a response is returned -for each `RawCanbusMessage` the generator yields. -We can check the status of each of these responses, and exit the -generator loop if the service does not respond with a confirmed -`success`. -Once the canbus service is ready for streaming control again, it -should re-initiate automatically. - -### Generator details - -The pose generator yields an [**`AmigaRpdo1`**](https://github.com/farm-ng/farm-ng-amiga/blob/main/py/farm_ng/canbus/packet.py) -(auto control command) for the canbus client to send on the bus -at the specified period (recommended 50 Hz) based on the on-screen -joystick position. - -On each loop of the generator we sleep to enforce the ideal rate of -streaming `AmigaRpdo1` CAN messages, which is 50 Hz. -You can modify the period parameter, but if you go too slow you lose -responsiveness, and if you go too fast you risk saturating the CAN -bus, which can cause loss of communication between all components -on the bus. -A minimal sketch of this generator pattern is shown at the end of -this page. - -### Add this as a task - -Remember to add the `send_can_msgs()` method to our list of -`asyncio.Task`s in `app_func()`! - -## Run it - -Now sync the app to the Brain and launch it. - -:::caution -Make sure all your cables are disconnected from the Amiga and no -one is in the way of the Amiga before driving around! -::: - -Everything should look just like the last checkpoint, but now you -can drive the Amiga with the virtual joystick! - -Navigate to the auto tab on the dashboard and enter the `Auto -Ready` state. -The Brain should take control and enter the `Auto Active` state -right away, allowing you to drive with the screen. -You should see the state on the dashboard match that displayed on -the Brain in your app. &#13;
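As referenced above, here is a minimal, self-contained sketch of the 50 Hz generator idea. It deliberately uses plain dataclasses as stand-ins for the real farm-ng types (`AmigaRpdo1`, `RawCanbusMessage`, `SendCanbusMessageRequest`); the field names, limits, and the `state_req` placeholder are illustrative assumptions, not the actual `farm_ng` API.

```python
import asyncio
from dataclasses import dataclass


@dataclass
class JoystickPose:
    """Normalized on-screen joystick position, both axes in [-1, 1]."""
    x: float = 0.0  # left/right -> angular rate
    y: float = 0.0  # up/down    -> forward speed


@dataclass
class ControlRequest:
    """Stand-in for an AmigaRpdo1-style auto control request."""
    state_req: int
    cmd_speed: float
    cmd_ang_rate: float


async def control_request_generator(
    joystick: JoystickPose,
    max_speed: float = 1.0,         # m/s, placeholder limit
    max_angular_rate: float = 1.0,  # rad/s, placeholder limit
    period: float = 0.02,           # 50 Hz, the rate recommended above
):
    """Yield one auto control request per period based on the joystick pose."""
    while True:
        # Sleeping each loop enforces the streaming rate; going much faster
        # risks saturating the CAN bus, going much slower hurts responsiveness.
        await asyncio.sleep(period)
        yield ControlRequest(
            state_req=1,  # placeholder for the requested auto-control state
            cmd_speed=max_speed * joystick.y,
            cmd_ang_rate=max_angular_rate * joystick.x,
        )
```

In the real app a generator like this is handed to the `sendCanbusMessage` bidirectional RPC, and the response to each yielded message is checked for `success` before the next one is sent.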
- -![auto_control](https://user-images.githubusercontent.com/53625197/200641685-a712fb2d-66f7-4ec2-bf92-e6d96c93cadb.png) diff --git a/website/docs/tutorials/virtual_joystick/05_further_exercises.md b/website/docs/tutorials/virtual_joystick/05_further_exercises.md deleted file mode 100644 index 682168808..000000000 --- a/website/docs/tutorials/virtual_joystick/05_further_exercises.md +++ /dev/null @@ -1,41 +0,0 @@ ---- -id: further-exercises -title: 05 - Further Exercises ---- -# Further Exercises - -:::caution deprecation warning -This is out-of-date for brains running `v2.x` Amiga OS software.
-This tutorial only applies to brains running Amiga OS `v1.x` versions.
-Please check back for an updated tutorial for brains running `v2.x` Amiga OS software. -::: - -Optionally, go beyond the tutorial and try to add features to this example. -Two options are: - -## Adjustable rates - -Define two kivy [**`Slider`**](https://kivy.org/doc/stable/api-kivy.uix.slider.html) -widgets that allow changing `max_speed` & `max_angular_rate`! -Play around with where you can put these and how you can link -them directly to the values in the `VirtualJoystickApp`. - -Just remember, the actual rates the Amiga drives at are limited -by the vehicle control unit (VCU), so don't be surprised if the -true max speed doesn't match the slider value. - -## Toggle between Auto modes - -Try to add a kivy `Button` widget that toggles the requested -`AmigaControlState` so the Brain is not constantly trying to take -control of the dashboard while running. - -Or, for a more advanced exercise, add a button that starts/stops the sending of -canbus messages. -This could require stopping the stream of messages from the -generator, signalling the `sendCanbusMessage` stream to stop, and -re-initializing the `sendCanbusMessage` RPC later. - -:::tip possible hint -Sending `grpc.aio.EOF` might do it... -::: diff --git a/website/sidebars.js b/website/sidebars.js index 2ed983cf1..9e011c9ad 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -239,28 +239,7 @@ const sidebars = { "type": "category" }, ], - "Brain App Tutorials" : { - "00 - Tutorial Introduction" :[ - "tutorials/introduction/tutorial-introduction", - "tutorials/introduction/background-knowledge", - "tutorials/introduction/template-overview", - ], - "01 - Camera Streamer Tutorial" :[ - "tutorials/camera_streamer/camera-streamer-overview", - "tutorials/camera_streamer/template-starter", - "tutorials/camera_streamer/kivy-definition", - "tutorials/camera_streamer/camera-stream", - ], - "02 - Virtual Joystick Tutorial" :[ - "tutorials/virtual_joystick/virtual-joystick-overview", - "tutorials/virtual_joystick/template-starter", - "tutorials/virtual_joystick/device-streams", - "tutorials/virtual_joystick/virtual-joystick-widget", - "tutorials/virtual_joystick/auto-control", - "tutorials/virtual_joystick/further-exercises", - ], - }, - "Amiga Brain Development": [ + "Amiga Brain Development": [ "brain/brain-apps", "brain/ros-bridge" ]