From 3fccf544add104423964ca46b60746b88024fd53 Mon Sep 17 00:00:00 2001 From: edgarriba Date: Tue, 26 Sep 2023 11:50:20 +0000 Subject: [PATCH 01/14] update camera client --- website/docs/examples/camera_client/README.md | 80 +++++++------------ 1 file changed, 29 insertions(+), 51 deletions(-) diff --git a/website/docs/examples/camera_client/README.md b/website/docs/examples/camera_client/README.md index b423a796f..2d32ece58 100644 --- a/website/docs/examples/camera_client/README.md +++ b/website/docs/examples/camera_client/README.md @@ -35,7 +35,7 @@ pip3 install -r requirements.txt ### 3. Execute the Python script ```bash -python3 main.py --port 50051 +python3 main.py --service-config service_config.json ``` :::info @@ -55,75 +55,53 @@ the `WifiClient` (coming soon) ```bash python3 main.py --help -# usage: amiga-camera-app [-h] --port PORT [--address ADDRESS] -#[--stream-every-n STREAM_EVERY_N] - +# usage: amiga-camera-stream [-h] --service-config SERVICE_CONFIG +# # optional arguments: # -h, --help show this help message and exit -# --port PORT The camera port. -# --address ADDRESS The camera address -# --stream-every-n STREAM_EVERY_N -# Streaming frequency +# --service-config SERVICE_CONFIG +# The camera config. ``` -Usage example: - -```bash -python3 main.py --address 192.168.1.93 --port 50051 -``` +To customize the run, you need to update the `service_config.json` by modifying the `host` and `port` fields. ### 5. Code overview -Basic structure to consume from the camera client in an async -fashion. +In this example we use the `EventClient` with the subscribe method to receive the camera stream. ```python -from farm_ng.oak.client import OakCameraClient, -OakCameraClientConfig -from farm_ng.oak import oak_pb2 - -async def main(address: str, port: int, stream_every_n: int) -> -None: +async def main(service_config_path: Path) -> None: + """Run the camera service client. 
- # configure the camera client - config = OakCameraClientConfig(address=address, port=port) - client = OakCameraClient(config) + Args: + service_config_path (Path): The path to the camera service config. + """ + # create a client to the camera service + config: EventServiceConfig = proto_from_json_file(service_config_path, EventServiceConfig()) - # get the streaming object - response_stream = client.stream_frames(every_n=stream_every_n) + # instantiate the image decoder + image_decoder = ImageDecoder() - # start the streaming service - await client.connect_to_service() + async for event, message in EventClient(config).subscribe(config.subscriptions[0], decode=True): + print(f"Timestamps: {event.timestamps[-2]}") + print(f"Meta: {message.meta}") + print("###################\n") - while True: - # query the service state - state: oak_pb2.OakServiceState = await client.get_state() + # cast image data bytes to numpy and decode + image = np.from_dlpack(image_decoder.decode(message.image_data)) - if state.value != oak_pb2.OakServiceState.RUNNING: - print("Camera is not streaming!") - continue + # visualize the image + cv2.namedWindow("image", cv2.WINDOW_NORMAL) + cv2.imshow("image", image) + cv2.waitKey(1) - response: oak_pb2.StreamFramesReply = await - response_stream.read() - if response and response.status == oak_pb2.ReplyStatus.OK: - # get the sync frame - frame: oak_pb2.OakSyncFrame = response.frame - print(f"Got frame: {frame.sequence_num}") - print(f"Device info: {frame.device_info}") - print("#################################\n") if __name__ == "__main__": - parser = argparse.ArgumentParser(prog="amiga-camera-app") - parser.add_argument("--port", type=int, required=True, - help="The camera port.") - parser.add_argument("--address", type=str, - default="localhost", help="The camera address") - parser.add_argument("--stream-every-n", type=int, default=4, - help="Streaming frequency") + parser = argparse.ArgumentParser(prog="amiga-camera-stream") + 
parser.add_argument("--service-config", type=Path, required=True, help="The camera config.") args = parser.parse_args() - asyncio.run(main(args.address, args.port, args. - stream_every_n)) + asyncio.run(main(args.service_config)) ``` :::tip From e10d8c3a2a4263723c8a48b4930ba930b91c52a8 Mon Sep 17 00:00:00 2001 From: edgarriba Date: Tue, 26 Sep 2023 12:00:49 +0000 Subject: [PATCH 02/14] update the calibration readme --- .../examples/camera_calibration/README.md | 94 +++++++++++++++++++ .../camera_calibration/camera-calibration.md | 74 --------------- website/docs/examples/camera_client/README.md | 2 +- 3 files changed, 95 insertions(+), 75 deletions(-) create mode 100644 website/docs/examples/camera_calibration/README.md delete mode 100644 website/docs/examples/camera_calibration/camera-calibration.md diff --git a/website/docs/examples/camera_calibration/README.md b/website/docs/examples/camera_calibration/README.md new file mode 100644 index 000000000..696879531 --- /dev/null +++ b/website/docs/examples/camera_calibration/README.md @@ -0,0 +1,94 @@ +--- +id: camera-calibration +title: Camera Calibration +--- + +## Camera Calibration + +The requirements to run this example are to have a +[**farm-ng brain**](/docs/brain/) running Oak cameras and that +your PC is on the same local network as the brain. + +### 1. Install the [farm-ng Brain ADK package](/docs/brain/brain-install) + +### 2. Setup + +:::tip + +It is recommended to also install these dependencies and run the +example in the brain ADK virtual environment. + +::: + +Create first a virtual environment + +```bash +python3 -m venv venv +source venv/bin/activate +``` + +```bash +# assuming you're already in the amiga-dev-kit/ directory +cd farm-ng-amiga/py/examples/camera_calibration +``` + +### 3. Install the example's dependencies + +```bash +pip3 install -r requirements.txt +``` + +### 4. 
Execute the Python script

```bash
python3 main.py --service-config service_config.json
```

:::info
By default, the camera address is assumed to be `localhost`.
:::

### 5. Customize run

```bash
# usage: amiga-camera-calibration [-h] --service-config SERVICE_CONFIG
#
# optional arguments:
# -h, --help show this help message and exit
# --service-config SERVICE_CONFIG
# The camera config.
```

### 6. Code overview

In this example we use the `EventClient` with the `request_reply` method to receive the camera calibration. The `request_reply` method is a coroutine that returns a `Future` object. The `Future` object is used to retrieve the result of the request.

The path to the calibration service is `/calibration` and the request message is `Empty`. The response message is `OakCalibration`, which is automatically decoded by the `request_reply` method using the `decode=True` argument.

```python
async def main(service_config_path: Path) -> None:
    """Request the camera calibration from the camera service.

    Args:
        service_config_path (Path): The path to the camera service config.
    """
    # create a client to the camera service
    config: EventServiceConfig = proto_from_json_file(service_config_path, EventServiceConfig())

    # get the calibration message
    calibration: oak_pb2.OakCalibration = await EventClient(config).request_reply("/calibration", Empty(), decode=True)
    print(calibration)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(prog="amiga-camera-calibration")
    parser.add_argument("--service-config", type=Path, required=True, help="The camera config.")
    args = parser.parse_args()

    asyncio.run(main(args.service_config))
```

:::tip
We highly recommend to have some basic knowledge about
[**`asyncio`**](https://docs.python.org/3/library/asyncio.html). 
+::: diff --git a/website/docs/examples/camera_calibration/camera-calibration.md b/website/docs/examples/camera_calibration/camera-calibration.md deleted file mode 100644 index 9ddc51ce2..000000000 --- a/website/docs/examples/camera_calibration/camera-calibration.md +++ /dev/null @@ -1,74 +0,0 @@ ---- -id: camera-calibration -title: Camera Calibration ---- - -## Camera Calibration - -The requirements to run this example are to have a -[**farm-ng brain**](/docs/brain/) running Oak cameras and that -your PC is on the same local network as the brain. - -### 1. Install the [farm-ng Brain ADK package](/docs/brain/brain-install) - -### 2. Setup - -:::tip - -It is recommended to also install these dependencies and run the -example in the brain ADK virtual environment. - -::: - -Create first a virtual environment - -```bash -python3 -m venv venv -source venv/bin/activate -``` - -```bash -# assuming you're already in the amiga-dev-kit/ directory -cd farm-ng-amiga/py/examples/camera_calibration -``` - -### 3. Install the example's dependencies - -```bash -pip3 install -r requirements.txt -``` - -### 4. Execute the Python script - -```bash -python3 read_oak_calibration.py --port 50051 -``` - -:::info -By default, the camera address is assumed top be `localhost`. -::: - -### 5. Customize run of the Script - -Usage example: - -```bash -python3 read_oak_calibration.py --address 192.168.1.93 --port 50051 -``` - -:::info -The `--address` will be the IP address of the Amiga. If you need help understanding the -structure of the command line use `python3 read_oak_calibration.py --help`. -::: - -### 6. Code overview - -Here you can review the code and gain a closer look at how this -example is done. 
- -#### [Link to `camera_calibration/read_oak_calibration.py`](https://github.com/farm-ng/farm-ng-amiga/blob/main/py/examples/camera_calibration/read_oak_calibration.py) - -:::tip -We highgly recommend to have some basic knowledge about -[**`asyncio`**](https://docs.python.org/3/library/asyncio.html). -::: diff --git a/website/docs/examples/camera_client/README.md b/website/docs/examples/camera_client/README.md index 2d32ece58..006b8c3ce 100644 --- a/website/docs/examples/camera_client/README.md +++ b/website/docs/examples/camera_client/README.md @@ -67,7 +67,7 @@ To customize the run, you need to update the `service_config.json` by modifying ### 5. Code overview -In this example we use the `EventClient` with the subscribe method to receive the camera stream. +In this example we use the `EventClient` with the `subscribe` method to receive the camera stream. ```python async def main(service_config_path: Path) -> None: From b82c98448f942de967e07343e954b46c81ea02f8 Mon Sep 17 00:00:00 2001 From: edgarriba Date: Tue, 26 Sep 2023 13:11:43 +0000 Subject: [PATCH 03/14] update camera settings example --- .../docs/examples/camera_settings/README.md | 120 ++++++++++++++++++ 1 file changed, 120 insertions(+) create mode 100644 website/docs/examples/camera_settings/README.md diff --git a/website/docs/examples/camera_settings/README.md b/website/docs/examples/camera_settings/README.md new file mode 100644 index 000000000..3e16e9796 --- /dev/null +++ b/website/docs/examples/camera_settings/README.md @@ -0,0 +1,120 @@ +--- +id: camera-settings +title: Camera Settings +--- + +## Camera Settings + +The requirements to run this example are to have a +[**farm-ng brain**](/docs/brain/) running Oak cameras and that +your PC is on the same local network as the brain. + +### 1. Install the [farm-ng Brain ADK package](/docs/brain/brain-install) + +### 2. Setup + +:::tip + +It is recommended to also install these dependencies and run the +example in the brain ADK virtual environment. 
+ +::: + +Create first a virtual environment + +```bash +python3 -m venv venv +source venv/bin/activate +``` + +```bash +# assuming you're already in the amiga-dev-kit/ directory +cd farm-ng-amiga/py/examples/camera_settings +``` + +### 3. Install the example's dependencies + +```bash +pip3 install -r requirements.txt +``` + +### 4. Execute the Python script + +```bash +python3 main.py --service-config service_config.json --camera-settings camera_settings.json +``` + +### 5. Customize run + +```bash +# usage: amiga-camera-settings [-h] --service-config SERVICE_CONFIG [--camera-settings CAMERA_SETTINGS] [--stream-name {rgb,mono}] +# +# optional arguments: +# -h, --help show this help message and exit +# --service-config SERVICE_CONFIG +# The camera service config. +# --camera-settings CAMERA_SETTINGS +# The camera control settings. +# --stream-name {rgb,mono} +# The stream name to set the settings for. +``` + +### 6. Code overview + +In this example we use the `EventClient` with the `request_rreply` method to receive and set the camera settings. The `request_reply` method is a coroutine that returns a `Future` object. The `Future` object is used to retrieve the result of the request. + +To get the settings, the path to the camera service is `/camera_settings/` and the request message is `Empty`. The reply message is `oak_pb2.CameraSettings` and contains the current camera settings. + +To set the settings, the path to the camera service is `/camera_settings/` and the request message is `oak_pb2.CameraSettings`. + +You can explore the files `service_config.json` to customize the camera service configuration and `camera_settings.json` to customize the camera settings. + +```python +async def main(service_config_path: Path, settings_config_path: Path, stream_name: str) -> None: + """Request the camera calibration from the camera service. + + Args: + service_config_path (Path): The path to the camera service config. 
+ settings_config_path (Path): The path to the camera settings config. + stream_name (str): The stream name to set the settings for. + """ + # create a client to the camera service + config: EventServiceConfig = proto_from_json_file(service_config_path, EventServiceConfig()) + + # create camera setting from the json file + camera_settings_request: oak_pb2.CameraSettings | Empty = Empty() + if settings_config_path: + camera_settings_request = proto_from_json_file(settings_config_path, oak_pb2.CameraSettings()) + + # send a request to the camera service + # the camera service will reply with the current camera settings + # available settings are: + # - /camera_settings/rgb + # - /camera_settings/mono + camera_settings: oak_pb2.CameraSettings = await EventClient(config).request_reply( + f"/camera_settings/{stream_name}", camera_settings_request, decode=True + ) + + print(camera_settings) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(prog="amiga-camera-settings") + parser.add_argument("--service-config", type=Path, required=True, help="The camera service config.") + parser.add_argument("--camera-settings", type=Path, required=False, help="The camera control settings.") + parser.add_argument( + "--stream-name", + type=str, + choices=["rgb", "mono"], + default="rgb", + help="The stream name to set the settings for.", + ) + args = parser.parse_args() + + asyncio.run(main(args.service_config, args.camera_settings, args.stream_name)) +``` + +:::tip +We highgly recommend to have some basic knowledge about +[**`asyncio`**](https://docs.python.org/3/library/asyncio.html). 
+::: From f12f44cab00bd83834fe4489336e7db2274ffba6 Mon Sep 17 00:00:00 2001 From: edgarriba Date: Tue, 26 Sep 2023 13:33:28 +0000 Subject: [PATCH 04/14] add the camera to pointcloud example --- .../docs/examples/camera_pointcloud/README.md | 162 ++++++++++++++++++ .../docs/examples/camera_settings/README.md | 2 +- 2 files changed, 163 insertions(+), 1 deletion(-) create mode 100644 website/docs/examples/camera_pointcloud/README.md diff --git a/website/docs/examples/camera_pointcloud/README.md b/website/docs/examples/camera_pointcloud/README.md new file mode 100644 index 000000000..576f9889c --- /dev/null +++ b/website/docs/examples/camera_pointcloud/README.md @@ -0,0 +1,162 @@ +--- +id: camera-pointcloud +title: Camera Pointcloud +--- + +## Camera Pointcloud + +The requirements to run this example are to have a +[**farm-ng brain**](/docs/brain/) running Oak cameras and that +your PC is on the same local network as the brain. + +### 1. Install the [farm-ng Brain ADK package](/docs/brain/brain-install) + +### 2. Setup + +:::tip + +It is recommended to also install these dependencies and run the +example in the brain ADK virtual environment. + +::: + +Create first a virtual environment + +```bash +python3 -m venv venv +source venv/bin/activate +``` + +```bash +# assuming you're already in the amiga-dev-kit/ directory +cd farm-ng-amiga/py/examples/poincloud +``` + +### 3. Install the example's dependencies + +```bash +pip3 install -r requirements.txt +``` + +### 4. Execute the Python script + +```bash +python3 main.py --service-config service_config.json +``` + +### 5. Customize run + +```bash +# usage: amiga-camera-pointcloud [-h] --service-config SERVICE_CONFIG [--save-disparity] [--save-pointcloud] +# +# optional arguments: +# -h, --help show this help message and exit +# --service-config SERVICE_CONFIG +# The camera config. +# --save-disparity Save the disparity image. +# --save-pointcloud Save the depth image. +``` + +### 6. 
Code overview

In this example we get the camera calibration from the camera service, which, jointly with the `disparity` image, will be used to generate the `pointcloud`.

First, we use the `EventClient` to request the camera calibration from the camera service. The camera calibration is a `oak_pb2.CameraCalibration` message that contains the camera intrinsics and extrinsics.

```python
# create a client to the camera service
config: EventServiceConfig = proto_from_json_file(args.service_config, EventServiceConfig())

camera_client = EventClient(config)

# get the calibration message
calibration_proto: oak_pb2.OakCalibration = await camera_client.request_reply("/calibration", Empty(), decode=True)

# NOTE: The OakCalibration message contains the camera calibration data for all the cameras.
# Since we are interested in the disparity image, we will use the calibration data for the right camera
# which is the first camera in the list.
camera_data: oak_pb2.CameraData = calibration_proto.camera_data[0]

# compute the camera matrix from the calibration data
camera_matrix: Tensor = get_camera_matrix(camera_data)
```

Below is the code to compute the camera matrix from the calibration data. Notice that we cast the `intrinsic_matrix` to a `Tensor` and reshape it to a 3x3 matrix for later easy integration with the
kornia library.

```python
def get_camera_matrix(camera_data: oak_pb2.CameraData) -> Tensor:
    """Compute the camera matrix from the camera calibration data.

    Args:
        camera_data (oak_pb2.CameraData): The camera calibration data.

    Returns:
        Tensor: The camera matrix with shape 3x3.
    """
    fx = camera_data.intrinsic_matrix[0]
    fy = camera_data.intrinsic_matrix[4]
    cx = camera_data.intrinsic_matrix[2]
    cy = camera_data.intrinsic_matrix[5]

    return tensor([[fx, 0, cx], [0, fy, cy], [0, 0, 1]])
```

Next, we use the `EventClient` to subscribe to the `disparity` path from the camera service. 
The `disparity` image is a `oak_pb2.OakImage` message that contains the `disparity` image data. +To compute the `pointcloud` we first need to decode the `disparity` image data to a `Tensor` and then compute the `pointcloud` from the `disparity` image using the kornia method `depth_from_disparity` +and `depth_to_3d_v2`. + +```python +async for event, message in camera_client.subscribe( + SubscribeRequest(uri=uri_pb2.Uri(path="/disparity"), every_n=5), decode=True +): + # cast image data bytes to a tensor and decode + disparity_t = decode_disparity(message, image_decoder) # HxW + + # compute the depth image from the disparity image + calibration_baseline: float = 0.075 # m + calibration_focal: float = float(camera_matrix[0, 0]) + + depth_t = K.geometry.depth.depth_from_disparity( + disparity_t, baseline=calibration_baseline, focal=calibration_focal + ) # HxW + + # compute the point cloud from the depth image + points_xyz = K.geometry.depth.depth_to_3d_v2(depth_t, camera_matrix) # HxWx3 + + # filter out points that are in the range of the camera + valid_mask = (points_xyz[..., -1:] >= 0.2) & (points_xyz[..., -1:] <= 7.5) # HxWx1 + valid_mask = valid_mask.repeat(1, 1, 3) # HxWx3 + + points_xyz = points_xyz[valid_mask].reshape(-1, 3) # Nx3 +``` +Below is the code to decode the `disparity` image data to a `Tensor`. + +```python +def decode_disparity(message: oak_pb2.OakFrame, decoder: ImageDecoder) -> Tensor: + """Decode the disparity image from the message. + + Args: + message (oak_pb2.OakFrame): The camera frame message. + decoder (ImageDecoder): The image decoder. + + Returns: + Tensor: The disparity image tensor (HxW). 
+ """ + # decode the disparity image from the message into a dlpack tensor for zero-copy + disparity_dl = decoder.decode(message.image_data) + + # cast the dlpack tensor to a torch tensor + disparity_t = torch.from_dlpack(disparity_dl) + + return disparity_t[..., 0].float() # HxW +``` + +Additionally, we can save the `disparity` image and the `pointcloud` to disk by using the `--save-disparity` and `--save-pointcloud` flags respectively. + +:::tip +We highgly recommend to have some basic knowledge about +[**`asyncio`**](https://docs.python.org/3/library/asyncio.html). +::: diff --git a/website/docs/examples/camera_settings/README.md b/website/docs/examples/camera_settings/README.md index 3e16e9796..9838667ad 100644 --- a/website/docs/examples/camera_settings/README.md +++ b/website/docs/examples/camera_settings/README.md @@ -61,7 +61,7 @@ python3 main.py --service-config service_config.json --camera-settings camera_se ### 6. Code overview -In this example we use the `EventClient` with the `request_rreply` method to receive and set the camera settings. The `request_reply` method is a coroutine that returns a `Future` object. The `Future` object is used to retrieve the result of the request. +In this example we use the `EventClient` with the `request_reply` method to receive and set the camera settings. The `request_reply` method is a coroutine that returns a `Future` object. The `Future` object is used to retrieve the result of the request. To get the settings, the path to the camera service is `/camera_settings/` and the request message is `Empty`. The reply message is `oak_pb2.CameraSettings` and contains the current camera settings. 
From d3ce744c8ae785a9431539dc73810b856bec7106 Mon Sep 17 00:00:00 2001 From: edgarriba Date: Tue, 26 Sep 2023 13:48:11 +0000 Subject: [PATCH 05/14] minor updates to the file readers --- .../docs/examples/file_converter/README.md | 38 +++++++++---------- website/docs/examples/file_reader/README.md | 4 +- 2 files changed, 20 insertions(+), 22 deletions(-) diff --git a/website/docs/examples/file_converter/README.md b/website/docs/examples/file_converter/README.md index c93ebfca7..eda1be8c5 100644 --- a/website/docs/examples/file_converter/README.md +++ b/website/docs/examples/file_converter/README.md @@ -48,25 +48,23 @@ python main.py --file-name events_09162022160753_000000.bin Use the `--help` flag to see all possible arguments for using this tool. ```bash -$ python main.py --help - -usage: Event file converter example. [-h] --file-name FILE_NAME [--output-path OUTPUT_PATH] -[--camera-name CAMERA_NAME] [--disparity-scale DISPARITY_SCALE] [--video-to-jpg] [--snapshot] - -optional arguments: - -h, --help show this help message and exit - --file-name FILE_NAME - Path to the `events.bin` file. - --output-path OUTPUT_PATH - Path to the folder where converted data will be written. - Default: /home/kyle/farm-ng/farm-ng-amiga/py/examples/file_converter - --camera-name CAMERA_NAME - The name of the camera to visualize. Default: oak0. - --disparity-scale DISPARITY_SCALE - Scale for amplifying disparity color mapping. Default: 1. - --video-to-jpg Use this flag to convert video .bin files to a series of jpg images. - Default for videos is mp4. - --snapshot Use this flag if the .bin file is a single snapshot. Output will be jpg images. +# usage: Event file converter example. 
[-h] --file-name FILE_NAME [--output-path OUTPUT_PATH] [--camera-name CAMERA_NAME] +# [--view-name {rgb,left,right,disparity}] [--disparity-scale DISPARITY_SCALE] +# [--video-to-jpg] +# +# optional arguments: +# -h, --help show this help message and exit +# --file-name FILE_NAME +# Path to the `events.bin` file. +# --output-path OUTPUT_PATH +# Path to the folder where converted data will be written. +# --camera-name CAMERA_NAME +# The name of the camera to visualize. Default: oak0. +# --view-name {rgb,left,right,disparity} +# The name of the camera view to visualize. Default: rbg. +# --disparity-scale DISPARITY_SCALE +# Scale for amplifying disparity color mapping. Default: 1. +# --video-to-jpg Use this flag to convert video .bin files to a series of jpg images. Default is mp4. ``` For instance, you can change the camera that is played back from the default of `oak0`. E.g., @@ -84,7 +82,7 @@ python main.py --file-name events_09162022160753_000000.bin --camera-name oak1 - Or you can convert a "snapshot" log to one jpg per view. ```bash -python main.py --file-name cpy_data/farm_ng/2023_01_06_13_24_33_445932_snapshot_b42d218.bin --snapshot +python main.py --file-name cpy_data/farm_ng/2023_01_06_13_24_33_445932_snapshot_b42d218.bin --video-to-jpg ``` > NOTE: video logs will not convert if the `--snapshot` flag is used. diff --git a/website/docs/examples/file_reader/README.md b/website/docs/examples/file_reader/README.md index 1cb4a1dad..7ece4d21d 100644 --- a/website/docs/examples/file_reader/README.md +++ b/website/docs/examples/file_reader/README.md @@ -72,10 +72,10 @@ python main.py --file-name /events_12052022115852.bin ``` Optionally, you can change the camera that is played back from -the default of `oak0`. E.g., +the default of `oak0`. E.g., and the view `rgb`. E.g. 
```bash -python main.py --file-name /events_12052022115852.bin --camera-name oak1 +python main.py --file-name /events_12052022115852.bin --camera-name oak1 --view-name rgb ``` Congratulations two videos should now pop up and play! One should From fb2e71f864aec655adea3defbd326651e4a4c088 Mon Sep 17 00:00:00 2001 From: edgarriba Date: Tue, 26 Sep 2023 14:45:04 +0000 Subject: [PATCH 06/14] add service client tutorial --- .../docs/examples/service_client/README.md | 208 ++++++++++++++++++ 1 file changed, 208 insertions(+) create mode 100644 website/docs/examples/service_client/README.md diff --git a/website/docs/examples/service_client/README.md b/website/docs/examples/service_client/README.md new file mode 100644 index 000000000..0c9b52322 --- /dev/null +++ b/website/docs/examples/service_client/README.md @@ -0,0 +1,208 @@ +--- +id: service-client +title: Service Client +--- + +This example is the hello world of the farm-ng-core Event Service framework. + +The Event Service is a publish-subscribe service that allows to stream +data from the farm-ng-core services to its respective clients. The central piece +of the Event Service is the `EventServiceGrpc` service that is the main +class encapsulating the gRPC server. + +In the following example, we will explain how to define your own service +and how to use the `EventClient` to subscribe to the service. + +### Requirements + +This example only requires the farm-ng-core package. + +```bash +pip3 install farm-ng-core + +``` +:::tip +We highgly recommend to have some basic knowledge about +[**`asyncio`**](https://docs.python.org/3/library/asyncio.html), [**`gRPC`**](https://grpc.io/docs/what-is-grpc/introduction/) and [**`protobuf`**](https://developers.google.com/protocol-buffers/docs/pythontutorial). +::: + + +### Define your protobuf messages + +The farm-ng-core Event Service uses protobuf messages and leverages +gRPC to communicate between client and server. The first step is to +define your own protobuf messages. 
For this example, we provide the `two_ints.proto`:

```protobuf
syntax = "proto3";

// The request message containing the two integers.
message AddTwoIntsRequest {
  int32 a = 1;
  int32 b = 2;
}

// The response message containing the sum.
message AddTwoIntsResponse {
  int32 sum = 1;
}
```

We provide the generated Python code in the `two_ints_pb2.py` file and
its corresponding `two_ints_pb2.pyi` file for type hinting. Optionally,
you can regenerate the Python code yourself using the `genprotos.py`.

### Create your own service

In order to create your own service, we will create an auxiliary class
to encapsulate the `EventServiceGrpc` class. This auxiliary class will
be called `AddTwoIntServer` and will easily allow us to define the
service.

The class `AddTwoIntServer` will have a method called `request_reply_handler`
that will be called every time a request is received. The method
`request_reply_handler` will receive two arguments: the `Event` and the
request message. The `Event` contains the metadata of the request and
the request message contains the actual request message.

```python
class AddTwoIntServer:
    """A simple service that implements the AddTwoInts service."""

    def __init__(self, event_service: EventServiceGrpc) -> None:
        """Initialize the service.

        Args:
            event_service: The event service to use for communication. 
+ """ + self._event_service = event_service + self._event_service.add_request_reply_handler(self.request_reply_handler) + + @property + def logger(self) -> logging.Logger: + """Return the logger for this service.""" + return self._event_service.logger + + async def request_reply_handler(self, event: Event, message: two_ints_pb2.AddTwoIntsRequest) -> Message: + """The callback for handling request/reply messages.""" + if event.uri.path == "/sum": + self.logger.info(f"Requested to sum {message.a} + {message.b}") + + return two_ints_pb2.AddTwoIntsResponse(sum=message.a + message.b) + + return Empty() +``` + +To wrap up, we need to create a `service.py` program that will instantiate +the `EventServiceGrpc` and the `AddTwoIntServer` and run the service leveraging +the `serve` method with the `asyncio` event loop. + +```python +if __name__ == "__main__": + parser = argparse.ArgumentParser(prog="farm-ng-service") + parser.add_argument("--service-config", type=Path, required=True, help="The service config.") + args = parser.parse_args() + + # load the service config + service_config: EventServiceConfig = proto_from_json_file(args.service_config, EventServiceConfig()) + + # create the grpc server + event_service: EventServiceGrpc = EventServiceGrpc(grpc.aio.server(), service_config) + + loop = asyncio.get_event_loop() + + try: + # wrap and run the service + loop.run_until_complete(AddTwoIntServer(event_service).serve()) + except KeyboardInterrupt: + print("Exiting...") + finally: + loop.close() +``` + +Additionally, we provide a `service_config.json` file that contains the +configuration of the service. The `service_config.json` file contains +the following fields: + +```json +{ + "name": "two_int", + "port": 5001, + "host": "localhost", + "log_level": "DEBUG" +} +``` + +### Create the client + +The client is a simple Python script that will connect to the service +and subscribe to the service. 
For the client, we will leverage the +`EventClient` class that will allow us to interact with the service. + +In this example, we will create a `client.py` program that will instantiate +the `EventServiceConfig` and the `EventClient`. The `EventClient` will +request the service to sum two integers implementing the `request_reply` +method. The `request_reply` method is a coroutine that triggers a the +`request_reply_handler` method in the service and returns a `Future` +object. The `Future` object is used to retrieve the result of the +request. + +```python +async def main() -> None: + parser = argparse.ArgumentParser(prog="farm-ng-client") + parser.add_argument("--service-config", type=Path, required=True, help="The service config.") + parser.add_argument("--a", type=int, required=True, help="The first integer.") + parser.add_argument("--b", type=int, required=True, help="The second integer.") + args = parser.parse_args() + + # create a client to the camera service + config: EventServiceConfig = proto_from_json_file(args.service_config, EventServiceConfig()) + + # request the sum of two integers + result = await EventClient(config).request_reply("/sum", AddTwoIntsRequest(a=args.a, b=args.b), decode=True) + + print(f"Result of {args.a} + {args.b} = {result.sum}") + + +if __name__ == "__main__": + asyncio.run(main()) +``` + +## Run the example + +### 1. Run the service + +In a first terminal, run the service: + +```bash +python service.py --service-config config.json +``` + +you should see the following output: + +```bash +Starting server on port 5001 +Server started +Sending /health: 0 to 0 clients +Sending /health: 1 to 0 clients +Sending /health: 2 to 0 clients +Sending /health: 3 to 0 clients +... +... +``` + +### 2. 
Run the client + +In a second terminal, run the client: + +```bash +python client.py --service-config config.json --a 1 --b 2 +``` + +you should see the following output: + +```bash +Result of 1 + 2 = 3 +``` From 81a458b922a25bb08f6766de4ff1c2a6ba4eeee4 Mon Sep 17 00:00:00 2001 From: edgarriba Date: Tue, 26 Sep 2023 16:20:36 +0000 Subject: [PATCH 07/14] remove people detection --- .../people_detection/people_detection.md | 121 ------------------ 1 file changed, 121 deletions(-) delete mode 100644 website/docs/examples/people_detection/people_detection.md diff --git a/website/docs/examples/people_detection/people_detection.md b/website/docs/examples/people_detection/people_detection.md deleted file mode 100644 index a14caff60..000000000 --- a/website/docs/examples/people_detection/people_detection.md +++ /dev/null @@ -1,121 +0,0 @@ ---- -id: people-detection -title: People Detection ---- - - -This example shows how to use the `farm-ng-amiga` library to -detect people in a video stream. - -It also shows how to implement a service and client via grpc. - -The requirements to run this example are to have a -[**farm-ng brain**](/docs/brain/) running Oak cameras and that -your PC is on the same local network as the brain. - -For testing you can use your webcam as a replacement, which we -will go over later in this tutorial. - -![PD gif](https://user-images.githubusercontent.com/64480560/229892116-e99de4d2-577a-4c38-876f-4ba03429d52c.gif) - -### 1. Install the [farm-ng Brain ADK package](/docs/brain/brain-install) - -### 2. Setup - -:::tip - -It is recommended to also install these dependencies and run the -example in the brain ADK virtual environment. - -::: - -Create first a virtual environment - -```bash -python3 -m venv venv -source venv/bin/activate -``` - -```bash -#assuming you have the farm-ng-amiga repository. -cd farm-ng-amiga/py/examples/people_detection -``` - -### 3. 
Install Dependencies - -```bash -pip install -r requirements.txt -``` - -#### Download the model data - -In this example we use MobileNet SSD from tensorflow to be -implemented in opencv. - -Download the model weights and architecture: - -```bash -mkdir models -``` - -```bash -wget \ -https://github.com/rdeepc/ExploreOpencvDnn/raw/master/models/frozen_inference_graph.pb \ --O models/frozen_inference_graph.pb -``` - -```bash -wget \ -https://github.com/rdeepc/ExploreOpencvDnn/raw/master/models/ssd_mobilenet_v2_coco_2018_03_29.pbtxt \ --O models/ssd_mobilenet_v2_coco_2018_03_29.pbtxt -``` - -### 4. Run the service - -Open one terminal or in that same terminal run the service: - -```bash -python service.py --port 50095 --models-dir models/ -# INFO:__main__:Loaded model: /home/edgar/software/farm-ng-amiga/py/examples/people_detection/models -# INFO:__main__:Starting server on port 50095 -# INFO:__main__:Server started -``` - -![server](https://user-images.githubusercontent.com/64480560/229893034-7302d479-692a-4907-98e1-87a31b60fc19.png) - -:::tip -With this command, you can run this on either your computer or -your Amiga! -::: - -### 5. Run the Client - -In another terminal, run the a pipeline using the client: - -```bash -python main.py --port-camera 50051 --port-detector 50095 -``` - -And you should see a window with the video stream and the -detected people. The server is a process receives the decoded -images then computes an AI model and returns the results. If you -have a service on your machine and the client consume from the -robot, there is the transmission overhead of grpc from the -robot-camera to you laptop (service). - -:::tip -You can also run this command from either your computer or your -Amiga. -::: - -### 6. Code overview - -Here you can review the code and gain a closer look at how this -example is done. 
- -### [Link to `people_detection/main.py`](https://github.com/farm-ng/farm-ng-amiga/blob/main/py/examples/people_detection/main.py) - -:::tip -We highgly recommend to have some basic knowledge about -[**`asyncio`**](https://docs.python.org/3/library/asyncio.html). -::: From 355bd0c98fa320c082426fdd81606c69f68881cf Mon Sep 17 00:00:00 2001 From: edgarriba Date: Tue, 26 Sep 2023 18:53:48 +0000 Subject: [PATCH 08/14] add the service_counter tutorial --- .../docs/examples/service_counter/README.md | 171 ++++++++++++++++++ 1 file changed, 171 insertions(+) create mode 100644 website/docs/examples/service_counter/README.md diff --git a/website/docs/examples/service_counter/README.md b/website/docs/examples/service_counter/README.md new file mode 100644 index 000000000..694489ee4 --- /dev/null +++ b/website/docs/examples/service_counter/README.md @@ -0,0 +1,171 @@ +--- +id: service-counter +title: Service Counter +--- + +This is a subsequent example of the [**`service_client`**](/docs/examples/service_client/) example +where we will show how to use the `publish` method from `EventServiceGrpc` to publish messages +to later use the `EventClient` to interact with the service. + +In particular, we will create a service that will have a counter running in a separate +task and will publish the counter value at fixed rate. We will show how to use the client +to subscribe to the service and will print the counter value every time it receives a message. +In addition, the client will be able to request the service to reset the counter to zero. + +### Requirements + +This example only requires the farm-ng-core package. + +```bash +pip3 install farm-ng-core + +``` +:::tip +We highgly recommend to have some basic knowledge about +[**`asyncio`**](https://docs.python.org/3/library/asyncio.html), [**`gRPC`**](https://grpc.io/docs/what-is-grpc/introduction/) and [**`protobuf`**](https://developers.google.com/protocol-buffers/docs/pythontutorial). 
+::: + +### Create the service + +We first create a service that will publish the counter value at a certain +rate. For this, we will create a `counter.py` program that will +instantiate the `EventServiceGrpc` and will run the service leveraging +the `serve` method with the `asyncio` event loop. + +In the same program, we will create a `CounterService` class that will +implement the logic of the service, including the concurrent task that +will run the counter. The `CounterService` class will also have a method +to handle the `requests` from the client. The `requests` method is a +coroutine that triggers the `request_handler` method in the service, +in that case to reset the counter to zero. + +```python +class CounterServer: + def __init__(self, event_service: EventServiceGrpc) -> None: + """Initialize the service. + Args: + event_service: The event service to use for communication. + """ + self._event_service = event_service + self._event_service.add_request_reply_handler(self.request_reply_handler) + + self._counter: int = 0 + self._rate: float = 1.0 + + async def request_reply_handler(self, event: Event, message: Message) -> None: + """The callback for handling request/reply messages.""" + if event.uri.path == "/reset_counter": + self._counter = 0 + + return Empty() + + async def run(self) -> None: + """Run the main task.""" + while True: + await self._event_service.publish("/counter", Int32Value(value=self._counter)) + self._counter += 1 + await asyncio.sleep(1.0 / self._rate) + + async def serve(self) -> None: + await asyncio.gather(self._event_service.serve(), self.run()) +``` + +### Create the client + +For the client, we will create a `client.py` program that will implement a thin wrapper +class `CounterClient` around the `EventClient` class. The `CounterClient` will +have a method to `subscribe` to the events stream coming from the `/counter` path. 
+ +```python +class CounterClient: + def __init__(self, service_config: EventServiceConfig) -> None: + """Initialize the client. + Args: + service_config: The service config. + """ + self._event_client = EventClient(service_config) + + async def subscribe(self) -> None: + """Run the main task.""" + async for event, message in self._event_client.subscribe( + request=SubscribeRequest(uri=Uri(path="/counter"), every_n=1), decode=True + ): + print(f"Received message: {message}") +``` + +In the same program, we will create a `main` function that will instantiate the +`EventServiceConfig` and the `CounterClient`. The `main` function will have a +couple high level commands to `subscribe` to the `/counter` path and to `request` +the service to reset the counter to zero. + +```python +async def command_subscribe(client: CounterClient) -> None: + """Subscribe to the counter service.""" + await client.subscribe() + + +async def command_reset(client: CounterClient) -> None: + """Reset the counter.""" + await client._event_client.request_reply("/reset_counter", Empty()) +``` + +## Run the example + +### 1. Run the service + +In a first terminal, run the service: + +```bash +python counter.py --service-config service_config.json +``` + +you should see the following output: + +```bash +Starting server on port 5001 +Server started +``` + +### 2. Subscribe to the service + +In a second terminal, run the client: + +```bash +python client.py --service-config service_config.json subscribe +``` + +you should see the following output and the counter value increasing: + +```bash +Received message: value: 3 + +Received message: value: 4 + +Received message: value: 5 + +Received message: value: 6 + +... +... +``` + +### 3. 
Reset the counter + +In a third terminal, run the client: + +```bash +python client.py --service-config service_config.json reset +``` + +you should see the following output: + +```bash +Received message: +Received message: value: 1 + +Received message: value: 2 + +Received message: value: 3 +... +... +``` From 2702ce68e046d1ba898d30a9e1aa9243f167e26c Mon Sep 17 00:00:00 2001 From: edgar Date: Wed, 27 Sep 2023 00:12:31 +0200 Subject: [PATCH 09/14] add the service propagation example --- .../examples/service_propagation/README.md | 291 ++++++++++++++++++ 1 file changed, 291 insertions(+) create mode 100644 website/docs/examples/service_propagation/README.md diff --git a/website/docs/examples/service_propagation/README.md b/website/docs/examples/service_propagation/README.md new file mode 100644 index 000000000..afa02bdc0 --- /dev/null +++ b/website/docs/examples/service_propagation/README.md @@ -0,0 +1,291 @@ +--- +id: service-propagation +title: Service Propagation +--- + +In this example, we will show how create a subsystem of services that can be +composed together to create a more complex system and leverage bidirectional +communication between services to propagate information. + +In particular, we will create an `Agent` service that will have several +tasks running concurrently sampling data from different tasks. The `Agent` +service will publish the data from the tasks at fixed rate to a `/sample` path +that will be subscribed by a `Supervisor` service. + +At the same time, the `Agent` service will listen to a `/update_residual` path +to receive the residual value from another the `Supervisor` service. + +```python +class AgentServer: + def __init__(self, event_service: EventServiceGrpc) -> None: + """Initialize the service. + Args: + event_service: The event service to use for communication. 
+ """ + self._event_service = event_service + self._event_service.add_request_reply_handler(self.request_reply_handler) + + args: dict[str, float] = {} + for arg in self._event_service.config.args: + key, value = arg.split("=") + args[key] = value + + # the rate in hertz to send commands + self._rate = float(args["rate"]) + self._num_tasks = int(args["num_tasks"]) + + self._remainder: int = 1e6 + + async def request_reply_handler(self, event: Event, message) -> None: + """The callback for handling request/reply messages.""" + if event.uri.path == "/update_residual": + self._remainder = message.value + self._event_service.logger.info(f"Remainder: {self._remainder}") + + return Empty() + + async def run_task(self, task_id: int) -> None: + """Run the main task.""" + while True: + if self._remainder <= 0: + await asyncio.sleep(0.01) + continue + + message = Struct() + message["sample"] = random.random() + message["task_id"] = task_id + + await self._event_service.publish("/sample", message) + await asyncio.sleep(1.0 / self._rate) + print(f"Published sample {message['sample']} from task {task_id}") + + async def serve(self) -> None: + """Run the service.""" + tasks: list[asyncio.Task] = [asyncio.create_task(self.run_task(i)) for i in range(self._num_tasks)] + await asyncio.gather(self._event_service.serve(), *tasks) +``` + +In addition, we will create a `SupervisorServer` service that will be subscribed to the `/sample` +path of the `Agent` service. The `SupervisorServer` will listen to the samples and send a request +update to the `StorageServer` service when the sample is above a certain confidence threshold. + +```python +class SupervisorServer: + def __init__(self, event_service: EventServiceGrpc, config_list: EventServiceConfigList) -> None: + """Initialize the service. + Args: + event_service: The event service to use for communication. 
+ """ + self._event_service = event_service + + self._clients: dict[str, EventClient] = { + config.name: EventClient(config) + for config in config_list.configs + if config.name != event_service.config.name + } + + args: dict[str, float] = {} + for arg in self._event_service.config.args: + key, value = arg.split("=") + args[key] = value + + # the rate in hertz to send commands + self._confidence = float(args["confidence"]) + + async def subscribe(self, subscripton) -> None: + """Run the main task.""" + # create the event client + service_name = subscripton.uri.query.split("=")[-1] + client = self._clients[service_name] + + async for event, message in client.subscribe(subscripton, decode=True): + if message["sample"] > self._confidence: + residual = await self._clients["storage"].request_reply("/update_storage", Empty(), decode=True) + self._event_service.logger.info(f"Residual: {residual}") + await client.request_reply("/update_residual", residual) + + async def serve(self) -> None: + """Run the service.""" + tasks: list[asyncio.Task] = [] + for subscription in self._event_service.config.subscriptions: + tasks.append(asyncio.create_task(self.subscribe(subscription))) + await asyncio.gather(self._event_service.serve(), *tasks) +``` + +The last service we will create is a `StorageServer` service that will be listening +to the `/update_storage` path. The `StorageServer` will keep track of the storage +capacity and will remove from storage at fixed rate. + +```python +class StorageServer: + def __init__(self, event_service: EventServiceGrpc) -> None: + """Initialize the service. + Args: + event_service: The event service to use for communication. 
+ """ + self._event_service = event_service + self._event_service.add_request_reply_handler(self.request_reply_handler) + + self._storage: int = 0 + + args: dict[str, float] = {} + for arg in self._event_service.config.args: + key, value = arg.split("=") + args[key] = value + + # the maximum storage capacity + self._max_storage = int(args["max_storage"]) + + # the batch size to remove from storage + self._batch_size = int(args["batch_size"]) + + async def request_reply_handler(self, event: Event, message: Empty) -> None: + """The callback for handling request/reply messages.""" + if event.uri.path == "/update_storage": + self._storage += 1 + residual: int = self._max_storage - self._storage + self._event_service.logger.info( + f"Storage: {self._storage}/{self._max_storage} ({residual} remaining)") + return Int32Value(value=residual) + + return Empty() + + async def remove_from_storage(self) -> None: + """Remove from storage.""" + while True: + + if self._storage < self._batch_size: + await asyncio.sleep(0.05) + continue + + self._storage -= self._batch_size + + self._event_service.logger.info(f"Removed from storage: {self._batch_size}/{self._max_storage}") + + await asyncio.sleep(0.1) + + async def serve(self) -> None: + tasks: list[asyncio.Task] = [ + asyncio.create_task(self._event_service.serve()), + asyncio.create_task(self.remove_from_storage()), + ] + await asyncio.gather(*tasks) +``` + +### Requirements + +This example only requires the farm-ng-core package. + +```bash +pip3 install farm-ng-core +``` + +:::tip +We highgly recommend to have some basic knowledge about +[**`asyncio`**](https://docs.python.org/3/library/asyncio.html), +[**`gRPC`**](https://grpc.io/docs/what-is-grpc/introduction/) and +[**`protobuf`**](https://developers.google.com/protocol-buffers/docs/pythontutorial). 
+::: + +### Service configuration + +The service configuration is defined in the `service_config.json` file which describes how each service + instance is connected to each other and how they are configured. As an example, the `agent-1` service +is configured as follows: + +```json +{ + "name": "agent-1", + "args": [ + "rate=1.0", + "num_tasks=3" + ], + "subscriptions": [ + { + "uri": "event://supervisor-1/sample?service=agent-1" + } + ] +} +``` + +## Run the example + +### 1. Run the storage service + +```bash +python storage.py --service-config service_config.json --service-name storage +``` + +### 2. Run the supervisor services + +```bash +python supervisor.py --service-config service_config.json --service-name supervisor-1 +python supervisor.py --service-config service_config.json --service-name supervisor-2 +``` + +### 3. Run the agent services + +```bash +python agent.py --service-config service_config.json --service-name agent-1 +python agent.py --service-config service_config.json --service-name agent-2 +python agent.py --service-config service_config.json --service-name agent-3 +``` + +### Expected output + +In the `storage` service terminal, you should see the capacity of the storage +which is decreasing as the `agent` services are publishing samples. As soon as +the storage capacity is full, the `agent` services will stop publishing samples +until the `supervisor` services request the `agent` services to update the +residual value. 
+ +```bash +Storage: 37/500 (463 remaining) +Storage: 38/500 (462 remaining) +Storage: 39/500 (461 remaining) +Storage: 40/500 (460 remaining) +Storage: 41/500 (459 remaining) +Storage: 42/500 (458 remaining) +Storage: 43/500 (457 remaining) +Storage: 44/500 (456 remaining) +Storage: 45/500 (455 remaining) +Storage: 46/500 (454 remaining) +Storage: 47/500 (453 remaining) +Storage: 48/500 (452 remaining) +``` + +In the `supervisor` service terminal, you should see the residual value +decreasing as the `agent` services are publishing samples. As soon as the +residual value is zero, the `supervisor` services will request the `agent` +services to update the residual value. + +```bash +INFO:supervisor-1:Residual: value: 455 + +Residual: value: 454 + +INFO:supervisor-1:Residual: value: 454 + +Residual: value: 453 + +INFO:supervisor-1:Residual: value: 453 + +Residual: value: 452 +``` + +Finally, in the `agent` service terminal, you should see the samples being +published at fixed rate. As soon as the residual value is zero, the `agent` +services will stop publishing samples until the `supervisor` services request +the `agent` services to update the residual value. 
+ +```bash +Published sample 0.7213814924044664 from task 1 +Published sample 0.19269278385883515 from task 2 +Published sample 0.41566478402029516 from task 3 +Published sample 0.6616156115367178 from task 4 +Published sample 0.699833473381781 from task 5 +Published sample 0.1892017291616711 from task 6 +Published sample 0.6031618279156414 from task 7 +Published sample 0.6306753264890216 from task 8 +Published sample 0.9169593227439662 from task 9 +``` From e45cffb5c79c56d46312cd5c47fbdf16dce92c64 Mon Sep 17 00:00:00 2001 From: Kyle Coble Date: Tue, 26 Sep 2023 18:43:18 -0400 Subject: [PATCH 10/14] Update motor state stream example --- .../docs/examples/motor_state/motor-state.md | 21 ++++++++++++------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/website/docs/examples/motor_state/motor-state.md b/website/docs/examples/motor_state/motor-state.md index 1f8d89f1b..9cad4a8ee 100644 --- a/website/docs/examples/motor_state/motor-state.md +++ b/website/docs/examples/motor_state/motor-state.md @@ -3,10 +3,15 @@ id: motor-state title: Motor State --- -# Amiga Motor State Stream +# Amiga Motor State Stream Example -Currently this is a very basic example showing how to access and -decode the `MotorState` values streamed by the canbus service. +Currently this is a very basic example showing how to access and decode the +`MotorState` values streamed by the canbus service. + +:::info +There will be no `/motor_states` stream if your amiga is e-stopped by a physical e-stop press. +The e-stop cuts the power to the motors, so they do not send their state on the CAN bus. +::: ## Setup @@ -27,14 +32,14 @@ pip install -r requirements.txt ## Run example -Specify the file (download before) +Specify the `host` field with the IP address of your amiga +in the `service_config.json` file. +As a debugging step, ensure you can ping the amiga at that IP address. 
```bash -python main.py --canbus-port 50060 # --address -# +python main.py --service-config service_config.json ``` ## Expected output -You should see a printed stream of the current `MotorState` for -all detected motors in your terminal. +You should see a printed stream of the current `MotorState` for all detected motors in your terminal. From c5951e614c65a3d7f21c2d678654420497978846 Mon Sep 17 00:00:00 2001 From: Kyle Coble Date: Tue, 26 Sep 2023 18:44:57 -0400 Subject: [PATCH 11/14] Add vehicle twist example --- website/docs/examples/vehicle_twist/README.md | 65 +++++++++++++++++++ 1 file changed, 65 insertions(+) create mode 100644 website/docs/examples/vehicle_twist/README.md diff --git a/website/docs/examples/vehicle_twist/README.md b/website/docs/examples/vehicle_twist/README.md new file mode 100644 index 000000000..9b1624593 --- /dev/null +++ b/website/docs/examples/vehicle_twist/README.md @@ -0,0 +1,65 @@ +--- +id: vehicle-twist +title: Vehicle Twist +--- + +# Amiga Vehicle Twist example + +Currently this is a very basic example showing how to send `Twist2d` proto messages +to the canbus service to control the amiga. + +## Setup + +Create first a virtual environment + +```bash +cd farm-ng-amiga +python3 -m venv venv +source venv/bin/activate +``` + +## Install + +```bash +cd py/examples/vehicle_twist +pip install -r requirements.txt +``` + +## Run example + +Specify the `host` field with the IP address of your amiga +in the `service_config.json` file. +As a debugging step, ensure you can ping the amiga at that IP address. + +```bash +python main.py --service-config service_config.json +``` + +:::warning +WARNING: When the dashboard is in auto mode, this will cause the Amiga to drive. +Make sure the area is clear before using this. + +You can also test this by sending the commands when the Amiga dashboard is +not in `AUTO READY` or `AUTO ACTIVE` and see the commands being sent +with the red needle on the auto page. 
+::: + +### Drive the amiga + +Use the `WASD` keys to drive the amiga in the window that pops up after when run the script. + +`w` & `s` will increment the linear velocity up / down respectively. + +`a` & `d` will increment the angular velocity left / right respectively. + +:::caution +The commanded speed will persist! +::: + +Use space bar to set all velocities back to 0. + +Use `q` to quit the application. + +## Expected output + +You should see a printed stream of the `Twist2d` messages you are sending to the canbus service. From 2be6ace001ea4beb41b4b941af77989c6d768e3f Mon Sep 17 00:00:00 2001 From: Kyle Coble Date: Tue, 26 Sep 2023 18:52:32 -0400 Subject: [PATCH 12/14] Formatting & some typos --- .../examples/camera_calibration/README.md | 16 ++++++--- website/docs/examples/camera_client/README.md | 7 ++-- .../docs/examples/camera_pointcloud/README.md | 34 ++++++++++++------- .../docs/examples/camera_settings/README.md | 21 ++++++++---- .../docs/examples/file_converter/README.md | 8 +++-- website/docs/examples/file_reader/README.md | 3 +- .../docs/examples/service_client/README.md | 10 +++--- .../docs/examples/service_counter/README.md | 13 ++++--- .../examples/service_propagation/README.md | 2 +- 9 files changed, 73 insertions(+), 41 deletions(-) diff --git a/website/docs/examples/camera_calibration/README.md b/website/docs/examples/camera_calibration/README.md index 696879531..d99c25a88 100644 --- a/website/docs/examples/camera_calibration/README.md +++ b/website/docs/examples/camera_calibration/README.md @@ -52,7 +52,7 @@ By default, the camera address is assumed top be `localhost`. ```bash # usage: amiga-camera-calibration [-h] --service-config SERVICE_CONFIG -# +# # optional arguments: # -h, --help show this help message and exit # --service-config SERVICE_CONFIG @@ -61,9 +61,14 @@ By default, the camera address is assumed top be `localhost`. ### 6. 
Code overview -In this example we use the `EventClient` with the `request_rreply` method to receive the camera camera calibration. The `request_reply` method is a coroutine that returns a `Future` object. The `Future` object is used to retrieve the result of the request. +In this example we use the `EventClient` with the `request_reply` +method to receive the camera camera calibration. +The `request_reply` method is a coroutine that returns a `Future` object. +The `Future` object is used to retrieve the result of the request. -The path to the calibration service is `/calibration` and the request message is `Empty`. The response message is `OakCalibration`, which is automatically decoded by the `request_reply` method using the `decode=True` argument. +The path to the calibration service is `/calibration` and the request message is `Empty`. +The response message is `OakCalibration`, which is automatically decoded by the `request_reply` +method using the `decode=True` argument. ```python async def main(service_config_path: Path) -> None: @@ -76,7 +81,8 @@ async def main(service_config_path: Path) -> None: config: EventServiceConfig = proto_from_json_file(service_config_path, EventServiceConfig()) # get the calibration message - calibration: oak_pb2.OakCalibration = await EventClient(config).request_reply("/calibration", Empty(), decode=True) + calibration: oak_pb2.OakCalibration = + await EventClient(config).request_reply("/calibration", Empty(), decode=True) print(calibration) @@ -89,6 +95,6 @@ if __name__ == "__main__": ``` :::tip -We highgly recommend to have some basic knowledge about +We highly recommend to have some basic knowledge about [**`asyncio`**](https://docs.python.org/3/library/asyncio.html). 
::: diff --git a/website/docs/examples/camera_client/README.md b/website/docs/examples/camera_client/README.md index 006b8c3ce..f661000e4 100644 --- a/website/docs/examples/camera_client/README.md +++ b/website/docs/examples/camera_client/README.md @@ -56,14 +56,15 @@ the `WifiClient` (coming soon) python3 main.py --help # usage: amiga-camera-stream [-h] --service-config SERVICE_CONFIG -# +# # optional arguments: # -h, --help show this help message and exit # --service-config SERVICE_CONFIG # The camera config. ``` -To customize the run, you need to update the `service_config.json` by modifying the `host` and `port` fields. +To customize the run, you need to update the `service_config.json` +by modifying the `host` and `port` fields. ### 5. Code overview @@ -105,6 +106,6 @@ if __name__ == "__main__": ``` :::tip -We highgly recommend to have some basic knowledge about +We highly recommend to have some basic knowledge about [**`asyncio`**](https://docs.python.org/3/library/asyncio.html). ::: diff --git a/website/docs/examples/camera_pointcloud/README.md b/website/docs/examples/camera_pointcloud/README.md index 576f9889c..613418e5d 100644 --- a/website/docs/examples/camera_pointcloud/README.md +++ b/website/docs/examples/camera_pointcloud/README.md @@ -48,7 +48,7 @@ python3 main.py --service-config service_config.json ```bash # usage: amiga-camera-pointcloud [-h] --service-config SERVICE_CONFIG [--save-disparity] [--save-pointcloud] -# +# # optional arguments: # -h, --help show this help message and exit # --service-config SERVICE_CONFIG @@ -59,11 +59,14 @@ python3 main.py --service-config service_config.json ### 6. Code overview -In this example we use +In this example we use -In this example we get the camera calibration from the camera service that jointly with the `disparity` image we will be used to generate the `pointcloud`. 
+In this example we get the camera calibration from the camera service that jointly with +the `disparity` image we will be used to generate the `pointcloud`. -First, we use the `EventClient` to request the camera calibration from the camera service. The camera calibration is a `oak_pb2.CameraCalibration` message that contains the camera intrinsics and extrinsics. +First, we use the `EventClient` to request the camera calibration from the camera service. +The camera calibration is a `oak_pb2.CameraCalibration` message that +contains the camera intrinsics and extrinsics. ```python # create a client to the camera service @@ -72,7 +75,8 @@ config: EventServiceConfig = proto_from_json_file(args.service_config, EventServ camera_client = EventClient(config) # get the calibration message -calibration_proto: oak_pb2.OakCalibration = await camera_client.request_reply("/calibration", Empty(), decode=True) +calibration_proto: oak_pb2.OakCalibration = + await camera_client.request_reply("/calibration", Empty(), decode=True) # NOTE: The OakCalibration message contains the camera calibration data for all the cameras. # Since we are interested in the disparity image, we will use the calibration data for the right camera @@ -83,8 +87,9 @@ camera_data: oak_pb2.CameraData = calibration_proto.camera_data[0] camera_matrix: Tensor = get_camera_matrix(camera_data) ``` -Below is the code to compute the camera matrix from the calibration data. Notice that we cast the `intrinsic_matrix` to a `Tensor` and reshape it to a 3x3 matrix for later easy integration with the -kornia library. +Below is the code to compute the camera matrix from the calibration data. +Notice that we cast the `intrinsic_matrix` to a `Tensor` and reshape it to +a 3x3 matrix for later easy integration with the kornia library. 
```python def get_camera_matrix(camera_data: oak_pb2.CameraData) -> Tensor: @@ -104,9 +109,12 @@ def get_camera_matrix(camera_data: oak_pb2.CameraData) -> Tensor: return tensor([[fx, 0, cx], [0, fy, cy], [0, 0, 1]]) ``` -Next, we use the `EventClient` to subsribe to the `disparity` path from the camera service. The `disparity` image is a `oak_pb2.OakImage` message that contains the `disparity` image data. -To compute the `pointcloud` we first need to decode the `disparity` image data to a `Tensor` and then compute the `pointcloud` from the `disparity` image using the kornia method `depth_from_disparity` -and `depth_to_3d_v2`. +Next, we use the `EventClient` to subsribe to the `disparity` path from the camera service. +The `disparity` image is a `oak_pb2.OakImage` message that contains the `disparity` image data. + +To compute the `pointcloud` we first need to decode the `disparity` image data to a `Tensor` +and then compute the `pointcloud` from the `disparity` image +using the kornia method `depth_from_disparity` and `depth_to_3d_v2`. ```python async for event, message in camera_client.subscribe( @@ -132,6 +140,7 @@ async for event, message in camera_client.subscribe( points_xyz = points_xyz[valid_mask].reshape(-1, 3) # Nx3 ``` + Below is the code to decode the `disparity` image data to a `Tensor`. ```python @@ -154,9 +163,10 @@ def decode_disparity(message: oak_pb2.OakFrame, decoder: ImageDecoder) -> Tensor return disparity_t[..., 0].float() # HxW ``` -Additionally, we can save the `disparity` image and the `pointcloud` to disk by using the `--save-disparity` and `--save-pointcloud` flags respectively. +Additionally, we can save the `disparity` image and the `pointcloud` to disk by +using the `--save-disparity` and `--save-pointcloud` flags respectively. :::tip -We highgly recommend to have some basic knowledge about +We highly recommend to have some basic knowledge about [**`asyncio`**](https://docs.python.org/3/library/asyncio.html). 
::: diff --git a/website/docs/examples/camera_settings/README.md b/website/docs/examples/camera_settings/README.md index 9838667ad..d3779479b 100644 --- a/website/docs/examples/camera_settings/README.md +++ b/website/docs/examples/camera_settings/README.md @@ -47,8 +47,9 @@ python3 main.py --service-config service_config.json --camera-settings camera_se ### 5. Customize run ```bash -# usage: amiga-camera-settings [-h] --service-config SERVICE_CONFIG [--camera-settings CAMERA_SETTINGS] [--stream-name {rgb,mono}] -# +# usage: amiga-camera-settings [-h] --service-config SERVICE_CONFIG +# [--camera-settings CAMERA_SETTINGS] [--stream-name {rgb,mono}] +# # optional arguments: # -h, --help show this help message and exit # --service-config SERVICE_CONFIG @@ -61,13 +62,19 @@ python3 main.py --service-config service_config.json --camera-settings camera_se ### 6. Code overview -In this example we use the `EventClient` with the `request_reply` method to receive and set the camera settings. The `request_reply` method is a coroutine that returns a `Future` object. The `Future` object is used to retrieve the result of the request. +In this example we use the `EventClient` with the `request_reply` method to receive and +set the camera settings. The `request_reply` method is a coroutine that returns a `Future` object. +The `Future` object is used to retrieve the result of the request. -To get the settings, the path to the camera service is `/camera_settings/` and the request message is `Empty`. The reply message is `oak_pb2.CameraSettings` and contains the current camera settings. +To get the settings, the path to the camera service is +`/camera_settings/` and the request message is `Empty`. +The reply message is `oak_pb2.CameraSettings` and contains the current camera settings. -To set the settings, the path to the camera service is `/camera_settings/` and the request message is `oak_pb2.CameraSettings`. 
+To set the settings, the path to the camera service is `/camera_settings/` +and the request message is `oak_pb2.CameraSettings`. -You can explore the files `service_config.json` to customize the camera service configuration and `camera_settings.json` to customize the camera settings. +You can explore the files `service_config.json` to customize the camera service configuration +and `camera_settings.json` to customize the camera settings. ```python async def main(service_config_path: Path, settings_config_path: Path, stream_name: str) -> None: @@ -115,6 +122,6 @@ if __name__ == "__main__": ``` :::tip -We highgly recommend to have some basic knowledge about +We highly recommend to have some basic knowledge about [**`asyncio`**](https://docs.python.org/3/library/asyncio.html). ::: diff --git a/website/docs/examples/file_converter/README.md b/website/docs/examples/file_converter/README.md index eda1be8c5..02e8b8ded 100644 --- a/website/docs/examples/file_converter/README.md +++ b/website/docs/examples/file_converter/README.md @@ -48,10 +48,11 @@ python main.py --file-name events_09162022160753_000000.bin Use the `--help` flag to see all possible arguments for using this tool. ```bash -# usage: Event file converter example. [-h] --file-name FILE_NAME [--output-path OUTPUT_PATH] [--camera-name CAMERA_NAME] +# usage: Event file converter example. [-h] --file-name FILE_NAME +# [--output-path OUTPUT_PATH] [--camera-name CAMERA_NAME] # [--view-name {rgb,left,right,disparity}] [--disparity-scale DISPARITY_SCALE] # [--video-to-jpg] -# +# # optional arguments: # -h, --help show this help message and exit # --file-name FILE_NAME @@ -64,7 +65,8 @@ python main.py --file-name events_09162022160753_000000.bin # The name of the camera view to visualize. Default: rbg. # --disparity-scale DISPARITY_SCALE # Scale for amplifying disparity color mapping. Default: 1. -# --video-to-jpg Use this flag to convert video .bin files to a series of jpg images. Default is mp4. 
+# --video-to-jpg Use this flag to convert video .bin files to a series of jpg images. +# Default is mp4. ``` For instance, you can change the camera that is played back from the default of `oak0`. E.g., diff --git a/website/docs/examples/file_reader/README.md b/website/docs/examples/file_reader/README.md index 7ece4d21d..55cc33ca0 100644 --- a/website/docs/examples/file_reader/README.md +++ b/website/docs/examples/file_reader/README.md @@ -75,7 +75,8 @@ Optionally, you can change the camera that is played back from the default of `oak0`. E.g., and the view `rgb`. E.g. ```bash -python main.py --file-name /events_12052022115852.bin --camera-name oak1 --view-name rgb +python main.py --file-name /events_12052022115852.bin \ + --camera-name oak1 --view-name rgb ``` Congratulations two videos should now pop up and play! One should diff --git a/website/docs/examples/service_client/README.md b/website/docs/examples/service_client/README.md index 0c9b52322..d84aa01a4 100644 --- a/website/docs/examples/service_client/README.md +++ b/website/docs/examples/service_client/README.md @@ -1,5 +1,5 @@ --- -id: service-client +id: service-client title: Service Client --- @@ -21,12 +21,14 @@ This example only requires the farm-ng-core package. pip3 install farm-ng-core ``` + :::tip -We highgly recommend to have some basic knowledge about -[**`asyncio`**](https://docs.python.org/3/library/asyncio.html), [**`gRPC`**](https://grpc.io/docs/what-is-grpc/introduction/) and [**`protobuf`**](https://developers.google.com/protocol-buffers/docs/pythontutorial). +We highly recommend to have some basic knowledge about +[**`asyncio`**](https://docs.python.org/3/library/asyncio.html), +[**`gRPC`**](https://grpc.io/docs/what-is-grpc/introduction/), +and [**`protobuf`**](https://developers.google.com/protocol-buffers/docs/pythontutorial). 
::: - ### Define your protobuf messages The farm-ng-core Event Service uses protobuf messages and leverages diff --git a/website/docs/examples/service_counter/README.md b/website/docs/examples/service_counter/README.md index 694489ee4..beee1f914 100644 --- a/website/docs/examples/service_counter/README.md +++ b/website/docs/examples/service_counter/README.md @@ -1,5 +1,5 @@ --- -id: service-counter +id: service-counter title: Service Counter --- @@ -20,9 +20,12 @@ This example only requires the farm-ng-core package. pip3 install farm-ng-core ``` + :::tip -We highgly recommend to have some basic knowledge about -[**`asyncio`**](https://docs.python.org/3/library/asyncio.html), [**`gRPC`**](https://grpc.io/docs/what-is-grpc/introduction/) and [**`protobuf`**](https://developers.google.com/protocol-buffers/docs/pythontutorial). +We highly recommend to have some basic knowledge about +[**`asyncio`**](https://docs.python.org/3/library/asyncio.html), +[**`gRPC`**](https://grpc.io/docs/what-is-grpc/introduction/), +and [**`protobuf`**](https://developers.google.com/protocol-buffers/docs/pythontutorial). 
::: ### Create the service @@ -65,7 +68,7 @@ class CounterServer: await self._event_service.publish("/counter", Int32Value(value=self._counter)) self._counter += 1 await asyncio.sleep(1.0 / self._rate) - + async def serve(self) -> None: await asyncio.gather(self._event_service.serve(), self.run()) ``` @@ -160,7 +163,7 @@ python client.py --service-config service_config.json reset you should see the following output: ```bash -Received message: +Received message: Received message: value: 1 Received message: value: 2 diff --git a/website/docs/examples/service_propagation/README.md b/website/docs/examples/service_propagation/README.md index afa02bdc0..3211d7a17 100644 --- a/website/docs/examples/service_propagation/README.md +++ b/website/docs/examples/service_propagation/README.md @@ -181,7 +181,7 @@ pip3 install farm-ng-core ``` :::tip -We highgly recommend to have some basic knowledge about +We highly recommend to have some basic knowledge about [**`asyncio`**](https://docs.python.org/3/library/asyncio.html), [**`gRPC`**](https://grpc.io/docs/what-is-grpc/introduction/) and [**`protobuf`**](https://developers.google.com/protocol-buffers/docs/pythontutorial). 
From a0ad62c861faf248b957331bc5820d7b6daea890 Mon Sep 17 00:00:00 2001 From: Kyle Coble Date: Tue, 26 Sep 2023 19:30:47 -0400 Subject: [PATCH 13/14] Rename so examples are all README (helps w/ links) --- website/docs/examples/motor_state/{motor-state.md => README.md} | 0 website/docs/examples/vnc_viewer/{vnc_viewer.md => README.md} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename website/docs/examples/motor_state/{motor-state.md => README.md} (100%) rename website/docs/examples/vnc_viewer/{vnc_viewer.md => README.md} (100%) diff --git a/website/docs/examples/motor_state/motor-state.md b/website/docs/examples/motor_state/README.md similarity index 100% rename from website/docs/examples/motor_state/motor-state.md rename to website/docs/examples/motor_state/README.md diff --git a/website/docs/examples/vnc_viewer/vnc_viewer.md b/website/docs/examples/vnc_viewer/README.md similarity index 100% rename from website/docs/examples/vnc_viewer/vnc_viewer.md rename to website/docs/examples/vnc_viewer/README.md From 8533d56b642ac9a195a971213f2ad4cc93fb3e3f Mon Sep 17 00:00:00 2001 From: Kyle Coble Date: Tue, 26 Sep 2023 19:31:02 -0400 Subject: [PATCH 14/14] Update examples sidebar --- website/sidebars.js | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/website/sidebars.js b/website/sidebars.js index 1dc9a85d1..848e0b849 100644 --- a/website/sidebars.js +++ b/website/sidebars.js @@ -134,13 +134,18 @@ const sidebars = { "Brain Examples" : [ "examples/import_log_file/import-log-file", "examples/file_reader/file-reader", - "examples/file_converter/file-converter", "examples/file_reader_can/file-reader-can", + "examples/file_converter/file-converter", "examples/camera_client/camera-client", "examples/camera_calibration/camera-calibration", - "examples/people_detection/people-detection", + "examples/camera_settings/camera-settings", + "examples/camera_pointcloud/camera-pointcloud", "examples/motor_state/motor-state", - 
"examples/vnc_viewer/vnc-viewer" + "examples/vehicle_twist/vehicle-twist", + "examples/vnc_viewer/vnc-viewer", + "examples/service_client/service-client", + "examples/service_counter/service-counter", + "examples/service_propagation/service-propagation", ], "Brain App Tutorials" : { "00 - Tutorial Introduction" :[