From cde438debbffebec3729266b344d22b5f63de7be Mon Sep 17 00:00:00 2001
From: Naomi Pentrel <5212232+npentrel@users.noreply.github.com>
Date: Wed, 11 Dec 2024 17:34:14 +0100
Subject: [PATCH] Updates to IA and styling (#3754)
---
assets/scss/_styles_project.scss | 2 +-
docs/data-ai/data/alert-on-data.md | 9 +
docs/data-ai/reference/_index.md | 1 -
docs/dev/reference/_index.md | 1 -
.../{tutorials/_index.md => tutorials.md} | 0
docs/dev/tools/tutorials/configure/_index.md | 10 -
.../tutorials/configure/build-a-mock-robot.md | 500 --------
.../tutorials/configure/configure-rover.md | 497 --------
.../tutorials/configure/pet-photographer.md | 1009 -----------------
docs/dev/tools/tutorials/control/_index.md | 10 -
.../tutorials/control/air-quality-fleet.md | 823 --------------
.../tools/tutorials/control/flutter-app.md | 790 -------------
docs/dev/tools/tutorials/control/gamepad.md | 236 ----
docs/dev/tools/tutorials/custom/_index.md | 10 -
.../controlling-an-intermode-rover-canbus.md | 352 ------
.../tools/tutorials/custom/custom-base-dog.md | 522 ---------
.../dev/tools/tutorials/get-started/_index.md | 10 -
.../tutorials/get-started/blink-an-led.md | 509 ---------
.../tutorials/get-started/confetti-bot.md | 25 -
.../tools/tutorials/get-started/lazy-susan.md | 23 -
.../tutorials/get-started/servo-mousemover.md | 301 -----
docs/dev/tools/tutorials/projects/_index.md | 10 -
.../tutorials/projects/air-filtration.md | 20 -
.../tutorials/projects/bedtime-songs-bot.md | 36 -
.../projects/build-an-outdoor-rover.md | 23 -
.../dev/tools/tutorials/projects/claw-game.md | 950 ----------------
docs/dev/tools/tutorials/projects/envvar.md | 23 -
.../tutorials/projects/foam-dart-launcher.md | 25 -
docs/dev/tools/tutorials/projects/guardian.md | 22 -
docs/dev/tools/tutorials/projects/helmet.md | 468 --------
.../projects/integrating-viam-with-openai.md | 427 -------
docs/dev/tools/tutorials/projects/light-up.md | 22 -
.../projects/make-a-plant-watering-robot.md | 398 -------
.../projects/modernize-retro-robot.md | 22 -
.../tutorials/projects/pet-treat-dispenser.md | 20 -
.../tools/tutorials/projects/plant-water.md | 23 -
docs/dev/tools/tutorials/projects/postman.md | 21 -
.../tutorials/projects/rover-typescript.md | 21 -
.../tutorials/projects/send-security-photo.md | 318 ------
docs/dev/tools/tutorials/projects/tipsy.md | 23 -
.../tutorials/projects/verification-system.md | 347 ------
docs/dev/tools/tutorials/services/_index.md | 10 -
.../services/color-detection-scuttle.md | 409 -------
.../tutorials/services/constrain-motion.md | 519 ---------
.../services/navigate-with-rover-base.md | 702 ------------
.../services/plan-motion-with-arm-gripper.md | 685 -----------
.../services/visualize-data-grafana.md | 238 ----
.../services/webcam-line-follower-robot.md | 644 -----------
docs/dev/tools/tutorials/template.md | 78 --
docs/manage/_index.md | 2 +-
docs/manage/fleet/reuse-configuration.md | 6 +-
docs/manage/manage/_index.md | 4 +-
docs/manage/reference/_index.md | 1 -
docs/manage/reference/billing.md | 2 +-
docs/manage/{manage => reference}/organize.md | 6 +-
.../manage/troubleshoot/teleoperate-custom.md | 77 ++
docs/manage/troubleshoot/teleoperate.md | 68 +-
docs/operate/control/control-logic.md | 9 +
.../control/{desktop-app.md => kiosk-app.md} | 4 +-
docs/operate/get-started/_index.md | 2 +-
docs/operate/get-started/reuse-config.md | 4 +-
docs/operate/get-started/setup.md | 4 +-
.../get-started/software-capabilities.md | 14 +
.../get-started/system-settings.md} | 6 +-
docs/operate/reference/_index.md | 1 -
65 files changed, 131 insertions(+), 12223 deletions(-)
create mode 100644 docs/data-ai/data/alert-on-data.md
rename docs/dev/tools/{tutorials/_index.md => tutorials.md} (100%)
delete mode 100644 docs/dev/tools/tutorials/configure/_index.md
delete mode 100644 docs/dev/tools/tutorials/configure/build-a-mock-robot.md
delete mode 100644 docs/dev/tools/tutorials/configure/configure-rover.md
delete mode 100644 docs/dev/tools/tutorials/configure/pet-photographer.md
delete mode 100644 docs/dev/tools/tutorials/control/_index.md
delete mode 100644 docs/dev/tools/tutorials/control/air-quality-fleet.md
delete mode 100644 docs/dev/tools/tutorials/control/flutter-app.md
delete mode 100644 docs/dev/tools/tutorials/control/gamepad.md
delete mode 100644 docs/dev/tools/tutorials/custom/_index.md
delete mode 100644 docs/dev/tools/tutorials/custom/controlling-an-intermode-rover-canbus.md
delete mode 100644 docs/dev/tools/tutorials/custom/custom-base-dog.md
delete mode 100644 docs/dev/tools/tutorials/get-started/_index.md
delete mode 100644 docs/dev/tools/tutorials/get-started/blink-an-led.md
delete mode 100644 docs/dev/tools/tutorials/get-started/confetti-bot.md
delete mode 100644 docs/dev/tools/tutorials/get-started/lazy-susan.md
delete mode 100644 docs/dev/tools/tutorials/get-started/servo-mousemover.md
delete mode 100644 docs/dev/tools/tutorials/projects/_index.md
delete mode 100644 docs/dev/tools/tutorials/projects/air-filtration.md
delete mode 100644 docs/dev/tools/tutorials/projects/bedtime-songs-bot.md
delete mode 100644 docs/dev/tools/tutorials/projects/build-an-outdoor-rover.md
delete mode 100644 docs/dev/tools/tutorials/projects/claw-game.md
delete mode 100644 docs/dev/tools/tutorials/projects/envvar.md
delete mode 100644 docs/dev/tools/tutorials/projects/foam-dart-launcher.md
delete mode 100644 docs/dev/tools/tutorials/projects/guardian.md
delete mode 100644 docs/dev/tools/tutorials/projects/helmet.md
delete mode 100644 docs/dev/tools/tutorials/projects/integrating-viam-with-openai.md
delete mode 100644 docs/dev/tools/tutorials/projects/light-up.md
delete mode 100644 docs/dev/tools/tutorials/projects/make-a-plant-watering-robot.md
delete mode 100644 docs/dev/tools/tutorials/projects/modernize-retro-robot.md
delete mode 100644 docs/dev/tools/tutorials/projects/pet-treat-dispenser.md
delete mode 100644 docs/dev/tools/tutorials/projects/plant-water.md
delete mode 100644 docs/dev/tools/tutorials/projects/postman.md
delete mode 100644 docs/dev/tools/tutorials/projects/rover-typescript.md
delete mode 100644 docs/dev/tools/tutorials/projects/send-security-photo.md
delete mode 100644 docs/dev/tools/tutorials/projects/tipsy.md
delete mode 100644 docs/dev/tools/tutorials/projects/verification-system.md
delete mode 100644 docs/dev/tools/tutorials/services/_index.md
delete mode 100644 docs/dev/tools/tutorials/services/color-detection-scuttle.md
delete mode 100644 docs/dev/tools/tutorials/services/constrain-motion.md
delete mode 100644 docs/dev/tools/tutorials/services/navigate-with-rover-base.md
delete mode 100644 docs/dev/tools/tutorials/services/plan-motion-with-arm-gripper.md
delete mode 100644 docs/dev/tools/tutorials/services/visualize-data-grafana.md
delete mode 100644 docs/dev/tools/tutorials/services/webcam-line-follower-robot.md
delete mode 100644 docs/dev/tools/tutorials/template.md
rename docs/manage/{manage => reference}/organize.md (97%)
create mode 100644 docs/manage/troubleshoot/teleoperate-custom.md
create mode 100644 docs/operate/control/control-logic.md
rename docs/operate/control/{desktop-app.md => kiosk-app.md} (52%)
rename docs/{manage/fleet/setup.md => operate/get-started/system-settings.md} (66%)
diff --git a/assets/scss/_styles_project.scss b/assets/scss/_styles_project.scss
index 2349d87585..6bd048dbe2 100644
--- a/assets/scss/_styles_project.scss
+++ b/assets/scss/_styles_project.scss
@@ -2654,7 +2654,7 @@ span.section-overview {
display: block !important;
}
- li.nav-fold.open-on-desktop.header-only > span > span {
+ li.nav-fold.header-only > span > span {
text-transform: uppercase;
font-family: Space Mono,sans-serif;
color: #aaa;
diff --git a/docs/data-ai/data/alert-on-data.md b/docs/data-ai/data/alert-on-data.md
new file mode 100644
index 0000000000..ca2cb46311
--- /dev/null
+++ b/docs/data-ai/data/alert-on-data.md
@@ -0,0 +1,9 @@
+---
+linkTitle: "Alert on data"
+title: "Alert on data"
+weight: 35
+layout: "docs"
+type: "docs"
+no_list: true
+description: "TODO"
+---
diff --git a/docs/data-ai/reference/_index.md b/docs/data-ai/reference/_index.md
index b0d7057a18..326adf53af 100644
--- a/docs/data-ai/reference/_index.md
+++ b/docs/data-ai/reference/_index.md
@@ -5,6 +5,5 @@ weight: 500
layout: "empty"
type: "docs"
empty_node: true
-open_on_desktop: true
header_only: true
---
diff --git a/docs/dev/reference/_index.md b/docs/dev/reference/_index.md
index 8b203fb9c0..b64efc3df2 100644
--- a/docs/dev/reference/_index.md
+++ b/docs/dev/reference/_index.md
@@ -5,6 +5,5 @@ weight: 300
layout: "empty"
type: "docs"
empty_node: true
-open_on_desktop: true
header_only: true
---
diff --git a/docs/dev/tools/tutorials/_index.md b/docs/dev/tools/tutorials.md
similarity index 100%
rename from docs/dev/tools/tutorials/_index.md
rename to docs/dev/tools/tutorials.md
diff --git a/docs/dev/tools/tutorials/configure/_index.md b/docs/dev/tools/tutorials/configure/_index.md
deleted file mode 100644
index 4490a561b3..0000000000
--- a/docs/dev/tools/tutorials/configure/_index.md
+++ /dev/null
@@ -1,10 +0,0 @@
----
-title: "Configuration Tutorials"
-linkTitle: "Configure"
-childTitleEndOverwrite: "Tutorial"
-weight: 20
-type: docs
-empty_node: true
-layout: "empty"
-canonical: "tutorials/"
----
diff --git a/docs/dev/tools/tutorials/configure/build-a-mock-robot.md b/docs/dev/tools/tutorials/configure/build-a-mock-robot.md
deleted file mode 100644
index 2867a3444e..0000000000
--- a/docs/dev/tools/tutorials/configure/build-a-mock-robot.md
+++ /dev/null
@@ -1,500 +0,0 @@
----
-title: "Build a Mock Robot"
-linkTitle: "Mock Robot"
-type: "docs"
-description: "Create a mock robot using just your personal computer to try using Viam without any robotic hardware."
-videos:
- [
- "/tutorials/build-a-mock-robot/mock-robot.webm",
- "/tutorials/build-a-mock-robot/mock-robot.mp4",
- ]
-images: ["/tutorials/build-a-mock-robot/mock-robot.gif"]
-videoAlt: "A mock arm's joint positions from the control tab of the Viam app."
-aliases:
- - "/tutorials/build-a-mock-robot/"
- - "/tutorials/how-to-build-a-mock-robot/"
-tags: ["mock", "simulation"]
-authors: []
-languages: ["python", "go"]
-viamresources: ["board", "arm", "motor"]
-platformarea: ["core"]
-level: "Beginner"
-date: "2022-10-11"
-# updated: ""
-cost: "0"
-toc_hide: true
----
-
-
-
-In this tutorial you will build a mock robot to learn how to configure {{< glossary_tooltip term_id="machine" text="smart machines" >}} with Viam.
-You do not need any hardware to do this tutorial.
-
-Follow this tutorial to set up and control a robot with a `fake` [arm](/components/arm/fake/), [board](/components/board/), and [motor](/components/motor/), and an additional mock {{< glossary_tooltip term_id="part" text="sub-part" >}} with a [motor](/components/motor/).
-These `fake` components interact with Viam like real hardware but do not physically exist.
-
-## Set up a mock robot
-
-### Install and start `viam-server` on your computer
-
-You'll need the following hardware and software for this tutorial:
-
-- A computer running Linux or macOS
-- [Go](https://go.dev/doc/install) or [Python 3.9+](https://www.python.org/downloads/)
-
-If you don't already have a Viam account, sign up for one on the [Viam app](https://app.viam.com).
-
-{{% snippet "setup.md" %}}
-
-### Configure your mock robot
-
-[Configure your mock robot](/configure/) to represent a physical machine with robotic board, arm, and motor hardware.
-
-If you were using physical hardware, this process would provide `viam-server` with information about what hardware is attached to it and how to communicate with it.
-For this robot, you configure `viam-server` to use `fake` components that emulate physical hardware.
-
-1. Navigate to the **CONFIGURE** tab of your machine's page in the [Viam app](https://app.viam.com).
-2. Configure a [fake board component](/components/board/fake/):
-
- - Click the **+** (Create) icon next to your machine part in the left-hand menu and select **Component**.
- - Select the `board` type, then select the `fake` model.
- - Enter the name `myBoard` for your board and click **Create**.
- - Leave the attribute `fail_new` set to false.
-
-3. Configure a [fake arm component](/components/arm/fake/):
-
- - Click the **+** (Create) icon next to your machine part in the left-hand menu and select **Component**.
- - Select the `arm` type, then select the `fake` model.
- - Enter the name `myArm` for your arm and click **Create**.
- - Make your fake arm act like a [UR5e](https://www.universal-robots.com/products/ur5-robot/) by setting the attribute **arm-model** to `ur5e`.
- The config panel should look like this:
-
- {{< imgproc src="/tutorials/build-a-mock-robot/create-arm.png" alt="A fake arm being configured in Builder mode in the Viam app CONFIGURE tab." resize="600x" >}}
-
- - Click **Save** in the top right of the window to save your config.
-
-4. Configure a [fake motor component](/components/motor/fake/):
-
- - Click the **+** (Create) icon next to your machine part in the left-hand menu and select **Component**.
- - Select the `motor` type, then select the `fake` model.
- - Enter the name `myMotor` for your motor and click **Create**.
- - Most motors are wired to a board which sends them signals.
- Even though your motor is fake, make it more realistic by assigning it a `board`.
- Select `myBoard` from the **board** dropdown.
-
-5. Click **Save**.
-
-You will need to reference the component names later when you connect to your mock robot with code.
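-
-If you prefer to check your work in raw JSON, switch the **CONFIGURE** tab from **Builder** to **JSON** mode.
-Based on the builder steps above, the `components` array should look roughly like the following sketch (exact attribute names can vary between model versions, so treat this as a reference rather than a copy-paste config):
-
-```json {class="line-numbers linkable-line-numbers"}
-{
-  "name": "myBoard",
-  "model": "fake",
-  "type": "board",
-  "namespace": "rdk",
-  "attributes": { "fail_new": false }
-},
-{
-  "name": "myArm",
-  "model": "fake",
-  "type": "arm",
-  "namespace": "rdk",
-  "attributes": { "arm-model": "ur5e" }
-},
-{
-  "name": "myMotor",
-  "model": "fake",
-  "type": "motor",
-  "namespace": "rdk",
-  "attributes": { "board": "myBoard" }
-}
-```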
-
-## Control your mock robot using the Viam app
-
-When you add components to your machine, the Viam app automatically generates a UI for them under the [**CONTROL** tab](/fleet/control/):
-
-{{< imgproc src="/tutorials/build-a-mock-robot/control-tab.png" alt="The Control tab with the fake arm, and motor components." resize="600x" >}}
-
-You can use the **CONTROL** tab UI to send commands to your machine.
-
-For example, you can control the direction and speed of your motor, or change the joint positions of your robotic arm.
-You can also see the machine's reported positions and speeds change.
-With real physical components, you would not only be able to control and see your machine's readings on this tab, but you would also see your machine move in the physical world.
-
-## Control your mock robot using a Viam SDK
-
-### Install a Viam SDK
-
-Install a Viam SDK (software development kit) so you can write custom logic to control the mock machine.
-Use the programming language you are most comfortable with.
-
-Refer to the appropriate SDK documentation for SDK installation instructions:
-
-- [Viam Python SDK](https://python.viam.dev/)
-- [Viam Go SDK](https://github.com/viamrobotics/rdk/tree/main/robot/client)
-
-### Connect to your mock robot with your code
-
-The easiest way to get started writing an application with Viam's SDKs is to use the sample code on the **CONNECT** tab.
-
-Navigate to your [machine's page on the Viam app](https://app.viam.com/robots), select the **Code sample** page on the **CONNECT** tab, select your SDK language (this tutorial uses Python or Go), and copy the sample code.
-
-{{% snippet "show-secret.md" %}}
-
-This code snippet imports all the necessary libraries, is pre-populated with your machine cloud credentials, and sets up a connection with the Viam app in the cloud.
-Next, paste that sample code into a file named index.py or index.go in your code editor, and save your file locally.
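-
-For reference, the connection portion of the generated sample typically resembles the following minimal Python sketch; the placeholder address and API key values below are illustrative, and your copied sample already contains the real values for your machine:
-
-```python {class="line-numbers linkable-line-numbers"}
-import asyncio
-
-from viam.robot.client import RobotClient
-
-
-async def connect() -> RobotClient:
-    # Placeholder credentials: your generated sample includes your machine's
-    # real address, API key, and API key ID.
-    opts = RobotClient.Options.with_api_key(
-        api_key="<API-KEY>",
-        api_key_id="<API-KEY-ID>")
-    return await RobotClient.at_address("<MACHINE-ADDRESS>", opts)
-
-
-async def main():
-    machine = await connect()
-    print("Resources:")
-    print(machine.resource_names)
-    await machine.close()
-
-
-if __name__ == "__main__":
-    asyncio.run(main())
-```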
-
-You can now run the code.
-Doing so verifies that the Viam SDK is properly installed, that the `viam-server` instance on your machine is live, and that the computer running the program is able to connect to that instance.
-
-Run your code by entering the following in a new terminal on your computer:
-
-{{< tabs >}}
-{{% tab name="Python" %}}
-
-```sh {class="command-line" data-prompt="$"}
-python3 index.py
-```
-
-{{% /tab %}}
-{{% tab name="Go" %}}
-
-```sh {class="command-line" data-prompt="$"}
-go run index.go
-```
-
-{{% /tab %}}
-{{< /tabs >}}
-
-If you successfully configured your machine and it is able to connect to the Viam app, you should see the program print a list of the various _{{< glossary_tooltip term_id="resource" text="resources" >}}_ that have been configured on your machine in the Viam app:
-
-![Command line output from running python3 index.py when your Raspberry Pi has correctly connected and initialized with the Viam app. The output is an array of resources that have been pulled from the Viam app. The list includes the motion service, arm component, data manager, board component and motor component. There is also a list of arm position and orientation values.](/tutorials/build-a-mock-robot/resource-output.png)
-
-### Control your mock robot
-
-Now, write a program that moves the mock robotic arm to a new random position every second.
-
-{{< tabs >}}
-{{% tab name="Python" %}}
-
-First, import the [arm component](https://python.viam.dev/autoapi/viam/components/arm/client/index.html) from the Viam Python SDK, and the [random](https://docs.python.org/3/library/random.html) library.
-
-At the top of your index.py file, paste the following:
-
-```python {class="line-numbers linkable-line-numbers"}
-from viam.components.arm import Arm, JointPositions
-import random
-```
-
-{{% /tab %}}
-{{% tab name="Go" %}}
-
-First, import the [arm component](https://github.com/viamrobotics/rdk/blob/main/components/arm/client.go) from the Viam Go SDK, and the [random](https://pkg.go.dev/math/rand) and [time](https://pkg.go.dev/time) libraries.
-
-At the top of your index.go file, paste the following:
-
-```go {class="line-numbers linkable-line-numbers"}
-import (
- "fmt"
- "math/rand"
- "time"
- componentpb "go.viam.com/api/component/arm/v1"
- "go.viam.com/rdk/components/arm"
-)
-```
-
-{{% /tab %}}
-{{< /tabs >}}
-
-Next, initialize your fake robotic arm.
-In the main function, paste the following.
-Make sure that the name of your fake arm matches the arm name you configured earlier.
-
-{{< tabs >}}
-{{% tab name="Python" %}}
-
-```python {class="line-numbers linkable-line-numbers"}
-arm = Arm.from_robot(machine, name='myArm')
-```
-
-{{% /tab %}}
-{{% tab name="Go" %}}
-
-```go {class="line-numbers linkable-line-numbers"}
-myArm, err := arm.FromRobot(machine, "myArm")
-if err != nil {
- logger.Fatalf("cannot get arm: %v", err)
-}
-```
-
-{{% /tab %}}
-{{< /tabs >}}
-
-Now that your mock arm has been initialized, you can write some code to control it.
-
-{{< tabs >}}
-{{% tab name="Python" %}}
-
-```python {class="line-numbers linkable-line-numbers"}
-# Gets a random position for each servo on the arm that is within the safe
-# range of motion of the arm. Returns a new array of safe joint positions.
-def getRandoms():
- return [random.randint(-90, 90),
- random.randint(-120, -45),
- random.randint(-45, 45),
- random.randint(-45, 45),
- random.randint(-45, 45)]
-
-
-# Moves the arm to a new random position every second
-async def randomMovement(arm: Arm):
- while (True):
- randomPositions = getRandoms()
- newRandomArmJointPositions = JointPositions(values=randomPositions)
- await arm.move_to_joint_positions(newRandomArmJointPositions)
- print(await arm.get_joint_positions())
- await asyncio.sleep(1)
-```
-
-{{% /tab %}}
-{{% tab name="Go" %}}
-
-```go {class="line-numbers linkable-line-numbers"}
-// Returns an array of random floats between two numbers
-func getRandoms(min, max float64) []float64 {
- res := make([]float64, 5)
- for i := range res {
- res[i] = min + rand.Float64() * (max - min)
- }
- return res
-}
-
-// Moves the arm to a new random position every second
-func randomMovement(ctx context.Context, a arm.Arm) {
- for {
- randomPositions := getRandoms(-90, 90)
- newRandomArmJointPositions := &componentpb.JointPositions{Values: randomPositions}
- a.MoveToJointPositions(ctx, newRandomArmJointPositions, nil)
- fmt.Println(a.JointPositions(ctx, nil))
- time.Sleep(1 * time.Second)
- }
-}
-```
-
-{{% /tab %}}
-{{< /tabs >}}
-
-You can run this code by invoking this function below your arm initialization in `main`.
-Your main function should look like this:
-
-{{< tabs >}}
-{{% tab name="Python" %}}
-
-```python {class="line-numbers linkable-line-numbers"}
-async def main():
- machine = await connect()
-
- print('Resources:')
- print(machine.resource_names)
-
- arm = Arm.from_robot(machine, 'myArm')
- await randomMovement(arm)
-
- await machine.close()
-```
-
-{{% /tab %}}
-{{% tab name="Go" %}}
-
-```go {class="line-numbers linkable-line-numbers"}
-func main() {
- // Connect to the machine...
- myArm, err := arm.FromRobot(machine, "myArm")
- if err != nil {
- logger.Fatalf("cannot get arm: %v", err)
- }
- randomMovement(context.Background(), myArm)
-}
-```
-
-{{% /tab %}}
-{{< /tabs >}}
-
-Now when you run this code, you should see the new mock arm positions listed in the command line.
-
-Verify that your mock robotic arm is working in the **CONTROL** tab of the [Viam app](https://app.viam.com).
-Watch the robotic arm's [`JointPositions()`](/dev/reference/apis/components/arm/#getjointpositions) changing in real-time along with the code on your development machine.
-
-## Configure your machine's mock sub-part
-
-Now that you have your `fake` robotic arm, board, and motor working, add a `fake` motor sub-part to your machine.
-Imagine for the purpose of this tutorial that the `fake` motor we are adding controls a conveyor belt in front of your mock arm on an assembly line.
-
-### What is a sub-part?
-
-Usually, when building a {{< glossary_tooltip term_id="machine" text="machine" >}}, you pick out a [single-board computer](/components/board/) like the [Jetson Nano](https://github.com/viam-modules/nvidia/) or [Raspberry Pi](https://github.com/viam-modules/raspberry-pi).
-You follow the {{< glossary_tooltip term_id="setup" text="setup instructions" >}} to install `viam-server` on your [board](/components/board/), and you start operating your machine with that computer, adding the {{< glossary_tooltip term_id="component" text="components" >}} and {{< glossary_tooltip term_id="service" text="services" >}} you want to use to that `viam-server` instance.
-
-By utilizing {{< glossary_tooltip term_id="part" text="parts" >}}, you can expand upon this, chaining multiple computers together to build a complex robot with Viam:
-
-- Each individual computer-controlled unit of a machine is called a “{{< glossary_tooltip term_id="part" text="part" >}}” in Viam.
-- Typically, simple robots have just one part, but you can have as many parts as your project requires.
-- Parts are organized in a tree, with one of them being the _main_ part, and the others being _sub-parts_.
-- You can access any sub-part either directly, or through any part above it in the tree.
-- Each part runs a single `viam-server` instance.
-
-### Add a new sub-part in the Viam app
-
-Navigate to the **CONFIGURE** tab of your machine's page on the Viam app.
-
-Click the **+** (Create) icon next to your main machine part in the left-hand menu and select **Sub-part**.
-Your sub-part will automatically be created with a name like `your-machine-name-1`:
-
-{{< imgproc src="/tutorials/build-a-mock-robot/machine-nesting.png" alt="The CONFIGURE tab of the mock machine including the sub-part, showing the nested configuration in the left hand menu." resize="1000x" style="width:500px" >}}
-
-Configure your machine's new sub-part with a fake motor:
-
-1. Click the **+** (Create) icon next to your sub-part in the left-hand menu and select **Component**.
-1. Select the `motor` type, then select the `fake` model.
-1. Enter the name `motor2` for your motor and click **Create**:
-
- {{< imgproc src="/tutorials/build-a-mock-robot/sub-part-motor.png" alt="The CONFIGURE tab. A new fake motor component called motor2 is being created under the sub-part." resize="400x" >}}
-
-1. Click **Save**.
-
-### Start a new instance of `viam-server` for your mock sub-part
-
-Every sub-part of a machine needs to run an instance of `viam-server`.
-Since you are using only one computer, you need to bind the sub-part to a new port so you can run two servers on your machine at the same time.
-
-The following instructions use port `8081`, but you can use any open port you want.
-
-1. Go to the **CONFIGURE** tab and find the sub-part's card.
- Expand the card to view the **NETWORK** configuration section for the sub-part.
-1. Click **Set bind address**.
-1. In the **Host** field type `localhost`.
-1. In the **Port** field type `8081`.
-1. Click **Save**.
-
-Your **NETWORK** configuration appears as follows:
-
-{{< imgproc src="/tutorials/build-a-mock-robot/network-config.png" alt="The NETWORK configuration for the sub-part of the mock machine, with Host localhost and Port 8081." resize="1000x" style="width:500px" >}}
-
-### Run a second instance of `viam-server` for your sub-part
-
-Now, it's time to run `viam-server` on your sub-part.
-This instance of `viam-server` will work with the main part's instance of `viam-server` as two parts of one machine.
-
-Stay on the **CONFIGURE** tab.
-Click the **...** (Actions) icon on the right side of the sub-part's card and select **View setup instructions** to open the sub-part's setup instructions.
-Select the platform you want to run `viam-server` on and follow the instructions until you receive confirmation that your sub-part has connected.
-
-Now that you have two instances of `viam-server` running on your local machine, you should be able to see both your main part's robotic arm and your sub-part's mock motor listed on your main machine's **CONTROL** tab.
-
-{{< imgproc src="/tutorials/build-a-mock-robot/control-all.png" alt="Screenshot of the Viam app's CONTROL tab for the main part that lists the main arm, and the sub part motor component." resize="800x" style="width:600px" >}}
-
-To test that your motor sub-part has been added to your machine, run your Python or Go script again.
-Review the output of your program that prints the machine's resources to see your sub-part's motor's name listed.
-
-## Control a sub-part using the Viam SDK
-
-Now that you have your mock sub-part connected to your main part under your mock robot, you can control all of your sub-part's components and services with Viam's SDKs.
-
-In your main function, you need to instantiate your mock sub-part's motor.
-Make sure the sub-part name and motor name match what you have configured.
-This code uses the name `SubPart` as a placeholder.
-
-{{< tabs >}}
-{{% tab name="Python" %}}
-
-```python {class="line-numbers linkable-line-numbers"}
-from viam.components.motor import Motor
-
-motor = Motor.from_robot(robot=machine, name='SubPart:motor2')
-```
-
-{{% /tab %}}
-{{% tab name="Go" %}}
-
-```go {class="line-numbers linkable-line-numbers"}
-// Add "go.viam.com/rdk/components/motor" to your import block if it is not already there.
-myMotor, err := motor.FromRobot(machine, "motor2")
-if err != nil {
- logger.Fatalf("cannot get motor: %v", err)
-}
-```
-
-{{% /tab %}}
-{{< /tabs >}}
-
-Write a function that toggles your sub-part's motor on and off every second:
-
-{{< tabs >}}
-{{% tab name="Python" %}}
-
-```python {class="line-numbers linkable-line-numbers"}
-# Toggles the motor on and off every second
-async def toggleMotor(motor: Motor):
- while (True):
- await motor.set_power(1)
- print("go")
- await asyncio.sleep(1)
- await motor.stop()
- print("stop")
- await asyncio.sleep(1)
-```
-
-{{% /tab %}}
-{{% tab name="Go" %}}
-
-```go {class="line-numbers linkable-line-numbers"}
-// Toggles the motor on and off every second
-func toggleMotor(ctx context.Context, m motor.Motor) {
- for {
- m.SetPower(ctx, 1, nil)
- fmt.Println("go")
- time.Sleep(1 * time.Second)
- m.Stop(ctx, nil)
- fmt.Println("stop")
- time.Sleep(1 * time.Second)
- }
-}
-```
-
-{{% /tab %}}
-{{< /tabs >}}
-
-Now, invoke your new function in `main()`.
-Comment out invoking `randomMovement()` to focus on testing the sub-part.
-Your main function should look similar to this snippet:
-
-{{< tabs >}}
-{{% tab name="Python" %}}
-
-```python {class="line-numbers linkable-line-numbers"}
-async def main():
- machine = await connect()
- print('Resources:')
- print(machine.resource_names)
- arm = Arm.from_robot(machine, name='myArm')
- motor = Motor.from_robot(machine, name='SubPart:motor2')
- await toggleMotor(motor)
- # await randomMovement(arm)
- await machine.close()
-```
-
-{{% /tab %}}
-{{% tab name="Go" %}}
-
-```go {class="line-numbers linkable-line-numbers"}
-func main() {
- // Connect to the machine...
- myMotor, err := motor.FromRobot(machine, "motor2")
- if err != nil {
- logger.Fatalf("cannot get motor: %v", err)
- }
- toggleMotor(context.Background(), myMotor)
-
- myArm, err := arm.FromRobot(machine, "myArm")
- if err != nil {
- logger.Fatalf("cannot get arm: %v", err)
- }
- // randomMovement(context.Background(), myArm)
-}
-```
-
-{{% /tab %}}
-{{< /tabs >}}
-
-Run this code to see your mock sub-part's motor toggling between running and idle in real time from the Viam app!
-
-## Next steps
-
-In this tutorial, we showed you how to set up a mock robot with a sub-part so that you can learn more about using fake components, setting up a local development environment, and writing code using a Viam SDK.
-
-If you're ready to get started with building robots with real hardware components, pick up a [board](/components/board/) and try following another [tutorial](/tutorials/).
diff --git a/docs/dev/tools/tutorials/configure/configure-rover.md b/docs/dev/tools/tutorials/configure/configure-rover.md
deleted file mode 100644
index 3c98a6ffdd..0000000000
--- a/docs/dev/tools/tutorials/configure/configure-rover.md
+++ /dev/null
@@ -1,497 +0,0 @@
----
-title: "Configure a Rover like Yahboom or SCUTTLE"
-linkTitle: "Configure a Rover"
-type: "docs"
-description: "Configure a rover like the a Yahboom 4WD Rover or a SCUTTLE robot on the Viam platform."
-images: ["/tutorials/scuttlebot/scuttle-on-floor-cropped.png"]
-imageAlt: "A SCUTTLE robot on a carpeted floor."
-tags: ["base", "camera", "scuttle", "yahboom"]
-aliases:
- - "/tutorials/scuttlebot"
- - "/tutorials/scuttlebot/scuttlebot"
- - "/tutorials/yahboom-rover/"
- - "/tutorials/control/yahboom-rover/"
-authors: ["Hazal Mestci"]
-languages: ["python", "go"]
-viamresources: ["board", "motor", "camera", "base", "encoder"]
-platformarea: ["core"]
-level: "Beginner"
-date: "2022-08-02"
-updated: "2024-04-17"
-cost: 540
----
-
-This tutorial will guide you through configuring a rover.
-
-
-
-Whether you are using a SCUTTLE, a Yahboom rover, or a different rover, this tutorial includes instructions for your rover model.
-
-{{< alert title="Viam Rover" color="note" >}}
-If you are using a Viam Rover, use the [Viam Rover tutorial fragment](/appendix/try-viam/rover-resources/rover-tutorial-fragments/) instead.
-{{< /alert >}}
-
-## Requirements
-
-- A running instance of `viam-server`.
- See our [Installation Guide](/installation/viam-server-setup/) for instructions.
-- A rover like the [SCUTTLE robot](https://www.scuttlerobot.org/shop/) or the [Yahboom 4WD Smart Robot](https://category.yahboom.net/collections/robotics/products/4wdrobot)
-
-Make sure your rover is assembled before starting this tutorial.
-
-## Start configuring your robot
-
-{{% snippet "setup.md" %}}
-
-Once connected, navigate to the machine's **CONFIGURE** tab.
-
-![A SCUTTLE robot on a carpeted floor.](/tutorials/scuttlebot/scuttle-on-floor.png)
-
-The configuration for each rover is different depending on which {{< glossary_tooltip term_id="component" text="components" >}} your rover is composed of.
-The following sections cover two popular examples, with components that are present on most rovers:
-
-## Configure the board
-
-The first component you will add is the [board](/components/board/), which represents the Raspberry Pi to which the other components are wired.
-
-Click the **+** icon next to your machine part in the left-hand menu and select **Component**.
-Select the `board` type, then select the `viam:raspberry-pi:rpi` model.
-Enter `local` as the name and click **Create**.
-You can use a different name, but you will then need to use that name in place of `local` in the following steps.
-
-![An example board configuration in the app builder UI. The name (local), type (board) and model (pi) are shown. No other attributes are configured.](/components/board/pi-ui-config.png)
-
-You don't need to add any attributes for your board.
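-
-For reference, in **JSON** mode the board entry in the `components` array looks roughly like the following sketch; this assumes the `viam:raspberry-pi` module that provides the `rpi` model is already listed in your machine's `modules` section:
-
-```json {class="line-numbers linkable-line-numbers"}
-{
-  "name": "local",
-  "model": "viam:raspberry-pi:rpi",
-  "type": "board",
-  "namespace": "rdk",
-  "attributes": {}
-}
-```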
-
-## Configure the motors and encoders
-
-### Configure the encoders
-
-{{< alert title="Note" color="note" >}}
-Not all rovers require the configuration of encoders.
-If your motors work without encoders, skip to [configuring your motors](#configure-the-motors).
-{{< /alert >}}
-
-Configure the left and right encoders as follows:
-
-{{< tabs >}}
-{{% tab name="SCUTTLE" %}}
-
-{{< tabs name="Configure AMS-AS5048 Encoders" >}}
-{{% tab name="Config Builder" %}}
-
-Start with the right encoder:
-
-#### Right encoder
-
-Click the **+** icon next to your machine part in the left-hand menu and select **Component**.
-Select the `encoder` type, then select the `AMS-AS5048` model.
-Enter `renc` as the name and click **Create**.
-
-Click the **board** dropdown list and select the name of your board, `local`.
-
-In the **i2c bus** field type `1`, and in the **i2c address** field type `65`.
-
-#### Left encoder
-
-Click the **+** icon next to your machine part in the left-hand menu and select **Component**.
-Select the `encoder` type, then select the `AMS-AS5048` model.
-Enter `lenc` as the name for your encoder and click **Create**.
-
-Click the **board** dropdown list and select the name of your board, `local`.
-
-In the **i2c bus** field type `1`, and in the **i2c address** field type `64`.
-
-{{% /tab %}}
-{{% tab name="JSON" %}}
-
-Add the following JSON objects to the `components` array:
-
-```json {class="line-numbers linkable-line-numbers"}
-{
- "name": "lenc",
- "model": "AMS-AS5048",
- "type": "encoder",
- "namespace": "rdk",
- "attributes": {
- "connection_type": "i2c",
- "i2c_attributes": {
- "i2c_bus": "1",
- "i2c_addr": 64
- }
- }
-},
-{
- "name": "renc",
- "model": "AMS-AS5048",
- "type": "encoder",
- "namespace": "rdk",
- "attributes": {
- "connection_type": "i2c",
- "i2c_attributes": {
- "i2c_bus": "1",
- "i2c_addr": 65
- }
- }
-}
-```
-
-{{% /tab %}}
-{{< /tabs >}}
-
-{{% /tab %}}
-{{% tab name="Other" %}}
-
-Follow the instructions for the [model of encoder](/components/encoder/#configuration) your rover uses to configure your encoders and configure at least a `right` and a `left` encoder.
-
-{{% /tab %}}
-{{< /tabs >}}
-
-### Configure the motors
-
-{{< tabs name="motors-config">}}
-{{% tab name="SCUTTLE" %}}
-
-{{< tabs name="gpio-config">}}
-{{% tab name="Config Builder" %}}
-
-Start with the right motor:
-
-#### Right motor
-
-Click the **+** icon next to your machine part in the left-hand menu and select **Component**.
-Select the `motor` type, then select the `gpio` model.
-Enter `right` as the name or use the suggested name for your motor and click **Create**.
-
-Then from the **Board** dropdown, select `local`, the Raspberry Pi the motor is wired to.
-
-Select `Encoded` in the **Encoder** section and select `renc` as the **encoder** and set **ticks per rotation** to `2`.
-
-Next, describe how the motor is wired to the Pi:
-
-1. Switch the Component Pin Assignment Type to `In1/In2`.
-2. Set **A/In1** to `16`.
-3. Set **B/In2** to `15`.
-4. Leave the `pwm` (pulse-width modulation) pin blank, because this specific motor driver's configuration does not require a separate PWM pin.
-
-![The motor config panel.](/tutorials/scuttlebot/pi-wheel.png)
-
-#### Left motor
-
-Click the **+** icon next to your machine part in the left-hand menu and select **Component**.
-Select the `motor` type, then select the `gpio` model.
-Enter `left` as the name or use the suggested name for your motor and click **Create**.
-
-Then select `local` from the **Board** dropdown to choose the Raspberry Pi the motor is wired to.
-
-Select `Encoded` in the **Encoder** section and select `lenc` as the **encoder** and set **ticks per rotation** to `2`.
-
-Next, describe how the motor is wired to the Pi:
-
-1. Switch the Component Pin Assignment Type to `In1/In2`.
-2. Set **A/In1** to `12`.
-3. Set **B/In2** to `11`.
-4. Leave the `pwm` (pulse-width modulation) pin blank, because this specific motor driver's configuration does not require a separate PWM pin.
-
-{{% /tab %}}
-{{% tab name="JSON" %}}
-
-Add the following JSON objects to the `components` array:
-
-```json
-{
- "name": "right",
- "model": "gpio",
- "type": "motor",
- "namespace": "rdk",
- "attributes": {
- "pins": {
- "a": "16",
- "b": "15",
- "pwm": "",
- "dir": ""
- },
- "board": "local",
- "dir_flip": false,
- "ticks_per_rotation": 2
- },
- "depends_on": [ "local" ]
-},
-{
- "name": "left",
- "model": "gpio",
- "type": "motor",
- "namespace": "rdk",
- "attributes": {
- "pins": {
- "a": "12",
- "b": "11",
- "pwm": ""
- },
- "board": "local",
- "dir_flip": false,
- "ticks_per_rotation": 2
- },
- "depends_on": [ "local" ]
-}
-```
-
-{{% /tab %}}
-{{< /tabs >}}
-
-{{% /tab %}}
-{{% tab name="Yahboom" %}}
-
-Since the Yahboom rover's two right-side motors are wired to a single motor driver, and likewise the two left-side motors, you configure each side as a single [motor component](/components/motor/).
-
-Start with the right set of wheels.
-
-#### Right motor
-
-Click the **+** icon next to your machine part in the left-hand menu and select **Component**.
-Select the `motor` type, then select the `gpio` model.
-Enter `right` as the name or use the suggested name for your motor and click **Create**.
-
-![G P I O motor config in the builder UI with the In1 and In2 pins configured and the PWM pin field left blank.](/components/motor/gpio-config-ui.png)
-
-Click the **Board** dropdown and select `local` as the board the motor driver is wired to.
-Next, configure the **Component Pin Assignment** section to represent how the motor is wired to the board.
-In the **Component Pin Assignment** section of the right motor card, toggle the **Type** to **In1/In2** to use the compatible mode for this motor driver.
-
-If you followed the setup instructions for putting together your Yahboom rover correctly, you can set the **pins** as follows:
-
-- `a` to `35`
-- `b` to `37`
-- `pwm` (pulse-width modulation) to `33`.
-
-Leave `dir` pin blank, because Yahboom's motor driver uses an a/b/pwm configuration.
-
-Click **Show more** and set `max_rpm` to `300`.
-You can ignore the other optional attributes.
-
-#### Left motor
-
-Click the **+** icon next to your machine part in the left-hand menu and select **Component**.
-Select the `motor` type, then select the `gpio` model.
-Enter `left` as the name or use the suggested name for your motor and click **Create**.
-
-Click the **Board** dropdown and select `local` as the board the motor driver is wired to.
-Next, configure the **Component Pin Assignment** section to represent how the motor is wired to the board.
-In the **Component Pin Assignment** section of the left motor card, toggle the **Type** to **In1/In2** to use the compatible mode for this motor driver.
-
-If you followed the setup instructions for putting together your Yahboom rover correctly, you can set the **pins** as follows:
-
-- `a` to `38`
-- `b` to `40`
-- `pwm` (pulse-width modulation) to `36`.
-
-Leave `dir` pin blank, because Yahboom's motor driver uses an a/b/pwm configuration.
-
-Click **Show more** and set `max_rpm` to `300`.
-You can ignore the other optional attributes.
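-
-There is no separate JSON tab for the Yahboom configuration, but for reference the two motor entries in the `components` array should look roughly like the following sketch, using the pin numbers from the steps above (treat it as a reference rather than a copy-paste config):
-
-```json {class="line-numbers linkable-line-numbers"}
-{
-  "name": "right",
-  "model": "gpio",
-  "type": "motor",
-  "namespace": "rdk",
-  "attributes": {
-    "pins": {
-      "a": "35",
-      "b": "37",
-      "pwm": "33"
-    },
-    "board": "local",
-    "max_rpm": 300
-  },
-  "depends_on": ["local"]
-},
-{
-  "name": "left",
-  "model": "gpio",
-  "type": "motor",
-  "namespace": "rdk",
-  "attributes": {
-    "pins": {
-      "a": "38",
-      "b": "40",
-      "pwm": "36"
-    },
-    "board": "local",
-    "max_rpm": 300
-  },
-  "depends_on": ["local"]
-}
-```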
-
-{{% /tab %}}
-{{% tab name="Other" %}}
-
-Follow the instructions for the [model of motor](/components/motor/#configuration) your rover uses to configure your motors and configure at least a `right` and a `left` motor.
-
-{{% /tab %}}
-{{< /tabs >}}
-
-#### Test the motor configuration
-
-{{< alert title="Caution" color="caution" >}}
-
-Ensure the rover has sufficient space to drive around without hitting anyone or anything.
-
-If you don't have enough space, consider holding your robot off the ground so it cannot collide with anything unexpected.
-
-{{< /alert >}}
-
-Now that you have configured your motors, you can actuate them.
-Make sure your machine is turned on.
-Navigate to the **CONTROL** tab.
-
-You'll see a panel for each configured component.
-
-![Motor panels](/tutorials/scuttlebot/scuttle-bothmotors.png)
-
-Click on the panel for the right `motor`.
-
-![Power level adjustment](/tutorials/scuttlebot/pi-moverhmotor.png)
-
-Try changing the motor's **power** level and click **Run**.
-
-{{< alert title="Caution" color="caution" >}}
-Be careful when using your motors!
-Start with the power level set to 20% and increase it incrementally (about 10% each time) until the wheel rotates at a reasonable speed, clicking **Run** at each increment.
-If you hear a "whining" sound from the motor, the power level is not high enough to turn the armature.
-If this happens, increase the power level by 10% increments until it starts to turn.
-{{< /alert >}}
-
-If your wheel turns in reverse when it should turn forward, add the `dir_flip` attribute to the motor's configuration by clicking **Show more** and setting the attribute to `true`.
-
-You can use each motor's panel to set that motor's power level in the same way.
-
-## (Optional) Configure the camera
-
-Optionally, add a camera to your rover.
-
-{{< tabs name="Configure a Webcam" >}}
-{{% tab name="Config Builder" %}}
-
-Click the **+** icon next to your machine part in the left-hand menu and select **Component**.
-Select the `camera` type, then select the `webcam` model.
-Enter a name or use the suggested name for your camera and click **Create**.
-
-{{< imgproc src="/components/camera/configure-webcam.png" alt="Configuration of a webcam camera in the Viam app config builder." resize="1200x" style="width=600x" >}}
-
-If you click on the **Video Path** field while your robot is live, a dropdown autopopulates with identified camera paths.
-
-{{% /tab %}}
-{{% tab name="JSON" %}}
-
-```json {class="line-numbers linkable-line-numbers"}
-{
- "name": "Webcam",
- "model": "webcam",
- "type": "camera",
- "namespace": "rdk",
- "attributes": {
- "video_path": ""
- }
-}
-```
-
-{{% /tab %}}
-{{< /tabs >}}
-
-If your rover has its camera mounted on a pair of [servos](/components/servo/), like the Yahboom rover, you can use these to control the pan and tilt of the camera.
-
-Click the **+** icon next to your machine part in the left-hand menu and select **Component**.
-Select the `servo` type, then select the `viam:raspberry-pi:pi-servo` model.
-Enter `pan` as the name and click **Create**.
-
-Set `Depends On` to `local`, and `pin` to the pin the servo is wired to (`23` for the Yahboom rover).
-
-Finally, add the tilt `servo` as well.
-Click the **+** icon next to your machine part in the left-hand menu and select **Component**.
-Select the `servo` type, then select the `viam:raspberry-pi:pi-servo` model.
-Enter `tilt` as the name and click **Create**.
-
-Set `Depends On` to `local`, and `pin` to the pin the servo is wired to (`21` for the Yahboom rover).
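-
-For reference, the two servo entries in JSON look roughly like the following sketch, based only on the settings above; your servo model may require additional attributes, such as an explicit board reference, so check its documentation:
-
-```json {class="line-numbers linkable-line-numbers"}
-{
-  "name": "pan",
-  "model": "viam:raspberry-pi:pi-servo",
-  "type": "servo",
-  "namespace": "rdk",
-  "attributes": { "pin": "23" },
-  "depends_on": ["local"]
-},
-{
-  "name": "tilt",
-  "model": "viam:raspberry-pi:pi-servo",
-  "type": "servo",
-  "namespace": "rdk",
-  "attributes": { "pin": "21" },
-  "depends_on": ["local"]
-}
-```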
-
-### Test the camera stream
-
-{{< readfile "/static/include/components/camera-view-camera-stream.md" >}}
-
-## Configure the base
-
-Next, configure the [base component](/components/base/), which describes the geometry of your chassis and wheels so that the software can calculate how to steer the rover in a coordinated way.
-Configuring a {{% glossary_tooltip term_id="base" text="base" %}} component also provides you with a nice UI for moving the rover around.
-
-{{< alert title="Note" color="note" >}}
-Viam supports most rovers with built-in models like the [`wheeled`](/components/base/wheeled/) base.
-If your rover is not supported out of the box, follow the [Create a Modular Resource to Control a Rover](/tutorials/custom/controlling-an-intermode-rover-canbus/) tutorial to create a model for your rover or mobile robot.
-{{< /alert >}}
-
-{{< tabs name="Configure a Wheeled Base" >}}
-{{% tab name="Config Builder" %}}
-
-Click the **+** icon next to your machine part in the left-hand menu and select **Component**.
-Select the `base` type, then select the `wheeled` model.
-Enter a name or use the suggested name for your base and click **Create**.
-
-{{< tabs >}}
-{{% tab name="SCUTTLE" %}}
-
-1. Select the motors attached to the base in the fields as your **right** and **left** motors.
-2. Enter `250` for `wheel_circumference_mm`.
-3. Enter `400` for `width_mm` (measured between the midpoints of the wheels).
-
-{{% /tab %}}
-{{% tab name="Yahboom" %}}
-
-1. Select the motors attached to the base in the fields as your **right** and **left** motors.
-2. Enter `220` for `wheel_circumference_mm`.
-3. Enter `150` for `width_mm` (measured between the midpoints of the wheels).
-
-{{% /tab %}}
-{{% tab name="Other" %}}
-
-1. Select the motors attached to the base in the fields as your **right** and **left** motors.
-2. Measure the wheel circumference in mm and enter it in the field for `wheel_circumference_mm`.
-3. Measure the width in mm between the midpoints of the wheels and enter it in the field for `width_mm`.
-
-{{% /tab %}}
-{{< /tabs >}}
-
-{{< imgproc src="/components/base/wheeled-base-ui-config.png" alt="An example configuration for a wheeled base in the Viam app config builder, with Attributes & Depends On dropdowns and the option to add a frame." resize="1200x" style="width: 900px" >}}
-
-{{% /tab %}}
-{{% tab name="JSON" %}}
-
-```json
-{
- "components": [
- {
- "attributes": {
- "board": "local",
- "pins": {
- "pwm": "",
- "a": "16",
- "b": "15"
- }
- },
- "model": "gpio",
- "name": "right",
- "type": "motor"
- },
- {
- "attributes": {
- "board": "local",
- "pins": {
- "pwm": "",
- "a": "12",
- "b": "11"
- }
- },
- "model": "gpio",
- "name": "left",
- "type": "motor"
- },
- {
- "attributes": {
- "left": ["left"],
- "right": ["right"],
- "wheel_circumference_mm": 250,
- "width_mm": 400
- },
- "model": "wheeled",
- "name": "your-wheeled-base",
- "type": "base"
- }
- ]
-}
-```
-
-{{% /tab %}}
-{{< /tabs >}}
-
-Save the config by clicking **Save** at the top right of the page.
-
-### Test the base
-
-{{< readfile "/static/include/components/test-control/base-control.md" >}}
-
-## Next steps
-
-Now that you have fully configured your rover, you can drive it around and view its camera stream.
-
-To take things to the next level, check out one of the following tutorials:
-
-{{< cards >}}
-{{% card link="/tutorials/services/color-detection-scuttle" %}}
-{{% card link="/tutorials/control/gamepad/" %}}
-{{% card link="/tutorials/services/webcam-line-follower-robot/" %}}
-{{< /cards >}}
diff --git a/docs/dev/tools/tutorials/configure/pet-photographer.md b/docs/dev/tools/tutorials/configure/pet-photographer.md
deleted file mode 100644
index 4209279ebc..0000000000
--- a/docs/dev/tools/tutorials/configure/pet-photographer.md
+++ /dev/null
@@ -1,1009 +0,0 @@
----
-title: "Pet Photographer: Create a Data Filtering Module"
-linkTitle: "Pet Photographer"
-type: "docs"
-description: "Use the filter modular component in the Viam app to photograph your pet in their collar."
-tags: ["vision", "filter", "camera", "detector", "services"]
-imageAlt: "Filtered data from the custom colorfiltercam in the DATA tab showing only photos of a dog wearing a blue collar"
-images: ["/tutorials/pet-photographer/data-capture.png"]
-authors: ["Sky Leilani"]
-languages: ["go", "python"]
-viamresources: ["vision", "camera", "data_manager"]
-platformarea: ["data", "registry"]
-level: "Intermediate"
-date: "2023-09-17"
-# updated: "2024-04-19"
-cost: "0"
-no_list: true
-toc_hide: true
----
-
-
-
-If your machine [captures](/services/data/) a lot of data, you might want to filter captured data to selectively store only the data you are interested in.
-For example, you might want to use your smart machine's camera to capture images based on specific criteria, such as the presence of a certain color, and omit captured images that don't meet that criteria.
-
-In this tutorial, you will use a custom {{< glossary_tooltip term_id="module" text="module" >}} to function as a color filter, and use it with a [camera](/components/camera/) to only capture images where your pet is in the frame in the following way:
-
-1. Attach a colored object, such as a blue collar, to your pet.
-1. Set up a camera in an area where your pet is likely to appear in the frame, and configure the data management service to capture and sync images from that camera.
-1. Configure the `colorfilter` custom module to filter captured images from your camera, saving them only when your pet, along with their easily-identifiable colored object, is present in the frame.
-
-The source code for this module is available on the [`modular-filter-examples` GitHub repository](https://github.com/viam-labs/modular-filter-examples).
-In addition to the `colorfilter` module used in this tutorial, the example repository also includes a [sensor reading filter](https://github.com/viam-labs/modular-filter-examples/tree/main/sensorfilter) which you could use to control and filter the data recorded by a [sensor component](/components/sensor/).
-
-## Hardware requirements
-
-To create your own filtering pet photographer robot, you'll need the following hardware:
-
-- A computer
-- A [webcam](/components/camera/webcam/) or other type of [camera](/components/camera/)
-- A colored object, such as a blue collar for enhanced accuracy _(optional)_
-
-{{< alert title="Tip" color="tip" >}}
-In this tutorial, the camera is configured to identify and filter images with the color blue, as it is less common in many environments.
-If your pet already has a distinct color that is different from their environment, you can also configure your camera to use that color to identify pictures of your pet.
-{{< /alert >}}
-
-Make sure your webcam is connected to your computer.
-
-## Setup
-
-{{% snippet "setup.md" %}}
-
-{{< alert title="Note" color="note" >}}
-Your `viam-server` must be [version 0.8.0](https://github.com/viamrobotics/rdk/releases/tag/v0.8.0-rc0) or newer, as filtering capabilities were introduced in the RDK starting from that version.
-{{< /alert >}}
-
-Then, install [Go](https://go.dev/dl/) or [Python](https://www.python.org/downloads/) on both your local development computer and on your machine's board if they are not the same device.
-
-## Add the custom module
-
-In this tutorial, you can choose to add custom data filtering to your machine in one of two ways:
-
-1. [Download the `colorfilter` module](#download-the-colorfilter-module) from Viam and get started quickly.
-1. [Code your own color filtering module](#code-your-own-module), exploring the process of building a module from scratch.
-
-### Download the colorfilter module
-
-Follow the instructions below to download the `colorfilter` module in your preferred programming language:
-
-{{< tabs >}}
-{{% tab name="Python"%}}
-
-1. Clone the [`colorfilter` module](https://github.com/viam-labs/modular-filter-examples) from GitHub onto your computer:
-
- ```{class="command-line" data-prompt="$"}
- git clone https://github.com/viam-labs/modular-filter-examples.git
- ```
-
-1. Navigate to the Python color filter directory, `pycolorfilter`.
-1. Note the path to your module's executable, run.sh, for later use.
-1. [Add the `colorfilter` module to your smart machine as a local module](#add-as-a-local-module) and continue the tutorial from there.
-
-{{% /tab %}}
-{{% tab name="Go"%}}
-
-1. Clone the [`colorfilter` module](https://github.com/viam-labs/modular-filter-examples) from GitHub onto your machine's computer:
-
- ```{class="command-line" data-prompt="$"}
- git clone https://github.com/viam-labs/modular-filter-examples.git
- ```
-
-1. Navigate to the Go color filter directory, `colorfilter`.
-1. Inside the `module` directory, [compile the executable](/how-tos/create-module/#compile-or-package-your-module) that runs your module (an example build command follows this list).
-1. Save the path to your module's executable for later use.
-1. [Add the `colorfilter` module to your smart machine as a local module](#add-as-a-local-module) and continue the tutorial from there.
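-
-For example, from inside the `module` directory, a typical build command might look like the following sketch; the output name is illustrative, so adjust it to whatever executable path you want to point your machine configuration at later:
-
-```sh {class="command-line" data-prompt="$"}
-go build -o colorfilter-module .
-```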
-
-{{% /tab %}}
-{{< /tabs >}}
-
-### Code your own module
-
-To code your own color filtering module, first create the necessary files and directories on your smart machine:
-
-{{< tabs >}}
-{{% tab name="Python"%}}
-
-1. Create a folder for your module with the name of your model `colorfilter`.
- - Your model name must use all lowercase letters.
-1. Inside that folder, create a file called color_filter.py.
-
-{{% /tab %}}
-{{% tab name="Go"%}}
-
-1. Create a folder for your module with the name of your model `colorfilter`.
- - Your model name must use all lowercase letters.
-1. Inside that folder, create:
- - A file called color_filter.go.
- - A directory named `module`.
-
-{{% /tab %}}
-{{< /tabs >}}
-
-#### Code a filter resource model
-
-Next, include all the methods that the corresponding Viam SDK requires in the API definition of its built-in {{< glossary_tooltip term_id="subtype" text="subtype" >}}.
-
-{{< tabs >}}
-{{% tab name="Python"%}}
-You can write your own code or copy the code from the `colorfilter` module's [color_filter.py](https://github.com/viam-labs/modular-filter-examples/blob/main/pycolorfilter/color_filter.py) file.
-
-To write your own code, implement a client interface defined by the required methods outlined in the client.py file for the specific resource you are implementing.
-For example, the camera's client.py file is located at [/components/camera/client.py](https://github.com/viamrobotics/viam-python-sdk/blob/main/src/viam/components/camera/client.py).
-
-1. Open the color_filter.py file you just created and implement the required methods from client.py.
-   - Exclude the `get_image` method, which you will customize to add filtering functionality in the upcoming section.
-   - Include the other methods within the class corresponding to your resource type (in this case, the `CameraClient` class).
-
-For more information, refer to [Write your new resource model definition](/how-tos/create-module/#write-your-new-resource-model-definition).
-
-{{% /tab %}}
-{{% tab name="Go"%}}
-
-To write your own code, implement a client interface defined by the required methods outlined in the client.go file for the specific resource you are implementing.
-For example, the camera's client.go file is located at [/components/camera/client.go](https://github.com/viamrobotics/rdk/blob/main/components/camera/client.go).
-
-1. Open the color_filter.go file you just created and implement the required methods in it.
-   Exclude the `Read` method, which you will replace with a `Next` method to add filtering functionality in the upcoming section.
- - You can create your own code or copy the code from the [viam-labs `colorfilter` repository's color_filter.go](https://github.com/viam-labs/modular-filter-examples/blob/main/colorfilter/color_filter.go) file.
-
-For more information, refer to [Write your new resource model definition](/how-tos/create-module/#write-your-new-resource-model-definition).
-
-{{% /tab %}}
-{{< /tabs >}}
-
-The filter function in your custom filter module must contain two critical elements:
-
-1. A utility function that checks whether the caller of the filter function is the [data management service](/services/data/).
-1. A safeguard that returns the unfiltered data when the caller is not the data management service, and raises a "no capture to store" error when there is nothing worth saving.
-
-{{< alert title="Important" color="note" >}}
-You must include both the safeguard and utility functions in order to access data filtering functionality within your module.
-
-For programming languages other than Python and Go, the API of the component you're receiving data from will provide comparable utility functions and safeguards.
-These tools help you to check the caller of your filter function and ensure your smart machine responds accordingly.
-
-For detailed information, please refer to the documentation for your chosen SDK.
-{{< /alert >}}
-
-Follow the steps below to include the utility function and check whether the data management service is the caller of the function responsible for data capture.
-If a service other than the data management service calls the function, it will return the original, unfiltered data.
-
-To check the caller of the collector function using the utility function:
-
-{{< tabs >}}
-{{% tab name="Python"%}}
-First, import `from_dm_from_extra`:
-
-```python {class="line-numbers linkable-line-numbers"}
-from viam.utils import from_dm_from_extra
-```
-
-Then, include it in the conditional statement in your filter function:
-
-```python {class="line-numbers linkable-line-numbers"}
-if from_dm_from_extra(extra):
- detections = await self.vision_service.get_detections(img)
-```
-
-With this configuration:
-
-- Your camera checks if the data management service is the caller of the filter function by using `from_dm_from_extra`.
-
-{{% /tab %}}
-{{% tab name="Go"%}}
-
-Write a conditional statement that checks `FromDMContextKey`:
-
-{{< alert title="Important" color="note" >}}
-Use `FromDMContextKey` to check the caller of the data capture function when working with a modular _camera_ using the Go SDK.
-For all other components, you should use `FromDMString` instead.
-See the [sensor filter example](https://github.com/viam-labs/modular-filter-examples/blob/main/sensorfilter/sensor_filter.go) for example code that filters data from a sensor.
-{{< /alert >}}
-
-```go {class="line-numbers linkable-line-numbers"}
-if ctx.Value(data.FromDMContextKey{}) != true {
- // If not data management collector, return underlying stream contents without filtering.
- return fs.cameraStream.Next(ctx)
-}
-
-// Only return captured image if it contains a certain color set by the vision service.
-img, release, err := fs.cameraStream.Next(ctx)
-
-detections, err := fs.visionService.Detections(ctx, img, map[string]interface{}{})
-```
-
-With this configuration:
-
-- Your camera checks if the data management service is the caller of the filter function by using `FromDMContextKey`.
-- If `FromDMContextKey` is `true` and the data management service is the caller, the camera captures an image by declaring the `img` variable and filling it with the content from the camera stream.
-- Then, after capturing the image, the code requests detections on it from the vision service.
-
-{{% /tab %}}
-{{< /tabs >}}
-
-After implementing a check to identify the initiator of the filter function, you must include the safeguard that returns an error when there is nothing worth storing.
-
-To do this, include the following in your filter module's resource model:
-
-{{< tabs name="Example tabs">}}
-{{% tab name="Python"%}}
-
-Edit color_filter.py and import the safeguard error `NoCaptureToStoreError` from Viam:
-
-```python {class="line-numbers linkable-line-numbers"}
-from viam.errors import NoCaptureToStoreError
-```
-
-Then, edit the `if from_dm_from_extra(extra)` conditional statement from earlier to add a second conditional statement within it that raises the error when the vision service returns no detections:
-
-```python {class="line-numbers linkable-line-numbers"}
-if from_dm_from_extra(extra):
- detections = await self.vision_service.get_detections(img)
- if len(detections) == 0:
- raise NoCaptureToStoreError()
-```
-
-This code:
-
-- Checks the length (`len`) of the `detections` variable.
-- Raises a `NoCaptureToStoreError()` if `len` is equal to `0`, signaling to the data management service that there is nothing to store.
-
-{{% /tab %}}
-{{% tab name="Go"%}}
-
-Open color_filter.go and write a conditional statement inside of your filter function that includes the error message `data.ErrNoCaptureToStore`:
-
-```go {class="line-numbers linkable-line-numbers"}
-if len(detections) == 0 {
- return nil, nil, data.ErrNoCaptureToStore
-}
-```
-
-This code:
-
-- Checks the length (`len`) of the `detections` variable.
-- Returns a `data.ErrNoCaptureToStore` error if `len` is equal to `0`, signaling to the data management service that there is nothing to store.
-
-{{% /tab %}}
-{{< /tabs >}}
-
-Now that you've included the required utility function and safeguard, your complete color filter function should look like the following:
-
-{{< tabs >}}
-{{% tab name="Python"%}}
-
-```python {class="line-numbers linkable-line-numbers"}
-async def get_image(
- self,
- mime_type: str = "",
- *,
- extra: Optional[Dict[str, Any]] = None,
- timeout: Optional[float] = None,
- **kwargs
- ) -> Image.Image:
- """Filters the output of the underlying camera"""
- img = await self.actual_cam.get_image()
- if from_dm_from_extra(extra):
- detections = await self.vision_service.get_detections(img)
- if len(detections) == 0:
- raise NoCaptureToStoreError()
-
- return img
-```
-
-If the data management service is the caller, the filter function requests detections from the vision service and returns the image if the specified color is detected.
-If no detections are found, it raises a `NoCaptureToStoreError()` instead.
-
-{{% /tab %}}
-{{% tab name="Go"%}}
-
-This code includes the utility function and safeguard you implemented earlier, and also includes error handling for getting the next source image and obtaining detections.
-
-```go {class="line-numbers linkable-line-numbers"}
-// Next contains the filtering logic and returns select data from the underlying camera.
-func (fs filterStream) Next(ctx context.Context) (image.Image, func(), error) {
- if ctx.Value(data.FromDMContextKey{}) != true {
- // If not data management collector, return underlying stream contents without filtering.
- return fs.cameraStream.Next(ctx)
- }
-
- // Only return captured image if it contains a certain color set by the vision service.
- img, release, err := fs.cameraStream.Next(ctx)
- if err != nil {
- return nil, nil, errors.New("could not get next source image")
- }
- detections, err := fs.visionService.Detections(ctx, img, map[string]interface{}{})
- if err != nil {
- return nil, nil, errors.New("could not get detections")
- }
-
- if len(detections) == 0 {
- return nil, nil, data.ErrNoCaptureToStore
- }
-
- return img, release, err
-}
-```
-
-If the data management service is the caller, the filter function requests detections from the vision service and returns the image if the specified color is detected.
-If no detections are found, it returns a `data.ErrNoCaptureToStore` error instead.
-
-{{% /tab %}}
-{{< /tabs >}}
-
-After you have implemented your resource subtype's required methods and written your filter function, your final code should look like this:
-
-{{< tabs >}}
-{{% tab name="Python"%}}
-
-color_filter.py implements "colorfilter", a custom model of the [camera component API](/components/camera/#api).
-
-
- Click to view sample code from color_filter.py
-
-```python {class="line-numbers linkable-line-numbers"}
-from typing import (
- ClassVar, Mapping, Sequence, Optional, cast, Tuple, List, Any, Dict
-)
-from typing_extensions import Self
-from viam.module.types import Reconfigurable
-from viam.proto.app.robot import ComponentConfig
-from viam.proto.common import ResourceName, ResponseMetadata, Geometry
-from viam.components.camera import Camera
-from viam.resource.types import Model, ModelFamily
-from viam.resource.base import ResourceBase
-from viam.media.video import NamedImage
-from PIL import Image
-from viam.errors import NoCaptureToStoreError
-from viam.services.vision import Vision
-from viam.utils import from_dm_from_extra
-
-
-class ColorFilterCam(
- Camera,
- Reconfigurable
- ):
-
- """A ColorFilterCam wraps the underlying camera
- `actual_cam` and only keeps the data captured on the
- actual camera if `vision_service` detects a certain
- color in the captured image.
- """
- MODEL: ClassVar[Model] = Model(
- ModelFamily("example", "camera"),
- "colorfilter")
-
- def __init__(self, name: str):
- super().__init__(name)
-
- @classmethod
- def new_cam(
- cls,
- config: ComponentConfig,
- dependencies: Mapping[ResourceName, ResourceBase]
- ) -> Self:
- cam = cls(config.name)
- cam.reconfigure(config, dependencies)
- return cam
-
- @classmethod
- def validate_config(
- cls,
- config: ComponentConfig
- ) -> Sequence[str]:
- """Validates JSON configuration"""
- actual_cam = config.attributes.fields["actual_cam"].string_value
- if actual_cam == "":
- raise Exception(
- "actual_cam attribute is required for a ColorFilterCam component"
- )
- vision_service = config.attributes.fields[
- "vision_service"
- ].string_value
- if vision_service == "":
- raise Exception(
- """
- vision_service attribute
- is required for a
- ColorFilterCam component
- """
- )
- return [actual_cam, vision_service]
-
- def reconfigure(
- self,
- config: ComponentConfig,
- dependencies: Mapping[ResourceName, ResourceBase]
- ):
- """Handles attribute reconfiguration"""
- actual_cam_name = config.attributes.fields[
- "actual_cam"
- ].string_value
- actual_cam = dependencies[
- Camera.get_resource_name(actual_cam_name)
- ]
- self.actual_cam = cast(Camera, actual_cam)
-
- vision_service_name = config.attributes.fields[
- "vision_service"
- ].string_value
- vision_service = dependencies[
- Vision.get_resource_name(
- vision_service_name
- )
- ]
- self.vision_service = cast(
- Vision,
- vision_service
- )
-
- async def get_properties(
- self,
- *,
- timeout: Optional[float] = None,
- **kwargs
- ) -> Camera.Properties:
- """Returns details about the camera"""
- return await self.actual_cam.get_properties()
-
- async def get_image(
- self,
- mime_type: str = "",
- *,
- extra: Optional[Dict[str, Any]] = None,
- timeout: Optional[float] = None,
- **kwargs
- ) -> Image.Image:
- """Filters the output of the underlying camera"""
- img = await self.actual_cam.get_image()
- if from_dm_from_extra(extra):
- detections = await self.vision_service.get_detections(img)
- if len(detections) == 0:
- raise NoCaptureToStoreError()
-
- return img
-
- async def get_images(
- self,
- *,
- timeout: Optional[float] = None,
- **kwargs
- ) -> Tuple[
- List[NamedImage],
- ResponseMetadata
- ]:
- raise NotImplementedError
-
- async def get_point_cloud(
- self,
- *,
- extra: Optional[Dict[str, Any]] = None,
- timeout: Optional[float] = None,
- **kwargs
- ) -> Tuple[
- bytes,
- str
- ]:
- raise NotImplementedError
-
- async def get_geometries(self) -> List[Geometry]:
- raise NotImplementedError
-```
-
-In this code:
-
-- The Python SDK simplifies the verification process by exposing the utility function `from_dm_from_extra`, which checks for you whether the caller is the data management service.
-
-- If the boolean is `true`, the function calls the vision service to get detections and returns the image if the color is detected.
-  If no detections are found, it raises `NoCaptureToStoreError()`.
-
-{{% /tab %}}
-{{% tab name="Go"%}}
-
-color_filter.go implements "colorfilter", a custom model of the [camera component API](/components/camera/#api).
-
-
- Click to view sample code from color_filter.go
-
-```go {class="line-numbers linkable-line-numbers"}
-// Package colorfilter implements a modular camera that filters the output of an underlying camera and only keeps
-// captured data if the vision service detects a certain color in the captured image.
-package colorfilter
-
-import (
-    "context"
-    "fmt"
-    "image"
-
-    "github.com/pkg/errors"
-    "github.com/viamrobotics/gostream"
-
-    "go.viam.com/rdk/components/camera"
-    "go.viam.com/rdk/data"
-    "go.viam.com/rdk/logging"
-    "go.viam.com/rdk/pointcloud"
-    "go.viam.com/rdk/resource"
-    "go.viam.com/rdk/rimage/transform"
-    "go.viam.com/rdk/services/vision"
-)
-
-var (
- // Model is the full model definition.
- Model = resource.NewModel("example", "camera", "colorfilter")
- errUnimplemented = errors.New("unimplemented")
-)
-
-func init() {
- resource.RegisterComponent(camera.API, Model, resource.Registration[camera.Camera, *Config]{
- Constructor: newCamera,
- })
-}
-
-func newCamera(ctx context.Context, deps resource.Dependencies, conf resource.Config, logger logging.Logger) (camera.Camera, error) {
- c := &colorFilterCam{
- Named: conf.ResourceName().AsNamed(),
- logger: logger,
- }
- if err := c.Reconfigure(ctx, deps, conf); err != nil {
- return nil, err
- }
- return c, nil
-}
-
-// Config contains the name to the underlying camera and the name of the vision service to be used.
-type Config struct {
- ActualCam string `json:"actual_cam"`
- VisionService string `json:"vision_service"`
-}
-
-// Validate validates the config and returns implicit dependencies.
-func (cfg *Config) Validate(path string) ([]string, error) {
- if cfg.ActualCam == "" {
- return nil, fmt.Errorf(`expected "actual_cam" attribute in %q`, path)
- }
- if cfg.VisionService == "" {
- return nil, fmt.Errorf(`expected "vision_service" attribute in %q`, path)
- }
-
- return []string{cfg.ActualCam, cfg.VisionService}, nil
-
-}
-
-// A colorFilterCam wraps the underlying camera `actualCam` and only keeps the data captured on the actual camera if `visionService`
-// detects a certain color in the captured image.
-type colorFilterCam struct {
- resource.Named
- actualCam camera.Camera
- visionService vision.Service
-    logger        logging.Logger
-}
-
-// Reconfigure reconfigures the modular component with new settings.
-func (c *colorFilterCam) Reconfigure(ctx context.Context, deps resource.Dependencies, conf resource.Config) error {
- camConfig, err := resource.NativeConfig[*Config](conf)
- if err != nil {
- return err
- }
-
- c.actualCam, err = camera.FromDependencies(deps, camConfig.ActualCam)
- if err != nil {
- return errors.Wrapf(err, "unable to get camera %v for colorfilter", camConfig.ActualCam)
- }
-
- c.visionService, err = vision.FromDependencies(deps, camConfig.VisionService)
- if err != nil {
- return errors.Wrapf(err, "unable to get vision service %v for colorfilter", camConfig.VisionService)
- }
-
- return nil
-
-}
-
-// DoCommand simply echoes whatever was sent.
-func (c *colorFilterCam) DoCommand(ctx context.Context, cmd map[string]interface{}) (map[string]interface{}, error) {
- return cmd, nil
-}
-
-// Close closes the underlying camera.
-func (c *colorFilterCam) Close(ctx context.Context) error {
- return c.actualCam.Close(ctx)
-}
-
-// Images does nothing.
-func (c *colorFilterCam) Images(ctx context.Context) ([]camera.NamedImage, resource.ResponseMetadata, error) {
- return nil, resource.ResponseMetadata{}, errUnimplemented
-}
-
-// Stream returns a stream that filters the output of the underlying camera stream in the stream.Next method.
-func (c *colorFilterCam) Stream(ctx context.Context, errHandlers ...gostream.ErrorHandler) (gostream.VideoStream, error) {
-    camStream, err := c.actualCam.Stream(ctx, errHandlers...)
-    if err != nil {
-        return nil, err
-    }
-
-    return filterStream{camStream, c.visionService}, nil
-}
-
-// NextPointCloud does nothing.
-func (c *colorFilterCam) NextPointCloud(ctx context.Context) (pointcloud.PointCloud, error) {
-    return nil, errUnimplemented
-}
-
-// Properties returns details about the camera.
-func (c *colorFilterCam) Properties(ctx context.Context) (camera.Properties, error) {
-    return c.actualCam.Properties(ctx)
-}
-
-// Projector does nothing.
-func (c *colorFilterCam) Projector(ctx context.Context) (transform.Projector, error) {
- return nil, errUnimplemented
-}
-
-type filterStream struct {
- cameraStream gostream.VideoStream
- visionService vision.Service
-}
-
-// Next contains the filtering logic and returns select data from the underlying camera.
-func (fs filterStream) Next(ctx context.Context) (image.Image, func(), error) {
- if ctx.Value(data.FromDMContextKey{}) != true {
- // If not data management collector, return underlying stream contents without filtering.
- return fs.cameraStream.Next(ctx)
- }
-
- // Only return captured image if it contains a certain color set by the vision service.
- img, release, err := fs.cameraStream.Next(ctx)
- if err != nil {
- return nil, nil, errors.New("could not get next source image")
- }
- detections, err := fs.visionService.Detections(ctx, img, map[string]interface{}{})
- if err != nil {
- return nil, nil, errors.New("could not get detections")
- }
-
- if len(detections) == 0 {
- return nil, nil, data.ErrNoCaptureToStore
- }
-
- return img, release, err
-}
-
-// Close closes the stream.
-func (fs filterStream) Close(ctx context.Context) error {
- return fs.cameraStream.Close(ctx)
-}
-```
-
-In this code:
-
-- A modular camera coded in Go looks for a flag called `fromDM` in the context (`ctx`) using `ctx.Value(data.FromDMContextKey{})` to figure out if the data management service is the caller.
-
-- If the boolean is `true`, the function calls the vision service to get detections and returns the image if the color is detected.
-  If no detections are found, it returns the [`ErrNoCaptureToStore`](https://github.com/viamrobotics/rdk/blob/214879e147970a454f78035e938ea853fcd79f17/data/collector.go#L44) error instead.
-
-{{% /tab %}}
-{{< /tabs >}}
-
-For more information, see [Write your new resource model definition](/how-tos/create-module/#write-your-new-resource-model-definition).
-
-#### Code an entry point file
-
-Next, code your module entry point file which `viam-server` will use to initialize and start the filter module.
-
-To code an entry point file yourself, locate the subtype API as defined in the relevant resource's Go file in the [RDK source code](https://github.com/viamrobotics/rdk).
-
-- In this example, the camera's API is defined in the [camera.go](https://github.com/viamrobotics/rdk/blob/main/components/camera/camera.go) file in the RDK source code.
- When developing your main.go or main.py file, reference this file.
-
-{{< tabs >}}
-{{% tab name="Python"%}}
-
-Follow these steps to code your entry point file:
-
-1. Inside of your filter module's directory, create a new file named main.py.
- This will be the entry point file for the module.
-1. Add the code below which initializes and starts the filter module.
-
-```python {class="line-numbers linkable-line-numbers"}
-import asyncio
-from viam.components.camera import Camera
-from viam.module.module import Module
-from viam.resource.registry import Registry, ResourceCreatorRegistration
-import color_filter
-
-
-async def main():
-
- """
- This function creates and starts a new module,
- after adding all desired resource models.
- Resource creators must be
- registered to the resource
- registry before the module adds the resource model.
- """
- Registry.register_resource_creator(
- Camera.SUBTYPE,
- color_filter.ColorFilterCam.MODEL,
- ResourceCreatorRegistration(
- color_filter.ColorFilterCam.new_cam,
- color_filter.ColorFilterCam.validate_config
- )
- )
- module = Module.from_args()
- module.add_model_from_registry(
- Camera.SUBTYPE,
- color_filter.ColorFilterCam.MODEL
- )
- await module.start()
-
-if __name__ == "__main__":
- asyncio.run(main())
-```
-
-{{% /tab %}}
-{{% tab name="Go"%}}
-
-Follow these steps to code your entry point file:
-
-1. Open the folder named `module` inside of your filter module's directory and create a new file named main.go.
- This will be the entry point file for the module.
-1. Add the code below which initializes and starts the filter module.
-
-```go {class="line-numbers linkable-line-numbers"}
-// Package main is a module which serves the colorfilter custom module.
-package main
-
-import (
- "context"
-
- "go.viam.com/rdk/logging"
- "go.viam.com/rdk/utils"
-
- "github.com/viam-labs/modular-filter-examples/colorfilter"
- "go.viam.com/rdk/components/camera"
- "go.viam.com/rdk/module"
-)
-
-func main() {
- utils.ContextualMain(mainWithArgs, module.NewLoggerFromArgs("colorfilter_module"))
-}
-
-func mainWithArgs(ctx context.Context, args []string, logger logging.Logger) (err error) {
- myMod, err := module.NewModuleFromArgs(ctx, logger)
- if err != nil {
- return err
- }
-
- err = myMod.AddModelFromRegistry(ctx, camera.API, colorfilter.Model)
- if err != nil {
- return err
- }
-
- err = myMod.Start(ctx)
- defer myMod.Close(ctx)
- if err != nil {
- return err
- }
- <-ctx.Done()
- return nil
-}
-```
-
-{{% /tab %}}
-{{< /tabs >}}
-
-For more information, see [Create a new module](/how-tos/create-module/).
-
-Once you've written your filter module, [compile your module into a single executable](/how-tos/create-module/#compile-or-package-your-module) that runs your module when executed.
-
-Note the absolute path to your module’s executable for use in the next section.
-
-### Add as a local module
-
-Whether you've downloaded the `colorfilter` module, or written your own color filtering module, the next step is to add the module to your smart machine as a local module:
-
-1. Navigate to the **CONFIGURE** tab of your machine's page in the [Viam app](https://app.viam.com/robots).
-1. Click the **+** (Create) button next to your main part in the left-hand menu and select **Local module**, then **Local module**.
-
-1. Enter a name or use the suggested name for your local module, enter the [module's executable path](/how-tos/create-module/#compile-or-package-your-module), then click **Create**.
- - The name must use only lowercase characters.
-1. Then, click the **Save** button in the top right corner of the page to save your changes.
-
-![A color filter module that has been added.](/tutorials/pet-photographer/add-colorfilter-module.png)
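-
-If you prefer to work in raw JSON, the local module entry that this step adds to your machine's config looks roughly like the following sketch; the module name and the executable path here are placeholders, so substitute the name you chose and the path you noted after compiling:
-
-```json {class="line-numbers linkable-line-numbers"}
-{
-  "modules": [
-    {
-      "type": "local",
-      "name": "colorfilter",
-      "executable_path": "/home/me/modular-filter-examples/colorfilter/run.sh"
-    }
-  ]
-}
-```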
-
-## Add services
-
-Next, add the following services to your smart machine to support the color filter module:
-
-- The [data management service](/services/data/) enables your smart machine to capture data and sync it to the cloud.
-- The [vision service](/services/vision/#detections) enables your smart machine to perform color detection on objects in a camera stream.
-
-### Add the data management service
-
-To enable data capture on your machine, add and configure the [data management service](/services/data/) to capture and store data on your machine's computer:
-
-{{< tabs >}}
-{{% tab name="Config Builder" %}}
-
-1. On the **CONFIGURE** tab, click the **+** icon next to your machine part in the left-hand menu and select **Service**.
-1. Choose `data management` as the type.
-1. Enter a name or use the suggested name for your instance of the data manager.
- This tutorial uses the name 'dm' in all example code.
-1. Click **Create**.
- On the panel that appears, you can manage the capturing and syncing functions individually.
- By default, the data management service captures data every 0.1 minutes to the ~/.viam/capture directory.
- Leave the default settings as they are.
-1. Click **Save** in the top right corner of the screen to save your changes.
-
- ![An instance of the data management service named "dm". The cloud sync and capturing options are toggled on and the directory is empty. The interval is set to 0.1](/tutorials/pet-photographer/data-management-services.png)
-
- For more detailed information, see [Add the data management service](/services/data/#configuration).
- {{% /tab %}}
- {{% tab name="JSON Template" %}}
- Add the data management service to the services array in your machine’s raw JSON configuration:
-
-```json {class="line-numbers linkable-line-numbers"}
-{
- "name": "dm",
- "type": "data_manager",
- "namespace": "rdk",
- "attributes": {
- "sync_interval_mins": 0.1,
- "capture_dir": "",
- "tags": [],
- "additional_sync_paths": []
- }
-}
-```
-
-{{% /tab %}}
-{{< /tabs >}}
-
-### Add the vision service
-
-To enable your smart machine to detect a specific color in its camera stream, add a [`color_detector` vision service](/services/vision/color_detector/).
-For this tutorial, we will configure the vision service to recognize a blue dog collar using `#43A1D0` or `rgb(67, 161, 208)`.
-If you have a different item you want to use, or want to match to a color that matches your pet closely, you can use a different color.
-
-{{< tabs >}}
-{{% tab name="Config Builder" %}}
-
-1. Navigate to the **CONFIGURE** tab of your machine's page in the [Viam app](https://app.viam.com).
-1. Click the **+** icon next to your machine part in the left-hand menu and select **Service**.
-1. Select the `vision` type, then select the `color detector` model.
-1. Enter a name or use the suggested name for your color detector.
- This tutorial uses the name 'my_color_detector' in all example code.
-1. Click **Create**.
-1. In the vision service's **Attributes** section, click the color selection box to set the color to be detected.
- For this tutorial, set the color to `#43A1D0` or `rgb(67, 161, 208)`.
- Alternatively, you can provide the color of your pet, or use a different brightly-colored collar or ribbon.
-1. Set **Hue Tolerance** to `0.06` and **Segment size px** to `100`.
-1. Click the **Save** button in the top right corner of the page.
-
-Your configuration should look like the following:
-
-![The vision service configuration panel showing the color set to blue, the hue tolerance set to 0.06, and the segment size set to 100.](/tutorials/pet-photographer/vision-service.png)
-
-For more detailed information, refer to [Configure a color detector](/services/vision/color_detector/).
-
-{{% /tab %}}
-{{% tab name="JSON Template" %}}
-
-Add the vision service object to the services array in your machine’s raw JSON configuration:
-
-```json {class="line-numbers linkable-line-numbers"}
-{
- "name": "my_color_detector",
- "type": "vision",
- "model": "color_detector",
- "attributes": {
- "segment_size_px": 100,
- "detect_color": "#43a1d0",
- "hue_tolerance_pct": 0.06
- }
-}
-```
-
-Click the **Save** button in the top right corner of the page when done.
-
-{{% /tab %}}
-{{< /tabs >}}
-
-## Enable filtering by color
-
-With the vision and data management services configured, you can now configure your camera to filter by color and sync photos to Viam's cloud.
-
-### Configure your camera
-
-If you haven't already, add a [camera](/components/camera/) component to your smart machine:
-
-1. On the **CONFIGURE** tab, click the **+** (Create) button next to your main part in the left-hand menu and select **Component**.
- Start typing "webcam" and select **camera / webcam**.
- Enter a name or use the suggested name for your camera.
- This tutorial uses the name 'cam' in all example code.
- Click **Create**.
-
-1. Click the **video path** dropdown and select the webcam you'd like to use for this project from the list of suggestions.
-
-1. Click **Save** in the top right corner of the screen to save your changes.
-
-![An instance of the webcam component named 'cam'](/tutorials/pet-photographer/webcam-component.png)
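-
-In raw JSON, the resulting camera entry looks roughly like the following sketch; the `video_path` value is a placeholder, so use whichever path you selected from the dropdown:
-
-```json {class="line-numbers linkable-line-numbers"}
-{
-  "name": "cam",
-  "model": "webcam",
-  "type": "camera",
-  "namespace": "rdk",
-  "attributes": {
-    "video_path": "video0"
-  }
-}
-```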
-
-### Add the color filter component
-
-1. Click the **+** icon next to your machine part in the left-hand menu and select **Local module**, then **Local component**.
-1. On the **Create** menu:
-
- 1. Select the `camera` type from the dropdown menu.
- 1. Enter `example:camera:colorfilter`, the {{< glossary_tooltip term_id="model-namespace-triplet" text="model namespace triplet">}} of your modular resource's {{< glossary_tooltip term_id="model" text="model" >}}.
- 1. Provide a name for this instance of your modular resource.
- This name must be different from the module name.
-
-1. Click **Create** to create the modular resource.
-1. In the resulting module configuration pane, copy the following JSON configuration into the attributes field:
-
- ```json {class="line-numbers linkable-line-numbers"}
- {
- "vision_service": "my_color_detector",
- "actual_cam": "cam"
- }
- ```
-
- ![A component panel for a color filter modular resource with the attributes filled out for vision service and actual_cam](/tutorials/pet-photographer/colorfiltercam-component-attributes.png)
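-
-For reference, the complete component entry in raw JSON should look something like the following sketch, assuming you named this instance of your modular camera `colorfiltercam`:
-
-```json {class="line-numbers linkable-line-numbers"}
-{
-  "name": "colorfiltercam",
-  "model": "example:camera:colorfilter",
-  "type": "camera",
-  "namespace": "rdk",
-  "attributes": {
-    "vision_service": "my_color_detector",
-    "actual_cam": "cam"
-  }
-}
-```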
-
-### Configure data capture
-
-To add data capture for the color filter camera:
-
-1. Click **Add method** in the **Data capture** section of your color filter camera component.
-
-2. Toggle the **Method** dropdown menu, select **ReadImage**, and set the **Frequency** of the capture to `0.1`, which will configure the data management service to capture images from your camera once every 10 seconds.
-
-3. Click the **MIME type** dropdown and select `image/jpeg`.
-
-4. Click **Save** in the top right corner of the screen.
-
-![A component panel for a color filter modular resource with the attributes filled out for vision service and actual_cam as well as the data capture configuration capture set to capture ReadImage at a frequency of 0.1 images per second.](/tutorials/pet-photographer/colorfiltercam-component.png)
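-
-Under the hood, these data capture settings are stored on the component as a `service_configs` block. In raw JSON it looks approximately like the following sketch, where `0.1` Hz corresponds to one image every 10 seconds:
-
-```json {class="line-numbers linkable-line-numbers"}
-"service_configs": [
-  {
-    "type": "data_manager",
-    "attributes": {
-      "capture_methods": [
-        {
-          "method": "ReadImage",
-          "capture_frequency_hz": 0.1,
-          "additional_params": {
-            "mime_type": "image/jpeg"
-          }
-        }
-      ]
-    }
-  }
-]
-```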
-
-## Test your color filter camera
-
-To test that your color filter camera is capturing and filtering images properly, navigate to the **CONTROL** tab on your machine's page.
-
-On the **colorfiltercam** panel, toggle **View colorfiltercam** to view your camera's live feed.
-Test the filter by positioning your smart machine so that it captures an image of your pet wearing its collar.
-Then examine the **DATA** tab to confirm that only pictures containing your pet wearing their collar are stored.
-
-For example, the following is the result of several dozen pictures of the same dog, but only those pictures where he is wearing the blue collar were captured and synced to the cloud:
-
-![Filtered data tab contents from the colorfiltercam component showing only photos of a dog with a blue collar](/tutorials/pet-photographer/data-capture.png)
-
-## Next steps
-
-Your pet photographer is now set up.
-Place it in an area your pet frequently visits and don't forget to attach the colored object to your pet.
-
-Now you can follow similar steps and customize the code you've written to configure a sensor for detecting specific thresholds or filter out blurry images from your camera's captures.
-
-Try these other tutorials for more on working with the data management and vision services:
-
-{{< cards >}}
-{{% card link="/how-tos/detect-color/" %}}
-{{% card link="/tutorials/projects/pet-treat-dispenser/" %}}
-{{% card link="/tutorials/projects/guardian/" %}}
-{{% card link="/tutorials/projects/send-security-photo/" %}}
-{{% card link="/how-tos/train-deploy-ml/" %}}
-{{< /cards >}}
diff --git a/docs/dev/tools/tutorials/control/_index.md b/docs/dev/tools/tutorials/control/_index.md
deleted file mode 100644
index 046a907554..0000000000
--- a/docs/dev/tools/tutorials/control/_index.md
+++ /dev/null
@@ -1,10 +0,0 @@
----
-title: "Control Tutorials"
-linkTitle: "Control"
-childTitleEndOverwrite: "Tutorial"
-weight: 30
-type: docs
-empty_node: true
-layout: "empty"
-canonical: "tutorials/"
----
diff --git a/docs/dev/tools/tutorials/control/air-quality-fleet.md b/docs/dev/tools/tutorials/control/air-quality-fleet.md
deleted file mode 100644
index 60fecdbce9..0000000000
--- a/docs/dev/tools/tutorials/control/air-quality-fleet.md
+++ /dev/null
@@ -1,823 +0,0 @@
----
-title: "Monitor Air Quality with a Fleet of Sensors"
-linkTitle: "Air Quality Fleet"
-type: "docs"
-description: "Configure a fleet of machines to capture air quality sensor data across different locations."
-images: ["/tutorials/air-quality-fleet/three-sensor-dash-wide.png"]
-imageAlt: "A web dashboard showing PM2.5 readings from two air quality sensors."
-tags: ["tutorial"]
-authors: ["Jessamy Taylor"]
-languages: ["typescript"]
-viamresources: ["sensor", "data_manager"]
-platformarea: ["data", "fleet"]
-emailform: true
-level: "Intermediate"
-date: "2024-05-07"
-# updated: "" # When the tutorial was last entirely checked
-cost: 200
-# Learning goals:
-# 1. The reader can distinguish the concepts of organizations and locations and can select the appropriate setup when creating their own projects for their business.
-# 2. The reader can identify when to use fragments and evaluate when it is worth using fragments.
-# The reader can create their own fragments for their projects and knows what to include and exclude from them.
-# 3. The reader recognizes how permissions enable the management of data for a business across multiple customers while providing each customer access to their own data.
----
-
-In this tutorial you will use a fleet of devices to collect air quality data from different places and display the most recent readings from each device in a custom viewing dashboard.
-
-{{< alert title="Learning Goals" color="info" >}}
-
-By completing this project, you will learn to:
-
-- Configure a fleet of identical machines
-- Organize your fleet using {{< glossary_tooltip term_id="location" text="locations" >}}
-- Collect and sync data from multiple machines
-- Use the Viam TypeScript SDK to query sensor data and create a custom dashboard
-- Use API keys to provide access to different groups of machines
-
-{{< /alert >}}
-
-![Air quality dashboard in a web browser with PM2.5 readings from three different sensor machines displayed.](/tutorials/air-quality-fleet/three-sensor-dash-wide.png)
-
-## Requirements
-
-You can complete this tutorial using any number of air quality sensing machines.
-
-For each machine, you will need the following hardware:
-
-- [SDS011 Nova PM sensor](https://www.amazon.com/SDS011-Quality-Detection-Conditioning-Monitor/dp/B07FSDMRR5)
- - If you choose to use a different air quality sensor, you may need to [create your own module](/how-tos/create-module/) implementing the [sensor API](/components/sensor/#api) for your specific hardware.
-- A single-board computer (SBC) [capable of running `viam-server`](https://docs.viam.com/installation/)
-- An appropriate power supply
-
-Make sure all of your sensors are wired to your SBC before starting this tutorial.
-
-## Decide how you will organize your fleet
-
-Before you start connecting your devices to the Viam app, you'll need to decide how you want to group your devices.
-
-In the Viam app, {{< glossary_tooltip term_id="machine" text="machines" >}} are grouped into _locations_, and locations are grouped into _organizations_:
-
-- Each location can represent either a physical location or some other conceptual grouping.
-- An organization is the highest level grouping, and often contains all the locations (and machines) of an entire company.
-
-These groupings allow you to manage permissions; you can grant a user access to an individual machine, to all the machines in a location, or to everything in an entire organization.
-You choose how to group your machines.
-
-For more information, see [Fleet Management](/how-tos/manage-fleet/#organize-your-machines).
-
-### Example
-
-Imagine you create an air quality monitoring company called Pollution Monitoring Made Simple.
-Anyone can sign up and order one of your sensing machines.
-When a new customer signs up, you assemble a new machine with a sensor, SBC, and power supply.
-
-Before shipping the sensor machine to your new client, you connect the machine to the Viam app and configure it.
-To manage all your company's air quality sensing machines together, you create one organization called Pollution Monitoring Made Simple.
-Inside that organization, you create a location for each customer.
-You have some individual customers, for example Antonia, who have a sensor machine in their home, or perhaps one inside and one outside.
-You have other customers who are businesses, for example RobotsRUs, who have two offices, one in New York and one in Oregon, with multiple sensor machines in each.
-RobotsRUs wants to separate their sensor data by physical location, so you create a location for RobotsRUs and then create sub-locations to group their New York sensor machines and their Oregon machines.
-
-When you grant Antonia access to her location, she will be able to view data from the air sensors at her home.
-When you grant RobotsRUs access to their location, they will be able to view data from all of their sub-locations, or they can choose to spin up a dashboard showing data from only one sub-location at a time.
-You, as the organization owner, will be able to manage any necessary configuration changes for all air sensing machines in all locations created within the Pollution Monitoring Made Simple organization.
-
-### Organize your fleet
-
-For this tutorial, we will walk through how to set up your fleet based on the example above.
-You can choose to manage your fleet of machines differently based on what makes sense for your use case; if you're only configuring one or two sensors for personal use, feel free to add all your machines to one location and skip to the [next section](#connect-your-machines-to-the-viam-app).
-
-1. Navigate to the [Viam app](https://app.viam.com) in a web browser.
- Create an account and log in.
-1. Click the dropdown in the upper-right corner of the **FLEET** page and use the **+** button to create a new organization for your air quality machine company.
- Name the organization and click **Create**.
-1. Click **FLEET** in the upper-left corner of the page and click **LOCATIONS**.
- A new location called `First Location` is automatically generated for you.
- Rename it so you can use it for Antonia's machines:
-
-   Use the **...** menu next to the location name to rename it to `Antonia's Home`, then click **Save**.
-
-1. Now, create a separate location for RobotsRUs:
-
- On the left side of the **LOCATIONS** page, click the **Add location** button.
- Type in `RobotsRUs` and click **Add**.
-
-1. Add sub-locations to the RobotsRUs location to group the machines at each of their offices:
-
- Add a new location called `Oregon Office` using the same **Add location** button.
- Then, find the **New parent location** dropdown on the Oregon Office page.
- Select **RobotsRUs** and click **Change**.
-
- Repeat to add the New York office: Add a new location called `New York Office`, then change its parent location to **RobotsRUs**.
-
- In the next section, you'll add machines to the locations.
-
-## Connect your machines to the Viam app
-
-With your organizational structure in place, let's add some machines:
-
-1. Connect your first single-board computer to power.
- For this tutorial, we'll treat this as the machine for our first customer, Antonia.
- If the computer does not already have a Viam-compatible operating system installed, follow the [Platform Requirements section of the Installation Guide](/installation/viam-server-setup/#platform-requirements) to install a compatible operating system.
- You _do not_ need to follow the "Install `viam-server`" section; you will do that in the next step!
-
-1. Enable serial communication so that the SBC can communicate with the air quality sensor.
- For example, if you are using a Raspberry Pi, SSH to it and [enable serial communication in `raspi-config`](/installation/prepare/rpi-setup/#enable-communication-protocols).
-
-1. Click **Antonia's Home** in the left navigation menu to navigate to that location's page.
- In the **New machine** field near the top-right corner of the screen, type in a name for the machine, such as `Home Air Quality Sensor`, and click **Add machine**.
-
-1. You'll be taken to the machine details page and prompted to set up your machine part.
- Click **View setup instructions**.
- You can find these instructions later if you need them by clicking the part status indicator (which currently reads **Awaiting setup**).
-
-1. Follow the **Set up your machine part** instructions to install `viam-server` on the machine and connect it to the Viam app.
- `viam-server` is the binary that runs on the single-board computer (SBC), providing functionality including sensor data collection and connection to the Viam app.
-
- The setup page will indicate when the machine is successfully connected.
-
-1. If Antonia has more than one air sensing machine, add a new machine to her location and set it up in the same way.
-
-This is how you set up one machine.
-If you are following along for the RobotsRUs business from our example, create additional machines in each sub-location, that is, in the `Oregon Office` location and in the `New York Office` location.
-
-## Set up your hardware
-
-{{% alert title="Note" color="note" %}}
-If this were a real company and you were shipping air sensing machines to customers, you would have the customer plug in power to the machine wherever they are setting it up.
-Since you already installed `viam-server`, once a customer connects the machine to power and sets up wifi, the machine will automatically re-connect to the Viam app and pull any configuration updates.
-{{% /alert %}}
-
-For each sensing machine:
-
-1. Connect the PM sensor to a USB port on the machine's SBC.
-
-1. Position your sensing machines in strategic locations, and connect them to power.
- Here are some ideas for where to place sensing machines:
-
- - At home:
- - In an outdoor location protected from weather, such as under the eaves of your home
- - In the kitchen, where cooking can produce pollutants
- - Anywhere you spend lots of time indoors and want to measure exposure to pollutants
- - At work:
- - At your desk to check your exposure throughout the day
- - Near a door or window to see whether pollutants are leaking in
-
-## Configure your air quality sensors
-
-You need to [configure](/configure/) your hardware so that each of your machines can communicate with its attached air quality [sensor](/components/sensor/).
-
-No matter how many sensing machines you use, you can configure them efficiently by using a reusable configuration block called a _{{< glossary_tooltip term_id="fragment" text="fragment" >}}_.
-Fragments are a way to share and manage identical machine configurations across multiple machines.
-Instead of going through all the configuration steps for each machine, you'll start by configuring just one machine and create a fragment based on that machine's configuration.
-Then, you'll add the fragment to each of your machines.
-With all your machines configured using the same fragment, if you need to update the config in the future, you can just update the fragment and all machines will automatically get the update.
-
-{{< alert title="Note" color="note" >}}
-If this were a real company, adding the fragment to each individual machine would quickly become tiring.
-We're showing you how to do this manually as a learning device.
-Once you understand how to configure machines and use fragments, you can use [Provisioning](/fleet/provision/) to automatically set up your devices.
-{{< /alert >}}
-
-### Configure your first machine
-
-#### Configure the sensor
-
-1. Navigate to the **CONFIGURE** tab of the machine details page in the [Viam app](https://app.viam.com) for your first machine.
-2. Click the **+** (Create) button and click **Component** from the dropdown.
-   Click **sensor**, then search for `sds011` and click **sds011:v1** from the results.
-3. Click **Add module**.
- This adds the {{< glossary_tooltip term_id="module" text="module" >}} that provides the sensor model that supports the specific hardware we are using for this tutorial.
-
- ![The Add Module button that appears after you click the model name.](/tutorials/air-quality-fleet/add-sensor-module.png)
-
-4. Give the sensor a name like `PM_sensor` and click **Create**.
-5. In the newly created **PM_sensor** card, replace the contents of the attributes box (the empty curly braces `{}`) with the following:
-
- ```json {class="line-numbers linkable-line-numbers"}
- {
- "usb_interface": ""
- }
- ```
-
-6. Now you need to figure out which port your sensor is connected to on your board.
- SSH to your board and run the following command:
-
- ```sh{class="command-line" data-prompt="$"}
- ls /dev/serial/by-id
- ```
-
- This should output a list of one or more USB devices attached to your board, for example `usb-1a86_USB_Serial-if00-port0`.
- If the air quality sensor is the only device plugged into your board, you can be confident that the only device listed is the correct one.
- If you have multiple devices plugged into different USB ports, you may need to choose one path and test it, or unplug something, to figure out which path to use.
-
- Now that you have found the identifier, put the full path to the device into your config, for example:
-
- ```json {class="line-numbers linkable-line-numbers"}
- {
- "usb_interface": "/dev/serial/by-id/usb-1a86_USB_Serial-if00-port0"
- }
- ```
-
-7. Save the config.
- Your machine config should now resemble the following:
-
- ![Configure tab showing PM sensor and the sensor module configured.](/tutorials/air-quality-fleet/configured-sensor.png)
-
-#### Configure data capture and sync
-
-You have configured the sensor so the board can communicate with it, but sensor data is not yet being saved anywhere.
-Viam's [data management service](/services/data/) lets you capture data locally from each sensor and then sync it to the cloud where you can access historical sensor data and see trends over time.
-Once you configure the rest of your sensing machines, you'll be able to remotely access data from all sensors in all locations, and when you're ready, you can give customers [access](/cloud/rbac/) to the data from the sensors in their locations.
-
-Configure data capture and sync as follows:
-
-1. Click the **+** (Create) button and click **Service** from the dropdown.
-2. Click **data management**.
-3. Give your data manager a name such as the auto-populated name `data_manager-1` and click **Create**.
-4. Toggle **Syncing** to the on position.
- Set the sync interval to `0.05` minutes so that data syncs to the cloud every 3 seconds.
- You can change the interval if you like, just don't make it too long or you will have to wait a long time before you see your data!
-5. Let's add a tag to all your data so that you can query data from all your air quality sensors more easily in later steps.
- In the **Tags** field, type `air-quality` and click **+ Tag: air-quality** when it appears to create a new tag.
- This tag will now automatically be applied to all data collected by this data manager.
-6. Now the data management service is available to any components on your machine, and you can set up data capture on the sensor:
-7. On your **PM_sensor** card, click **Add method**.
-8. From the **Type** dropdown, select **Readings**.
-9. Set the **Frequency** to `0.1` readings per second.
- This will capture air quality data once every ten seconds.
- It is useful to capture data frequently for testing purposes, but you can always change this frequency later since you probably don't need to capture data this frequently all day forever.
-10. Save the config.
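-
-If you switch the **CONFIGURE** tab to **JSON** mode, the pieces you just configured map to entries roughly like the sketch below (the `modules` entry for the `sds011` module is omitted here). The sensor's `model` value comes from that module and the `usb_interface` path depends on your wiring, so treat both as placeholders:
-
-```json {class="line-numbers linkable-line-numbers"}
-{
-  "services": [
-    {
-      "name": "data_manager-1",
-      "type": "data_manager",
-      "namespace": "rdk",
-      "attributes": {
-        "sync_interval_mins": 0.05,
-        "capture_dir": "",
-        "tags": ["air-quality"],
-        "additional_sync_paths": []
-      }
-    }
-  ],
-  "components": [
-    {
-      "name": "PM_sensor",
-      "type": "sensor",
-      "namespace": "rdk",
-      "model": "<model triplet from the sds011 module>",
-      "attributes": {
-        "usb_interface": "<path to your sensor>"
-      },
-      "service_configs": [
-        {
-          "type": "data_manager",
-          "attributes": {
-            "capture_methods": [
-              {
-                "method": "Readings",
-                "capture_frequency_hz": 0.1
-              }
-            ]
-          }
-        }
-      ]
-    }
-  ]
-}
-```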
-
-### Create a fragment
-
-{{% alert title="Note" color="note" %}}
-If you are only using one air quality sensing machine for this tutorial, you do not need to create or use fragments, since fragments are useful only when configuring multiple machines.
-You can skip to [Test your sensors](#test-your-sensors).
-{{% /alert %}}
-
-While you configured your machine with the builder UI, the Viam app generated a JSON configuration file with all your parameters.
-This is the file that tells `viam-server` what resources are available to it and how everything is connected.
-Click **JSON** in the upper-left corner of the **CONFIGURE** tab to view the generated JSON file.
-You can manually edit this file instead of using the builder UI if you are familiar with JSON.
-
-In any case, now that the JSON is generated, you are ready to create a {{< glossary_tooltip term_id="fragment" text="fragment" >}}:
-
-1. Select and copy the entire contents of the JSON config.
-2. Navigate to the **FLEET** page and go to the [**FRAGMENTS** tab](https://app.viam.com/fragments).
-3. Click **Create fragment** and change your fragment's name by clicking on it. We used the name `air-sensing-machine`.
-4. Replace the empty curly braces `{}` with the config you copied from your machine.
-5. Click **Save**.
-6. Now, you can actually delete the entire config from your machine!
- In the next section, you will replace it with the fragment you just created so that it gets updated alongside all your other machines when you update the fragment in the future.
-
- Navigate back to your machine's **CONFIGURE** tab, select **JSON** mode, and delete the entire contents of the config.
- When you try to save, you'll get an invalid JSON error because it can't be empty.
- Put in a set of curly braces `{}` and then save the config successfully.
-
-### Add the fragment to all your machines
-
-Add the fragment you just created to each of your machines including the first one:
-
-1. Click the **+** button, then click **Insert fragment** in the dropdown menu.
-2. Search for and click the name of your fragment, for example `air-sensing-machine`.
-
- ![The insert fragment UI.](/tutorials/air-quality-fleet/add-fragment.png)
-
-3. Click **Insert fragment**.
- The module, sensor, and data manager will appear in your config.
-4. Save the config.
-5. Repeat these steps on the machine details page for each of your air quality sensing machines.
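-
-After you save, each machine's raw JSON config stays nearly empty, with the fragment referenced rather than inlined. As an assumption about the exact shape (which may differ in your version of the app), the reference typically looks something like:
-
-```json {class="line-numbers linkable-line-numbers"}
-{
-  "fragments": ["<your fragment ID>"]
-}
-```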
-
-## Test your sensors
-
-Now that all your hardware is configured, it's a good idea to make sure readings are being gathered by the sensors and sent to the cloud before proceeding with the tutorial.
-For each machine:
-
-1. Go to the machine details page in the [Viam app](https://app.viam.com) and navigate to the **CONTROL** tab.
-2. Within the **Sensors** section, click **Get Readings** for the **PM_sensor**.
- If the sensor software and hardware is working, you should see values populate the **Readings** column.
-
- ![The sensor readings on the control tab.](/tutorials/air-quality-fleet/get-readings.png)
-
- If you do not see readings, check the **LOGS** tab for errors, double-check that serial communication is enabled on the single board computer, and check that the `usb_interface` path is correctly specified (click below).
-
- {{< expand "Click here for usb_interface troubleshooting help" >}}
-
-If you only have one USB device plugged into each of your boards, the `usb_interface` value you configured in the sensor config is likely (conveniently) the same for all of your machines.
-If not, you can use [fragment overwrite](/fleet/fragments/#modify-the-config-of-a-machine-that-uses-a-fragment) to modify the value on any machine for which it is different:
-
-1. If you're not getting sensor readings from a given machine, check the path of the USB port using the same [process by which you found the first USB path](#usb-path).
-2. If the path to your sensor on one machine is different from the one you configured in the fragment, add a fragment overwrite to the config of that machine to change the path without needing to remove the entire fragment.
- Follow the [instructions to add a fragment overwrite](/fleet/fragments/#modify-the-config-of-a-machine-that-uses-a-fragment) to your machine's config, using the following JSON template:
-
- ```json {class="line-numbers linkable-line-numbers"}
- "fragment_mods": [
- {
- "fragment_id": "",
- "mods": [
- {
- "$set": {
- "components.PM_sensor.attributes.usb_interface": ""
- }
- }
- ]
- }
- ],
- ```
-
- Replace the values with your fragment ID and with the USB path you identify.
- If you named your sensor something other than `PM_sensor`, change the sensor name in the template above.
-
-3. Repeat this process for each machine that needs a different `usb_interface` value.
- If you have lots of machines with one `usb_interface` value, and lots of machines with a second one, you might consider duplicating the fragment, editing that value, and using that second fragment instead of the first one for the applicable machines, rather than using a fragment overwrite for each of the machines.
- You have options.
-
- {{< /expand >}}
-
-## Test data sync
-
-Next, check that data is being synced from your sensors to the cloud:
-
-1. Open your [**DATA** page](https://app.viam.com/data).
-2. Click the **Sensors** tab within the data page.
-3. If you have sensor data coming from machines unrelated to this project, use the filters on the left side of the page to view data from only your air quality sensors.
- Click the **Tags** dropdown and select the `air-quality` tag you applied to your data.
- You can also use these filters to show the data from one of your air quality sensors at a time by typing a machine name into the **Machine name** box and clicking **Apply** in the lower-left corner.
-
- ![The sensor readings that have synced to the DATA page.](/tutorials/air-quality-fleet/synced-data.png)
-
-Once you've confirmed that data is being collected and synced correctly, you're ready to start building a dashboard to display the data.
-If you'd like to graph your data using a Grafana dashboard, try our [Visualize Data with Grafana tutorial](/tutorials/services/visualize-data-grafana/).
-If you'd like to create your own customizable dashboard using the Viam TypeScript SDK, continue with this tutorial.
-
-## Code your custom TypeScript dashboard
-
-The [Viam TypeScript SDK](https://ts.viam.dev/) allows you to build custom web interfaces to interact with your machines.
-For this project, you'll use it to build a page that displays air quality sensor data for a given location.
-You'll host the website locally on your personal computer, and view the interface in a web browser on that computer.
-
-As you'll find out in the [authentication step](#authenticate-your-code-to-your-viam-app-location), you can set each customer up with credentials to access the data from only their location, or you can create a dashboard showing data from all sensors in your entire organization.
-
-![The air quality dashboard you'll build. This one has PM2.5 readings from two different sensor machines displayed, and a key with categories of air quality.](/tutorials/air-quality-fleet/two-sensors.png)
-
-### Set up your TypeScript project
-
-Complete the following steps on your laptop or desktop.
-You don't need to install or edit anything else on your machine's single-board computer (aside from `viam-server` which you already did); you'll be running the TypeScript code from your personal computer.
-
-1. Make sure you have the latest version of [Node.js](https://nodejs.org/en) installed on your computer.
-1. Install the Viam TypeScript SDK by running the following command in your terminal:
-
- ```sh {class="command-line" data-prompt="$"}
- npm install --save @viamrobotics/sdk
- ```
-
-1. Create a directory on your laptop or desktop for your project.
- Name it aqi-dashboard.
-
-1. Create a file in your aqi-dashboard folder and name it package.json.
- The package.json file holds necessary metadata about your project.
- Paste the following contents into it:
-
- ```json {class="line-numbers linkable-line-numbers"}
- {
- "name": "air-quality-dashboard",
- "description": "A dashboard for visualizing data from air quality sensors.",
- "scripts": {
- "start": "esbuild ./main.ts --bundle --outfile=static/main.js --servedir=static --format=esm",
- "test": "echo \"Error: no test specified\" && exit 1"
- },
- "author": "",
- "license": "ISC",
- "devDependencies": {
- "esbuild": "*"
- },
- "dependencies": {
- "@viamrobotics/sdk": "^0.13.0",
- "bson": "^6.6.0"
- }
- }
- ```
-
-{{% alert title="Fun fact" color="info" %}}
-The `--format=esm` flag in the `"start"` script is important because the ECMAScript module format is necessary to support the BSON dependency this project uses for data query formatting.
-If you don't know what the preceding sentence means, don't worry about it; just copy-paste the JSON above and it'll work.
-{{% /alert %}}
-
-### Authenticate your code to your Viam app location
-
-Your TypeScript code requires an API key to establish a connection to your machines.
-You can set up credentials to access data from all the sensor machines in your organization, or from just one location.
-These API keys only need [**Operator** permissions](/cloud/rbac/).
-
-In our example you could create a dashboard for Antonia with an API key to see the data from her location, and create a separate dashboard for RobotsRUs with a different API key to access the data from their location.
-If RobotsRUs wanted to separate their dashboards by sub-locations, you could set up API keys for RobotsRUs to access data for each of their sub-locations separately, or you could modify the example code to filter data by location name.
-
-You can then either deploy each dashboard on a web server you manage, or add a web server on one machine per customer that hosts the dashboard for the respective customer so that they can access their data on their local network.
-We leave this step to the reader.
-
-The following instructions describe how to set up an API key for one location.
-
-1. Create another file inside the aqi-dashboard folder and name it main.ts.
- Paste the following code into main.ts:
-
- ```typescript {class="line-numbers linkable-line-numbers"}
- // Air quality dashboard
-
- import * as VIAM from "@viamrobotics/sdk";
- import { BSON } from "bson";
-
- async function main() {
- const opts: VIAM.ViamClientOptions = {
- credentials: {
- // Replace "" (including brackets) with your machine's api key
- type: "api-key",
- payload: "",
- // Replace "" (including brackets) with your machine's api key id
- authEntity: "",
- },
- };
-
- const orgID: string = ""; // Replace
- const locationID: string = ""; // Replace
-
- //
-
- //
- }
-
- //
-
- main().catch((error) => {
- console.error("encountered an error:", error);
- });
- ```
-
-1. Now you need to get the API key and the {{< glossary_tooltip term_id="organization" text="organization" >}} and {{< glossary_tooltip term_id="location" text="location" >}} IDs to replace the placeholder strings in the code you just pasted.
-
- In the [Viam app](https://app.viam.com), navigate to the location page for the location containing your air quality machines.
-
- ![The location secret with a Copy button next to it.](/tutorials/air-quality-fleet/loc-secret-button.png)
-
-   Copy the **Location ID** and paste it into your code in place of ``, so that the line resembles `const locationID: string = "abcde12345"`.
-
-1. Use the dropdown menu in the upper-right corner of the page to navigate to your organization settings page.
- Copy the **Organization ID** found under **Details** near the top of the page.
- Paste it in place of `<ORGANIZATION-ID>` in your code.
-
-1. Under the **API Keys** heading, click **Generate Key**.
-
-1. Name your key something such as `air-sensors-key`.
-
-1. Select **Resource** and choose the location you have all your air quality sensing machines in.
-
-1. Set the **Role** to **Owner**, then click **Generate key**.
-
-1. Copy the ID and corresponding key you just created and paste them in place of `<API-KEY-ID>` and `<API-KEY>` in your code.
- For example, you'll now have something of the form
-
- ```json {class="line-numbers linkable-line-numbers"}
- authEntity: '1234abcd-123a-987b-1234567890abc',
- payload: 'abcdefg987654321abcdefghi'
- ```
-
- {{% snippet "secret-share.md" %}}
-
-### Add functionality to your code
-
-1. Now that you have the API key and org and location IDs, you are ready to add code that establishes a connection from the computer running the code to the Viam Cloud where the air quality sensor data is stored.
- You'll create a Viam `dataClient` instance which accesses all the data in your location, and then query this data to get only the data tagged with the `air-quality` tag you applied with your data service configuration.
- The following code also queries the data for a list of the machines that have collected air quality data so that later, you can make a dashboard that has a place for the latest data from each of them.
-
- Paste the following code into the main function of your main.ts script, directly after the `locationID` line, in place of the `// <Insert data client and query code here in later steps>` placeholder:
-
- ```typescript {class="line-numbers linkable-line-numbers"}
- // Instantiate data_client and get all
- // data tagged with "air-quality" from your location
- const client = await VIAM.createViamClient(opts);
- const myDataClient = client.dataClient;
- const query = {
- $match: {
- tags: "air-quality",
- location_id: locationID,
- organization_id: orgID,
- },
- };
- const match = { $group: { _id: "$robot_id" } };
- // Get a list of all the IDs of machines that have collected air quality data
- const BSONQueryForMachineIDList = [
- BSON.serialize(query),
- BSON.serialize(match),
- ];
- let machineIDs: any = await myDataClient?.tabularDataByMQL(
- orgID,
- BSONQueryForMachineIDList,
- );
- // Get all the air quality data
- const BSONQueryForData = [BSON.serialize(query)];
- let thedata: any = await myDataClient?.tabularDataByMQL(
- orgID,
- BSONQueryForData,
- );
- ```
-
-1. For this project, your dashboard will display the average of the last five readings from each air sensor.
- You need a function to calculate that average.
- The data returned by the query is not necessarily returned in order, so this function must put the data in order based on timestamps before averaging the last five readings.
-
- Paste the following code into main.ts after the end of your main function, in place of the `// <Insert averaging function code here in later steps>` placeholder:
-
- ```typescript {class="line-numbers linkable-line-numbers"}
- // Get the average of the last few readings from a given sensor
- async function getLastFewAv(alltheData: any[], machineID: string) {
- // Get just the data from this machine
- let thedata = new Array();
- for (const entry of alltheData) {
- if (entry.robot_id == machineID) {
- thedata.push({
- PM25: entry.data.readings["pm_2_5"],
- time: entry.time_received,
- });
- }
- }
-
- // Sort the air quality data from this machine
- // by timestamp
- thedata = thedata.sort(function (a, b) {
- let x = a.time.toString();
- let y = b.time.toString();
- if (x < y) {
- return -1;
- }
- if (x > y) {
- return 1;
- }
- return 0;
- });
-
- // Add up the last 5 readings collected.
- // If there are fewer than 5 readings, add all of them.
- let x = 5; // The number of readings to average over
- if (x > thedata.length) {
- x = thedata.length;
- }
- let total = 0;
- for (let i = 1; i <= x; i++) {
- const reading: number = thedata[thedata.length - i].PM25;
- total += reading;
- }
- // Return the average of the last few readings
- return total / x;
- }
- ```
-
-1. Now that you've defined the function to sort and average the data for each machine, you're done with all the `dataClient` code.
- The final piece you need to add to this script is a way to create some HTML to display data from each machine in your dashboard.
-
- Paste the following code into the main function of main.ts, in place of the `// <Insert HTML block code here in later steps>` placeholder:
-
- ```typescript {class="line-numbers linkable-line-numbers"}
- // Instantiate the HTML block that will be returned
- // once everything is appended to it
- let htmlblock: HTMLElement = document.createElement("div");
-
- // Display the relevant data from each machine to the dashboard
- for (const mach of machineIDs) {
- let insideDiv: HTMLElement = document.createElement("div");
- let avgPM: number = await getLastFewAv(thedata, mach._id);
- // Color-code the dashboard based on air quality category
- let level: string = "blue";
- switch (true) {
- case avgPM < 12.1: {
- level = "good";
- break;
- }
- case avgPM < 35.5: {
- level = "moderate";
- break;
- }
- case avgPM < 55.5: {
- level = "unhealthy-sensitive";
- break;
- }
- case avgPM < 150.5: {
- level = "unhealthy";
- break;
- }
- case avgPM < 250.5: {
- level = "very-unhealthy";
- break;
- }
- case avgPM >= 250.5: {
- level = "hazardous";
- break;
- }
- }
- // Create the HTML output for this machine
- insideDiv.className = "inner-div " + level;
- insideDiv.innerHTML =
- "<p>Machine ID: " + mach._id + "<br>PM2.5 reading: " + avgPM.toFixed(2) + "</p>";
- htmlblock.appendChild(insideDiv);
- }
-
- // Output a block of HTML with color-coded boxes for each machine
- return document.getElementById("insert-readings").replaceWith(htmlblock);
- ```
-
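-To sanity-check the averaging logic before you have live data flowing, you can temporarily add a few synthetic entries to main.ts and call `getLastFewAv` on them (this snippet is only for experimentation and is not part of the tutorial code):
-
-```typescript {class="line-numbers linkable-line-numbers"}
-// Synthetic readings shaped like the fields getLastFewAv reads from the tabular data
-const fakeData = [
-  { robot_id: "machine-1", data: { readings: { pm_2_5: 10 } }, time_received: "2024-01-01T00:00:01Z" },
-  { robot_id: "machine-1", data: { readings: { pm_2_5: 20 } }, time_received: "2024-01-01T00:00:02Z" },
-  { robot_id: "machine-2", data: { readings: { pm_2_5: 99 } }, time_received: "2024-01-01T00:00:03Z" },
-];
-
-// Averages the two machine-1 readings and logs 15
-getLastFewAv(fakeData, "machine-1").then((avg) => console.log("machine-1 average:", avg));
-```
-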
-The full code is available for reference on [GitHub](https://github.com/viam-labs/air-quality-fleet/blob/main/main.ts).
-
-### Style your dashboard
-
-You have completed the main TypeScript file that gathers and sorts the data.
-Now, you'll create a page to display the data.
-
-{{% alert title="Tip" color="tip" %}}
-The complete code is available on [GitHub](https://github.com/viam-labs/air-quality-fleet) as a reference.
-{{% /alert %}}
-
-1. Create a folder called static inside your aqi-dashboard folder.
- Inside the static folder, create a file called index.html.
- This file specifies the contents of the webpage that you will see when you run your code.
- Paste the following into index.html:
-
- ```{class="line-numbers linkable-line-numbers" data-line="11"}
- <!DOCTYPE html>
- <html>
-   <head>
-     <title>Air Quality Dashboard</title>
-     <link rel="stylesheet" href="style.css" />
-   </head>
-   <body>
-     <div id="main">
-       <h1>PM 2.5 readings</h1>
-       <p>The following are averages of the last few readings from each machine:</p>
-       <script type="module" src="main.js"></script>
-       <div id="insert-readings">
-         <p>Loading data...</p>
-         <p>It may take a few moments for the data to load.</p>
-         <p>Do not refresh page.</p>
-       </div>
-       <div class="key">
-         <p>Key:</p>
-         <p class="good">Good air quality</p>
-         <p class="moderate">Moderate</p>
-         <p class="unhealthy-sensitive">Unhealthy for sensitive groups</p>
-         <p class="unhealthy">Unhealthy</p>
-         <p class="very-unhealthy">Very unhealthy</p>
-         <p class="hazardous">Hazardous</p>
-       </div>
-       <p>After the data has loaded, you can refresh the page for the latest readings.</p>
-     </div>
-   </body>
- </html>
- ```
-
-{{% alert title="Fun fact" color="info" %}}
-Line 11, highlighted above, is where the HTML output of the TypeScript file main.ts will get pulled in.
-
-TypeScript is a superset of JavaScript with added functionality, and it transpiles to JavaScript, which is why your file is called main.ts even though line 11 indicates `src="main.js"`.
-If you look at line 5 of package.json, you can see that `./main.ts` builds out to `static/main.js`.
-{{% /alert %}}
-
-1. Now you'll create a style sheet to specify the fonts, colors, and spacing of your dashboard.
- Create a new file inside your static folder and name it style.css.
-1. Paste the following into style.css:
-
- ```{class="line-numbers linkable-line-numbers"}
- body {
- font-family: Helvetica;
- margin-left: 20px;
- }
-
- div {
- background-color: whitesmoke;
- }
-
- h1 {
- color: black;
- }
-
- h2 {
- font-family: Helvetica;
- }
-
- .inner-div {
- font-family: monospace;
- border: .2px solid;
- background-color: lightblue;
- padding: 20px;
- margin-top: 10px;
- max-width: 320px;
- font-size: large;
- }
-
- .key {
- max-width: 200px;
- padding: 0px 5px 5px;
- }
-
- .key p {
- padding: 4px;
- margin: 0px;
- }
-
- .good {
- background-color: lightgreen;
- }
-
- .moderate {
- background-color: yellow;
- }
-
- .unhealthy-sensitive {
- background-color: orange;
- }
-
- .unhealthy {
- background-color: red;
- }
-
- .very-unhealthy {
- background-color: violet;
- }
-
- .hazardous {
- color: white;
- background-color: purple;
- }
-
- #main {
- max-width:600px;
- padding:10px 30px 10px;
- }
- ```
-
- Feel free to adjust any of the colors, margins, fonts, and other specifications in style.css based on your preferences.
-
-## Full tutorial code
-
-You can find all the code in the [GitHub repo for this tutorial](https://github.com/viam-labs/air-quality-fleet).
-
-## Run the code
-
-1. In a terminal, navigate to your aqi-dashboard directory.
- Run the following command to start up your air quality dashboard:
-
- ```sh {id="terminal-prompt" class="command-line" data-prompt="$"}
- npm start
- ```
-
- ![Terminal window with the command 'npm start' run inside the aqi-dashboard folder. The output says 'start' and then 'esbuild' followed by the esbuild string from the package.json file you configured. Then there's 'Local:' followed by a URL and 'Network:' followed by a different URL.](/tutorials/air-quality-fleet/terminal-url.png)
-
-1. The terminal should output a line such as `Local: http://127.0.0.1:8000/`.
- Copy the URL the terminal displays and paste it into the address bar in your web browser.
- The data may take a few seconds to load; once it has loaded, you should see air quality data from all of your sensors.
- If the dashboard does not appear, right-click the page, select **Inspect**, and check for errors in the console.
-
- ![Air quality dashboard in a web browser with PM2.5 readings from three different sensor machines displayed.](/tutorials/air-quality-fleet/three-sensor-dash.png)
-
- Great work.
- You've learned how to configure a fleet of machines, sync their data to one place, and pull that data into a custom dashboard using TypeScript.
-
-
-
-## Next steps
-
-Now that you can monitor your air quality, you can try to improve it and see if your efforts are effective.
-You might try putting an air filter in your home or office and comparing the air quality data before you start running the filter with air quality after you have run the filter for a while.
-Or, try sealing gaps around doors, and check whether your seal is working by looking at your dashboard.
-
-You could set up a text or email alert when your air quality passes a certain threshold.
-For instructions on setting up an email alert, see the [Monitor Helmet Usage tutorial](/tutorials/projects/helmet/) as an example.
-For an example of setting up text alerts, see the [Detect a Person and Send a Photo tutorial](/tutorials/projects/send-security-photo/).
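-
-As a rough sketch of the idea (not part of this tutorial's code; the webhook URL, threshold, and function name are all hypothetical), the dashboard could call something like this for each machine after computing its average:
-
-```typescript {class="line-numbers linkable-line-numbers"}
-// Post a message to an external alerting service when a machine's average
-// PM2.5 crosses a threshold. Replace the placeholder URL with your own endpoint.
-const ALERT_WEBHOOK_URL = "<YOUR-ALERT-WEBHOOK-URL>";
-const UNHEALTHY_PM25 = 55.5;
-
-async function alertIfUnhealthy(machineID: string, avgPM: number) {
-  if (avgPM < UNHEALTHY_PM25) return;
-  await fetch(ALERT_WEBHOOK_URL, {
-    method: "POST",
-    headers: { "Content-Type": "application/json" },
-    body: JSON.stringify({ machineID, avgPM }),
-  });
-}
-```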
-
-For another example of a custom TypeScript interface, check out the [Claw Game tutorial](/tutorials/projects/claw-game/).
-Instead of displaying data, the claw game interface has buttons to control a robotic arm.
-
-In this tutorial we covered configuring a fleet of machines using fragments, but to automate the setup process further, you can [use the Viam Agent to provision machines](/fleet/provision/).
-
-{{< cards >}}
-{{% card link="/fleet/provision/" %}}
-{{% card link="/tutorials/services/visualize-data-grafana/" %}}
-{{% card link="/tutorials/projects/helmet/" %}}
-{{< /cards >}}
diff --git a/docs/dev/tools/tutorials/control/flutter-app.md b/docs/dev/tools/tutorials/control/flutter-app.md
deleted file mode 100644
index 9219e4f378..0000000000
--- a/docs/dev/tools/tutorials/control/flutter-app.md
+++ /dev/null
@@ -1,790 +0,0 @@
----
-title: "Build a Flutter App that Integrates with Viam"
-linkTitle: "Build a Flutter App"
-type: "docs"
-description: "Use Viam's Flutter SDK to build a custom mobile app to show your machines and their components."
-videos: ["/tutorials/flutter-app/demo.webm", "/tutorials/flutter-app/demo.mp4"]
-videoAlt: "An example Viam-integrated Flutter app."
-tags: ["sdk", "flutter"]
-authors: ["Clint Purser"]
-languages: ["flutter"]
-viamresources: []
-platformarea: ["core"]
-images: ["/tutorials/flutter-app/preview.gif"]
-level: "Intermediate"
-date: "2024-01-17"
-cost: "0"
----
-
-
-
-
-
- {{}}
-
-
-
-Flutter is Google's user interface toolkit for building applications for mobile, web, and desktop from a single codebase.
-If you're looking to monitor and control individual machines with the same functionality you have on the [**CONTROL** tab](/fleet/control/), you can use the general-purpose [Viam mobile app](/fleet/control/#control-interface-in-the-viam-mobile-app) rather than creating your own.
-If you need custom functionality or a custom interface, you can use Viam's [Flutter SDK](https://flutter.viam.dev/) to build a custom app to interact with your machines that run on Viam.
-
-This tutorial guides you through creating a mobile app that shows your machines and their components.
-As you work through this project you will learn the following:
-
-- Flutter development basics
-- How to trigger app behavior when a user presses a button
-- The basics of using Viam's Flutter SDK
-
-## Requirements
-
-You do not need any hardware for this tutorial other than a computer running macOS or a 64-bit Linux operating system.
-
-This tutorial assumes you already have a machine [configured](/configure/) on the [Viam app](https://app.viam.com).
-
-## Set up your Flutter development environment
-
-This tutorial uses [Visual Studio Code](https://code.visualstudio.com/download) (VS Code) as the development environment (IDE), and uses the VS Code [Flutter extension](https://marketplace.visualstudio.com/items?itemName=Dart-Code.flutter) to generate sample project code.
-You can use a different editor, but it will be much easier to follow along using VS Code.
-
-### Platform compatibility
-
-Flutter can compile and run on many different operating systems.
-For this tutorial, you will be developing for iOS.
-In other words, iOS is your development _target_.
-
-You can always run your app on another platform later by configuring the code specific to that target.
-
-### Install Flutter
-
-Install Flutter according to [the Flutter documentation](https://docs.flutter.dev/get-started/install).
-Those instructions include installation of various tools and extensions for different development targets.
-For this walkthrough, you only need to install the following:
-
-- Flutter SDK
-- Visual Studio Code with the [Flutter extension](https://marketplace.visualstudio.com/items?itemName=Dart-Code.flutter)
-- [Xcode](https://developer.apple.com/xcode/), which is required for developing for iOS
- - When prompted, do install CocoaPods.
- You need it to support the iOS simulator.
-
-{{% alert title="Flutter version" color="note" %}}
-We recommend using Flutter 3.19.6, as this sample app was tested with this version.
-`fvm` is a useful tool for targeting specific Flutter versions.
-You can run `fvm use 3.19.6` in the terminal before building your sample app to target Flutter 3.19.6.
-{{% /alert %}}
-
-## Start creating code
-
-### Create your Flutter project
-
-1. Launch VS Code.
- Open the [command palette](https://code.visualstudio.com/docs/getstarted/userinterface#_command-palette) by pressing `Ctrl+Shift+P` or `Shift+Cmd+P`, depending on your system.
-
-2. Start typing "flutter new."
- Click the **Flutter: New Project** command when it populates.
-
-{{}}
-
-3. Click **Application**, then choose a folder in which to create your project.
-4. Give your project a name, for example, smart_machine_app.
- Naming it smart_machine_app will make it slightly easier to follow along in later steps.
-
-{{}}
-
-When you hit **Enter**, Flutter auto-generates a project folder with a useful starter project.
-VS Code automatically opens it.
-
-If you don't change any of the code files, you'll have a counter app with a button that adds one to the total each time you press it.
-That's not going to be very helpful for interacting with your fleet of machines, so in the next steps, you'll edit three of these automatically-created files to start building out a Viam-integrated app.
-
-### Edit the YAML configuration files
-
-1. In the VS Code file explorer, find and open the pubspec.yaml file.
- This file specifies your app's metadata including its current version and dependencies.
-
-2. Delete the contents of your pubspec.yaml file and replace them with the following configuration which, among other things, specifies the `viam_sdk` as a dependency for your project:
-
- ```yaml {class="line-numbers linkable-line-numbers"}
- name: smart_machine_app
- description: "A Flutter app that integrates with Viam."
-
- publish_to: "none" # Remove this line if you wish to publish to pub.dev
-
- version: 1.0.0+1
-
- environment:
- sdk: ">=3.2.3 <4.0.0"
-
- dependencies:
- flutter:
- sdk: flutter
- flutter_dotenv: ^5.1.0
- image: ^4.0.17
- cupertino_icons: ^1.0.2
- viam_sdk: ^0.0.20
-
- dev_dependencies:
- flutter_test:
- sdk: flutter
-
- flutter_lints: ^2.0.0
-
- flutter:
- uses-material-design: true
- ```
-
- {{% alert title="Note" color="note" %}}
-
- If you named your app something other than `smart_machine_app`, change the `name` value in the first line of the pubspec.yaml file to the name you gave your app during setup.
-
- {{% /alert %}}
-
-3. Next, open the analysis_options.yaml configuration file.
- This file specifies how strictly Flutter should enforce best practices in your code when it checks for things like syntax errors.
- For this tutorial, you will use a less strict analyzer configuration to start, but you can always tune this later.
- If you later publish an actual production app, you will likely want to increase the strictness of the analyzer before sharing your app with others.
-
-4. Replace the contents of the analysis_options.yaml file with the following:
-
- ```yaml {class="line-numbers linkable-line-numbers"}
- include: package:flutter_lints/flutter.yaml
-
- linter:
- rules:
- avoid_print: false
- prefer_const_constructors_in_immutables: false
- prefer_const_constructors: false
- prefer_const_literals_to_create_immutables: false
- prefer_final_fields: false
- unnecessary_breaks: true
- use_key_in_widget_constructors: false
- ```
-
-### Configure iOS-specific code
-
-Now you'll update some configurations in the iOS-specific code to support the [Viam Flutter SDK](https://flutter.viam.dev/).
-
-1. Open ios/Podfile.
- If Podfile does not exist in that directory, generate it by running `flutter pub get` in the root directory of your app.
- If the `flutter pub get` command returns an error, you may need to [upgrade the Flutter SDK](https://docs.flutter.dev/release/upgrade).
-
- At the top of the file you will see the following lines:
-
- ```ruby {class="line-numbers linkable-line-numbers"}
- # Uncomment this line to define a global platform for your project
- # platform :ios, '11.0'
- ```
-
- Uncomment the line and update the minimum iOS version as shown:
-
- ```ruby {class="line-numbers linkable-line-numbers"}
- # Uncomment this line to define a global platform for your project
- platform :ios, '13.0'
- ```
-
-2. Open ios/Runner/Info.plist.
- It will look something like this:
-
- {{}}
-
-3. Insert the following code on the first line after the opening `<dict>` tag.
- These lines are [required to establish WebRTC and local device mDNS connections](https://github.com/viamrobotics/viam-flutter-sdk?tab=readme-ov-file#update-infoplist).
-
- ```xml {class="line-numbers linkable-line-numbers"}
- <key>NSLocalNetworkUsageDescription</key>
- <string>Smart Machine App requires access to your device's local network to connect to your devices.</string>
- <key>NSBonjourServices</key>
- <array>
-   <string>_rpc._tcp</string>
- </array>
- ```
-
- {{}}
-
- The file should now look like the following:
-
- {{}}
-
-### Edit the main file
-
-1. Open the lib/main.dart file.
-
-2. Replace the contents of this file with the following code, which creates the scaffold of your app's login screen:
-
- ```dart {class="line-numbers linkable-line-numbers"}
- import 'package:flutter/material.dart';
- import 'package:flutter_dotenv/flutter_dotenv.dart';
-
- void main() async {
- // await dotenv.load(); // <-- This loads your API key; will un-comment later
- runApp(MyApp());
- }
-
- class MyApp extends StatelessWidget {
- const MyApp({super.key});
-
- @override
- Widget build(BuildContext context) {
- return MaterialApp(
- title: 'Smart Machine App',
- theme: ThemeData(
- colorScheme: ColorScheme.fromSeed(seedColor: Colors.purple),
- ),
- home: MyHomePage(),
- );
- }
- }
-
- class MyHomePage extends StatelessWidget {
- @override
- Widget build(BuildContext context) {
- return Scaffold(
- body: Center(
- child: Column(
- mainAxisAlignment: MainAxisAlignment.center,
- children: [
- Text('Smart Machine App'),
- SizedBox(height: 16),
- ElevatedButton(onPressed: null, child: Text('Login')),
- ],
- ),
- ),
- );
- }
- }
-
-
- ```
-
- If you chose a name other than `Smart Machine App` for your project, edit lines 15 and 32 with your own app title.
-
-### Launch the app
-
-You now have enough of your new app coded to be able to build and test a rendering of it.
-
-Follow the steps below to build and preview the current state of your app.
-
-1. Open lib/main.dart.
- In the bottom right corner of VS Code, find the button that shows the current target device.
- Click the button to change your target device.
- Make sure that you have your target device selected before you continue.
-
- {{}}
-
-2. With lib/main.dart still open, find the "Start Debugging" button in the upper right corner of the VS Code window.
- Click the button to build and render your app.
-
- {{}}
-
- A window should open up, displaying a rendering of your smart machine app:
-
- {{}}
-
-## Add app navigation
-
-### Add a new screen
-
-Great work so far!
-Your app is successfully running, with a single screen and an inactive button.
-Next, you will add a new screen that pulls in some information from your {{< glossary_tooltip term_id="organization" text="organization" >}} in the Viam app.
-This new screen will be accessed from the login button.
-
-In the VS Code file explorer on the left-hand side, right click lib/ and click **New File**, then name the new file home_screen.dart.
-
-Paste the following code into the home_screen.dart file you just created:
-
-```dart {class="line-numbers linkable-line-numbers"}
-import 'package:flutter/material.dart';
-import 'package:flutter_dotenv/flutter_dotenv.dart';
-import 'package:viam_sdk/protos/app/app.dart';
-import 'package:viam_sdk/viam_sdk.dart';
-
-class HomeScreen extends StatefulWidget {
- const HomeScreen({super.key});
-
- @override
- State<HomeScreen> createState() => _HomeScreenState();
-}
-
-class _HomeScreenState extends State<HomeScreen> {
- late Viam _viam;
- late Organization _organization;
- List<Location> _locations = [];
- bool _loading = true;
-
- @override
- void initState() {
- _getData();
- super.initState();
- }
-
- void _getData() async {
- try {
- _viam = await Viam.withApiKey(dotenv.env['API_KEY_ID']?? '', dotenv.env['API_KEY']?? '');
- _organization = (await _viam.appClient.listOrganizations()).first;
- _locations = await _viam.appClient.listLocations(_organization.id);
-
- // in Flutter, setState tells the UI to rebuild the widgets whose state has changed,
- // this is how you change from showing a loading screen to a list of values
- setState(() {
- _loading = false;
- });
- } catch (e) {
- print(e);
- }
- }
-
- /// This method will navigate to a specific [Location].
- void _navigateToLocation(Location location) {
- // Navigator.of(context)
- // .push(MaterialPageRoute(builder: (_) => LocationScreen(_viam, location)));
- }
-
- @override
- Widget build(BuildContext context) {
- return Scaffold(
- appBar: AppBar(title: const Text('Locations')),
- // If the list is loading, show a loading indicator.
- // Otherwise, show a list of [Location]s.
- body: _loading
- ? Center(
- child: const CircularProgressIndicator.adaptive(),
- )
- : // Build a list from the [_locations] state.
- ListView.builder(
- itemCount: _locations.length,
- itemBuilder: (_, index) {
- final location = _locations[index];
- return ListTile(
- title: Text(location.name),
- onTap: () => _navigateToLocation(location),
- trailing: const Icon(Icons.chevron_right),
- );
- },
- ),
- );
- }
-}
-```
-
-### Get the Viam API key
-
-Notice the following line in the file:
-
-```dart {class="line-numbers linkable-line-numbers" data-start="28"}
-_viam = await Viam.withApiKey(dotenv.env['API_KEY_ID']?? '', dotenv.env['API_KEY']?? '');
-```
-
-This line in the code defines how your Flutter app authenticates to the Viam platform, by referencing two environment variables that together comprise your Viam API key.
-
-Follow the steps below to get your API key and API key ID, and create an environment variable file to store them in:
-
-1. In your project folder, create a file to store your API keys.
- Name it .env.
- Copy and paste these two lines into the file:
-
- ```sh {class="line-numbers linkable-line-numbers"}
- API_KEY_ID="PASTE YOUR API KEY ID HERE"
- API_KEY="PASTE YOUR API KEY HERE"
- ```
-
-2. Go to the [Viam app](https://app.viam.com) and log in.
-
-3. Click the organization dropdown menu on the right side of the top banner.
- If you're not already in the organization you want to connect to, click the correct organization name to navigate to it.
-
- {{}}
-
-4. Click the organization dropdown menu again and click **Settings**.
-
-5. Scroll to the **API Keys** section.
- You can find and use an existing API key for your smart machine, or you can create a new one for this application.
- To create a new one:
-
- 1. Click **Generate key**.
- 2. Give the key a name like "flutter-app-my-org-name."
- 3. Click the **Resource** dropdown and select your organization.
- 4. Set **Role** to **Owner**.
- 5. Click **Generate key**.
- 6. Find your new key at the bottom of the list.
-
-6. Use the copy buttons next to the API key ID and API key to copy each of them and paste them into your .env file.
-
-{{< readfile "/static/include/snippet/secret-share.md" >}}
-
-7. In your lib/main.dart, find line 5:
-
- ```dart
- void main() async {
- // await dotenv.load(); // <-- This loads your API key; will un-comment later
- runApp(MyApp());
- }
- ```
-
- Now that you have a .env file to load, un-comment that line so it loads the file.
- Your `main()` function should look like this:
-
- ```dart
- void main() async {
- await dotenv.load(); // <-- This loads your API key
- runApp(MyApp());
- }
- ```
-
-8. Reopen your pubspec.yaml file and paste the following two lines at the end of it, inside the `flutter:` section.
- Listing the .env among your app's assets lets the app access the file.
-
- ```yaml
- assets:
- - .env
- ```
-
- The last few lines of your pubspec.yaml file should now look like this:
-
- ```yaml
- flutter:
- uses-material-design: true
- assets:
- - .env
- ```
-
-### Connect the login button to the home screen
-
-In VS Code, reopen main.dart.
-
-Add the following line to the imports at the top of the file:
-
-```dart {class="line-numbers linkable-line-numbers"}
-import 'home_screen.dart';
-```
-
-Change `ElevatedButton` in the `Column` to the following:
-
-```dart {class="line-numbers linkable-line-numbers" data-start="35"}
- ElevatedButton(
- onPressed: () => Navigator.of(context)
- .push(MaterialPageRoute(builder: (_) => HomeScreen())),
- child: Text('Login'),
- ),
-```
-
-Run the mobile application simulator again to see how your changes have taken effect.
-Now, when you tap the login button, the app uses the API key to get the list of locations in your organization.
-It displays the names of the locations on a new screen:
-
-{{}}
-
-## Add more screens
-
-### Add a location screen
-
-At this point, you have an app that displays a list of {{< glossary_tooltip term_id="location" text="locations" >}}, but nothing happens when you tap a location name.
-In this step you will add functionality so that tapping a location name brings you to the list of {{< glossary_tooltip term_id="machine" text="smart machines" >}} in that location.
-
-In VS Code create a new file in the same folder as main.dart and home_screen.dart.
-Name it location_screen.dart.
-
-Paste the following code into the file:
-
-```dart {class="line-numbers linkable-line-numbers"}
-import 'package:flutter/material.dart';
-import 'package:viam_sdk/protos/app/app.dart';
-import 'package:viam_sdk/viam_sdk.dart';
-
-import 'robot_screen.dart';
-
-class LocationScreen extends StatefulWidget {
- /// The authenticated Viam instance.
- /// See previous screens for more details.
- final Viam _viam;
-
- /// The [Location] to show details for
- final Location location;
-
- const LocationScreen(this._viam, this.location, {super.key});
-
- @override
- State<LocationScreen> createState() => _LocationScreenState();
-}
-
-class _LocationScreenState extends State<LocationScreen> {
- /// Similar to previous screens, start with [_isLoading] set to true.
- bool _isLoading = true;
-
- /// A list of [Robot]s available in this [Location].
- List<Robot> robots = [];
-
- @override
- void initState() {
- super.initState();
- // Call our own _initState method to initialize our state.
- _initState();
- }
-
- /// This method will get called when the widget initializes its state.
- /// It exists outside the overridden [initState] function since it's async.
- Future<void> _initState() async {
- // Using the authenticated [Viam] client received as a parameter,
- // you can obtain a list of smart machines (robots) within this location.
- final robots = await widget._viam.appClient.listRobots(widget.location.id);
- setState(() {
- // Once you have the list of robots, you can set the state.
- this.robots = robots;
- _isLoading = false;
- });
- }
-
- void _navigateToRobot(Robot robot) {
- Navigator.of(context).push(
- MaterialPageRoute(builder: (_) => RobotScreen(widget._viam, robot)));
- }
-
- @override
- Widget build(BuildContext context) {
- return Scaffold(
- appBar: AppBar(
- title: Text(widget.location.name),
- ),
- // If the list is loading, show a loading indicator.
- // Otherwise, show a list of [Robot]s.
- body: _isLoading
- ? const CircularProgressIndicator.adaptive()
- : // Build a list from the [robots] state.
- ListView.builder(
- itemCount: robots.length,
- itemBuilder: (_, index) {
- final robot = robots[index];
- return ListTile(
- title: Text(robot.name),
- onTap: () => _navigateToRobot(robot),
- trailing: const Icon(Icons.chevron_right),
- );
- }),
- );
- }
-}
-
-```
-
-### Add a robot screen
-
-Create a new file named robot_screen.dart and paste the following into the file:
-
-```dart {class="line-numbers linkable-line-numbers"}
-/// This is the screen that shows the resources available on a robot (or smart machine).
-/// It takes in a Viam app client instance, as well as a robot client.
-/// It then uses the Viam client instance to create a connection to that robot client.
-/// Once the connection is established, you can view the resources available
-/// and send commands to them.
-
-import 'package:flutter/material.dart';
-import 'package:viam_sdk/protos/app/app.dart';
-import 'package:viam_sdk/viam_sdk.dart';
-
-class RobotScreen extends StatefulWidget {
- final Viam _viam;
- final Robot robot;
-
- const RobotScreen(this._viam, this.robot, {super.key});
-
- @override
- State<RobotScreen> createState() => _RobotScreenState();
-}
-
-class _RobotScreenState extends State<RobotScreen> {
- /// Similar to previous screens, start with [_isLoading] set to true.
- bool _isLoading = true;
-
- /// This is the [RobotClient], which allows you to access
- /// all the resources of a Viam Smart Machine.
- /// This differs from the [Robot] provided to us in the widget constructor
- /// in that the [RobotClient] contains a direct connection to the Smart Machine
- /// and its resources. The [Robot] object simply contains information about
- /// the Smart Machine, but is not actually connected to the machine itself.
- ///
- /// This is initialized late because it requires an asynchronous
- /// network call to establish the connection.
- late RobotClient client;
-
- @override
- void initState() {
- super.initState();
- // Call our own _initState method to initialize our state.
- _initState();
- }
-
- @override
- void dispose() {
- // You should always close the [RobotClient] to free up resources.
- // Calling [RobotClient.close] will clean up any tasks and
- // resources created by Viam
- if (_isLoading == false) {
- client.close();
- }
- super.dispose();
- }
-
- /// This method will get called when the widget initializes its state.
- /// It exists outside the overridden [initState] function since it's async.
- Future<void> _initState() async {
- // Using the authenticated [Viam] instance received as a parameter,
- // the app can obtain a connection to the Robot.
- // There is a helpful convenience method on the [Viam] instance for this.
- final robotClient = await widget._viam.getRobotClient(widget.robot);
- setState(() {
- client = robotClient;
- _isLoading = false;
- });
- }
-
- /// A computed variable that returns the available [ResourceName]s of
- /// this robot in an alphabetically sorted list.
- List<ResourceName> get _sortedResourceNames {
- return client.resourceNames..sort((a, b) => a.name.compareTo(b.name));
- }
-
- @override
- Widget build(BuildContext context) {
- return Scaffold(
- appBar: AppBar(title: Text(widget.robot.name)),
- body: _isLoading
- ? const Center(child: CircularProgressIndicator.adaptive())
- : ListView.builder(
- itemCount: client.resourceNames.length,
- itemBuilder: (_, index) {
- final resourceName = _sortedResourceNames[index];
- return ListTile(
- title: Text(resourceName.name),
- subtitle: Text(
- '${resourceName.namespace}:${resourceName.type}:${resourceName.subtype}'),
- );
- }));
- }
-}
-
-```
-
-### Connect the screens together
-
-Now that you have the code for the screens in place, you can enable navigation between them.
-
-Connect the home screen to the locations screen by un-commenting the following two lines in home_screen.dart:
-
-```dart {class="line-numbers linkable-line-numbers" data-line="3-4" data-start="42"}
- /// This method will navigate to a specific [Location]. <-- Leave this commented!
- void _navigateToLocation(Location location) {
- Navigator.of(context) // <-- Un-comment this
- .push(MaterialPageRoute(builder: (_) => LocationScreen(_viam, location))); // <-- And un-comment this
- }
-```
-
-Add the following import to the top of the file:
-
-```dart {class="line-numbers linkable-line-numbers"}
-import 'location_screen.dart';
-```
-
-The whole home_screen.dart should now look like this:
-
-```dart {class="line-numbers linkable-line-numbers"}
-import 'package:flutter/material.dart';
-import 'package:flutter_dotenv/flutter_dotenv.dart';
-import 'package:viam_sdk/protos/app/app.dart';
-import 'package:viam_sdk/viam_sdk.dart';
-
-import 'location_screen.dart'; // <---- Added import
-
-class HomeScreen extends StatefulWidget {
- const HomeScreen({super.key});
-
- @override
- State<HomeScreen> createState() => _HomeScreenState();
-}
-
-class _HomeScreenState extends State<HomeScreen> {
-
- late Viam _viam;
- late Organization _organization;
- List<Location> _locations = [];
- bool _loading = true;
-
- @override
- void initState() {
- _getData();
- super.initState();
- }
-
- void _getData() async {
- try {
- _viam = await Viam.withApiKey(dotenv.env['API_KEY_ID']?? '', dotenv.env['API_KEY']?? '');
- _organization = (await _viam.appClient.listOrganizations()).first;
- _locations = await _viam.appClient.listLocations(_organization.id);
-
- // In Flutter, setState tells the UI to rebuild the widgets whose state has changed,
- // this is how you change from showing a loading screen to a list of values
- setState(() {
- _loading = false;
- });
- } catch (e) {
- print(e);
- }
- }
-
- /// This method will navigate to a specific [Location].
- void _navigateToLocation(Location location) {
- Navigator.of(context).push( // <-- uncommented
- MaterialPageRoute(builder: (_) => LocationScreen(_viam, location)));
- }
-
- @override
- Widget build(BuildContext context) {
- return Scaffold(
- appBar: AppBar(title: const Text('Locations')),
- // If the list is loading, show a loading indicator.
- // Otherwise, show a list of [Location]s.
- body: _loading
- ? Center(
- child: const CircularProgressIndicator.adaptive(),
- )
- : // Build a list from the [_locations] state.
- ListView.builder(
- itemCount: _locations.length,
- itemBuilder: (_, index) {
- final location = _locations[index];
- return ListTile(
- title: Text(location.name),
- onTap: () => _navigateToLocation(location),
- trailing: const Icon(Icons.chevron_right),
- );
- }));
- }
-}
-```
-
-Try running your app.
-Now, when you tap a location, you'll see a list of the smart machines in that location.
-When you tap one of them (if it is currently live), you'll see a list of that machine's {{< glossary_tooltip term_id="resource" text="resources" >}}:
-
-{{}}
-
-## Next steps
-
-Nice work!
-You have successfully made a Flutter app integrated with Viam!
-
-At this point you could customize the robot screen to have more functionality to control the machine or to show data from the robot in neat ways.
-The Viam Flutter SDK GitHub repo contains [more example apps](https://github.com/viamrobotics/viam-flutter-sdk/tree/main/example) for your reference.
-
-You can also stylize the look and feel of your app to match your brand.
-Look around [the Flutter documentation](https://docs.flutter.dev/) to learn how.
-
-If you're planning to release your app for general use, you will need to add an authentication flow to your app instead of adding API keys as environment variables.
-If you need assistance with this, reach out to us on our [Discord](https://discord.gg/viam) and we’ll be happy to help.
-
-When you’re ready to publish your app to the app stores you can follow these articles from Flutter on the subject:
-
-- [iOS](https://docs.flutter.dev/deployment/ios)
-- [Android](https://docs.flutter.dev/deployment/android)
diff --git a/docs/dev/tools/tutorials/control/gamepad.md b/docs/dev/tools/tutorials/control/gamepad.md
deleted file mode 100644
index f868734d91..0000000000
--- a/docs/dev/tools/tutorials/control/gamepad.md
+++ /dev/null
@@ -1,236 +0,0 @@
----
-title: "Drive a Rover (like SCUTTLE or Yahboom) Using a Gamepad"
-linkTitle: "Drive a Rover with a Gamepad"
-type: "docs"
-description: "Drive a wheeled rover with a Bluetooth gamepad that has a dongle."
-videos:
- [
- "/tutorials/videos/scuttle-gamepad-preview.webm",
- "/tutorials/videos/scuttle-gamepad-preview.mp4",
- ]
-videoAlt: "Drive a Scuttle Robot with a Bluetooth gamepad."
-images: ["/tutorials/videos/scuttle-gamepad-preview.gif"]
-aliases:
- - "/tutorials/scuttle-gamepad/"
- - "/tutorials/scuttlebot/scuttle-gamepad/"
- - "/tutorials/control/scuttle-gamepad/"
-tags: ["base", "scuttle", "gamepad"]
-authors: []
-languages: []
-viamresources: ["base", "input_controller", "base_remote_control"]
-platformarea: ["core"]
-level: "Intermediate"
-date: "2022-08-10"
-updated: "2024-04-17"
-cost: 575
----
-
-By the end of this tutorial, you'll be able to drive your rover around like an RC car.
-
-{{< alert title="Learning Goals" color="info" >}}
-
-After following this tutorial, you will be able to use the input controller component to control your machine using a gamepad.
-
-{{< /alert >}}
-
-## Requirements
-
-You will need the following hardware to complete this tutorial:
-
-- A wheeled rover, configured with a [base component](/components/base/) on the [Viam app](https://app.viam.com/).
- This tutorial uses a [SCUTTLE rover](https://www.scuttlerobot.org/shop/) as an example but you can complete this tutorial using a [Yahboom 4WD Smart Robot](https://category.yahboom.net/collections/robotics/products/4wdrobot) or an entirely different rover.
- - For a tutorial on configuring your rover, see [Configure a Rover](/tutorials/configure/configure-rover/).
-- [EasySMX ESM-9101 Wireless Controller](https://www.amazon.com/Wireless-Controller-EasySMX-ESM-9101-Gamepad/dp/B07F1NLGW2?th=1) or a similar gamepad and dongle.
- This is the controller that comes with the SCUTTLE rover.
- You can also use an 8BitDo controller with additional setup.
-
-{{