Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

DOCS-2899: Clean up data client API code samples #729

Merged
merged 8 commits into from
Sep 19, 2024
Merged
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
176 changes: 110 additions & 66 deletions src/viam/app/data_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -179,15 +179,13 @@ async def tabular_data_by_filter(

from viam.utils import create_filter

my_data = []
last = None
my_filter = create_filter(component_name="left_motor")
while True:
tabular_data, count, last = await data_client.tabular_data_by_filter(my_filter, last)
if not tabular_data:
break
my_data.extend(tabular_data)
left_motor_filter = create_filter(
component_name="motor-1"
)

data, count, last = await data_client.tabular_data_by_filter(filter=left_motor_filter)
for tab in data:
print(tab)

Args:
filter (viam.proto.app.data.Filter): Optional `Filter` specifying tabular data to retrieve. No `Filter` implies all tabular
Expand Down Expand Up @@ -245,7 +243,10 @@ async def tabular_data_by_sql(self, organization_id: str, sql_query: str) -> Lis

::

data = await data_client.tabular_data_by_sql(organization_id="<your-org-id>", sql_query="SELECT * FROM readings LIMIT 5")
data = await data_client.tabular_data_by_sql(
organization_id="<YOUR-ORG-ID>",
sql_query="SELECT * FROM readings LIMIT 5"
)


Args:
Expand All @@ -269,17 +270,42 @@ async def tabular_data_by_mql(self, organization_id: str, mql_binary: List[bytes

# using bson
import bson
tabular_data = await data_client.tabular_data_by_mql(org_id="<your-org-id>", mql_binary=[
bson.dumps({ '$match': { 'location_id': '<location-id>' } }),
bson.dumps({ "$limit": 5 })
])
import bson.json_util

def mql_to_binary(mql_pipeline):
binary_pipeline = []
for stage in mql_pipeline:
# Parse the JSON string to a Python dict
parsed_stage = bson.json_util.loads(stage)
# Convert the dict to BSON
bson_stage = bson.BSON.encode(parsed_stage)
binary_pipeline.append(bson_stage)
return binary_pipeline

mql_pipeline=[
bson.json_util.dumps({ '$match': { 'location_id': '<YOUR-LOCATION-ID>' } }),
bson.json_util.dumps({ "$limit": 5 })
]

binary_pipeline = mql_to_binary(mql_pipeline)

# using bson dumps
tabular_data = await data_client.tabular_data_by_mql(
organization_id="<YOUR-ORG-ID>",
mql_binary=binary_pipeline
)

# using pymongo
import bson
tabular_data = await data_client.tabular_data_by_mql(org_id="<your-org-id>", mql_binary=[
bson.encode({ '$match': { 'location_id': '<location-id>' } }),
bson.encode({ "$limit": 5 })
])
print(f"Tabular Data 1: {tabular_data}")

# using encoding
tabular_data = await data_client.tabular_data_by_mql(
organization_id="<YOUR-ORG-ID>",
mql_binary=[
bson.BSON.encode({ '$match': { 'location_id': '<YOUR-LOCATION-ID>' } }),
bson.BSON.encode({ "$limit": 5 })
])

print(f"Tabular Data 2: {tabular_data}")


Args:
Expand Down Expand Up @@ -317,16 +343,17 @@ async def binary_data_by_filter(
from viam.utils import create_filter
from viam.proto.app.data import Filter, TagsFilter, TagsFilterType

camera_filter = create_filter(
component_name="camera-1"
)

# Get data captured from camera components
my_data = []
last = None
my_filter = create_filter(component_name="camera")
while True:
data, count, last = await data_client.binary_data_by_filter(my_filter, last)
if not data:
break
my_data.extend(data)
data, count, last = await data_client.binary_data_by_filter(
filter=camera_filter,
limit=1,
include_binary_data=True
)
for binary in data:
print(binary)

# Get untagged data from a dataset

Expand Down Expand Up @@ -405,13 +432,13 @@ async def binary_data_by_ids(

from viam.proto.app.data import BinaryID

binary_metadata, _, _ = await data_client.binary_data_by_filter(
binary_metadata = await data_client.binary_data_by_filter(
include_binary_data=False
)

my_ids = []

for obj in binary_metadata:
for obj in binary_metadata[0]:
my_ids.append(
BinaryID(
file_id=obj.metadata.id,
Expand Down Expand Up @@ -450,12 +477,10 @@ async def delete_tabular_data(self, organization_id: str, delete_older_than_days

::

from viam.utils import create_filter

my_filter = create_filter(component_name="left_motor")
days_of_data_to_delete = 10
num_deleted = await data_client.delete_tabular_data(
org_id="a12b3c4e-1234-1abc-ab1c-ab1c2d345abc", days_of_data_to_delete)
organization_id="<YOUR-ORG-ID>",
delete_older_than_days=150
)

Args:
organization_id (str): ID of organization to delete data from.
Expand Down Expand Up @@ -483,12 +508,14 @@ async def delete_binary_data_by_filter(self, filter: Optional[Filter]) -> int:

from viam.utils import create_filter

my_filter = create_filter(component_name="left_motor")
my_filter = create_filter(component_name="left_motor", organization_ids=["<YOUR-ORG-ID>"])

res = await data_client.delete_binary_data_by_filter(my_filter)

Args:
filter (viam.proto.app.data.Filter): Optional `Filter` specifying binary data to delete. Passing an empty `Filter` will lead to
all data being deleted. Exercise caution when using this option.
all data being deleted. Exercise caution when using this option. You must specify an organization ID with
"organization_ids" when using this option.

Returns:
int: The number of items deleted.
Expand All @@ -506,14 +533,18 @@ async def delete_binary_data_by_ids(self, binary_ids: List[BinaryID]) -> int:
::

from viam.proto.app.data import BinaryID
from viam.utils import create_filter

binary_metadata, _, _ = await data_client.binary_data_by_filter(
my_filter = create_filter(component_name="camera-1", organization_ids=["<YOUR-ORG-ID>"])
binary_metadata = await data_client.binary_data_by_filter(
filter=my_filter,
limit=20,
include_binary_data=False
)
)

my_ids = []

for obj in binary_metadata:
for obj in binary_metadata[0]:
my_ids.append(
BinaryID(
file_id=obj.metadata.id,
Expand Down Expand Up @@ -545,16 +576,20 @@ async def add_tags_to_binary_data_by_ids(self, tags: List[str], binary_ids: List
::

from viam.proto.app.data import BinaryID
from viam.utils import create_filter

tags = ["tag1", "tag2"]

binary_metadata, _, _ = await data_client.binary_data_by_filter(
my_filter = create_filter(component_name="camera-1", organization_ids=["<YOUR-ORG-ID>"])
binary_metadata = await data_client.binary_data_by_filter(
filter=my_filter,
limit=20,
include_binary_data=False
)
)

my_ids = []

for obj in binary_metadata:
for obj in binary_metadata[0]:
my_ids.append(
BinaryID(
file_id=obj.metadata.id,
Expand Down Expand Up @@ -608,16 +643,21 @@ async def remove_tags_from_binary_data_by_ids(self, tags: List[str], binary_ids:
::

from viam.proto.app.data import BinaryID
from viam.utils import create_filter

tags = ["tag1", "tag2"]

binary_metadata, _, _ = await data_client.binary_data_by_filter(
my_filter = create_filter(component_name="camera-1")

binary_metadata = await data_client.binary_data_by_filter(
filter=my_filter,
limit=50,
include_binary_data=False
)

my_ids = []

for obj in binary_metadata:
for obj in binary_metadata[0]:
my_ids.append(
BinaryID(
file_id=obj.metadata.id,
Expand Down Expand Up @@ -718,12 +758,12 @@ async def add_bounding_box_to_image_by_id(
from viam.proto.app.data import BinaryID

MY_BINARY_ID = BinaryID(
file_id=your-file_id,
organization_id=your-org-id,
location_id=your-location-id
file_id="<YOUR-FILE-ID>",
organization_id="<YOUR-ORG-ID>",
location_id="<YOUR-LOCATION-ID>"
)

bbox_label = await data_client.add_bounding_box_to_image_by_id(
bbox_id = await data_client.add_bounding_box_to_image_by_id(
binary_id=MY_BINARY_ID,
label="label",
x_min_normalized=0,
Expand All @@ -732,7 +772,7 @@ async def add_bounding_box_to_image_by_id(
y_max_normalized=.3
)

print(bbox_label)
print(bbox_id)

Args:
binary_id (viam.proto.app.data.BinaryID): The ID of the image to add the bounding box to.
Expand Down Expand Up @@ -799,6 +839,8 @@ async def bounding_box_labels_by_filter(self, filter: Optional[Filter] = None) -
bounding_box_labels = await data_client.bounding_box_labels_by_filter(
my_filter)

print(bounding_box_labels)

Args:
filter (viam.proto.app.data.Filter): `Filter` specifying data to retrieve from. If no `Filter` is provided, all labels will
return.
Expand All @@ -818,7 +860,7 @@ async def get_database_connection(self, organization_id: str) -> str:

::

data_client.get_database_connection(org_id="a12b3c4e-1234-1abc-ab1c-ab1c2d345abc")
hostname = await data_client.get_database_connection(organization_id="<YOUR-ORG-ID>")

Args:
organization_id (str): Organization to retrieve the connection for.
Expand All @@ -840,8 +882,8 @@ async def configure_database_user(self, organization_id: str, password: str) ->
::

await data_client.configure_database_user(
organization_id="<your-org-id>",
password="your_password"
organization_id="<YOUR-ORG-ID>",
password="Your_Password@1234"
)

Args:
Expand All @@ -859,9 +901,9 @@ async def create_dataset(self, name: str, organization_id: str) -> str:

::

dataset_id = await data_client.create_dataset(
name="<dataset-name>",
organization_id="<your-org-id>"
dataset_id = await data_client.create_dataset(
name="<DATASET-NAME>",
organization_id="<YOUR-ORG-ID>"
)
print(dataset_id)

Expand All @@ -885,7 +927,7 @@ async def list_dataset_by_ids(self, ids: List[str]) -> Sequence[Dataset]:
::

datasets = await data_client.list_dataset_by_ids(
ids=["abcd-1234xyz-8765z-123abc"]
ids=["<YOUR-DATASET-ID-1>", "<YOUR-DATASET-ID-2>"]
)
print(datasets)

Expand All @@ -909,8 +951,8 @@ async def list_datasets_by_organization_id(self, organization_id: str) -> Sequen

::

datasets = await data_client.list_dataset_by_organization_id(
organization_id=[""a12b3c4e-1234-1abc-ab1c-ab1c2d345abc""]
datasets = await data_client.list_datasets_by_organization_id(
organization_id="<YOUR-ORG-ID>"
)
print(datasets)

Expand All @@ -936,12 +978,13 @@ async def rename_dataset(self, id: str, name: str) -> None:
::

await data_client.rename_dataset(
id="abcd-1234xyz-8765z-123abc",
name="<dataset-name>"
id="<YOUR-DATASET-ID>",
name="MyDataset"
)

Args:
id (str): The ID of the dataset.
id (str): The ID of the dataset. You can retrieve this by navigating to the **DATASETS** sub-tab of the **DATA** tab,
clicking on the dataset, clicking the **...** menu and selecting **Copy dataset ID**.
name (str): The new name of the dataset.

For more information, see `Data Client API <https://docs.viam.com/appendix/apis/data-client/>`_.
Expand All @@ -959,7 +1002,8 @@ async def delete_dataset(self, id: str) -> None:
)

Args:
id (str): The ID of the dataset.
id (str): The ID of the dataset. You can retrieve this by navigating to the **DATASETS** sub-tab of the **DATA** tab,
clicking on the dataset, clicking the **...** menu and selecting **Copy dataset ID**.

For more information, see `Data Client API <https://docs.viam.com/appendix/apis/data-client/>`_.
"""
Expand All @@ -981,7 +1025,7 @@ async def add_binary_data_to_dataset_by_ids(self, binary_ids: List[BinaryID], da

my_binary_ids = []

for obj in binary_metadata:
for obj in binary_metadata[0]:
my_binary_ids.append(
BinaryID(
file_id=obj.metadata.id,
Expand Down Expand Up @@ -1021,7 +1065,7 @@ async def remove_binary_data_from_dataset_by_ids(self, binary_ids: List[BinaryID

my_binary_ids = []

for obj in binary_metadata:
for obj in binary_metadata[0]:
my_binary_ids.append(
BinaryID(
file_id=obj.metadata.id,
Expand Down Expand Up @@ -1247,7 +1291,7 @@ async def streaming_data_capture_upload(
component_type='motor',
component_name='left_motor',
method_name='IsPowered',
data_request_times=[(time_requested, time_received)],
data_request_times=[time_requested, time_received],
tags=["tag_1", "tag_2"]
)

Expand Down
Loading