diff --git a/.dockerignore b/.dockerignore
index e27dc136..259832a7 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -2,8 +2,12 @@
demo.gif
docs/
.git/
-data/
frontend/build/
frontend/node_modules/
experiments/
tasks/
+*.avi
+.vscode/
+venv/
+*.ts
+*.m3u8
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 2fae51c9..2870d48f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -10,3 +10,8 @@ __pycache__/
*.caffemodel
*.uff
*.avi
+*.ts
+*.m3u8
+.vscode
+venv/
+*.tbz2
\ No newline at end of file
diff --git a/README.md b/README.md
index e726feb4..f7862a17 100644
--- a/README.md
+++ b/README.md
@@ -113,6 +113,19 @@ docker build -f x86-openvino.Dockerfile -t "neuralet/smart-social-distancing:lat
docker run -it -p HOST_PORT:8000 -v "$PWD/data":/repo/data neuralet/smart-social-distancing:latest-x86_64_openvino
```
+**Run on x86 using DeepStream**
+
+```
+cd smart-social-distancing/
+(sudo) ./deepstream.sh build
+(sudo) ./deepstream.sh run
+```
+
+Make sure to set `PublicUrl` in `deepstream.ini` to your publicly accessible hostname:
+```
+PublicUrl: http://your_hostname_here:8000
+```
+
### Configurations
You can read and modify the configurations in `config-jetson.ini` file for Jetson Nano and `config-skeleton.ini` file for Coral.
diff --git a/deepstream.Dockerfile b/deepstream.Dockerfile
new file mode 100644
index 00000000..7a97d8b3
--- /dev/null
+++ b/deepstream.Dockerfile
@@ -0,0 +1,80 @@
+# Copyright (c) 2020 Michael de Gans
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+ARG FRONTEND_BASE="build me with deepstream.sh"
+ARG CUDA_PLUGIN_TAG="build me with deepstream.sh"
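+# These args are normally supplied by deepstream.sh; a hypothetical manual
+# build (illustrative values, matching what deepstream.sh would pass on x86):
+#   docker build -f deepstream.Dockerfile \
+#     --build-arg FRONTEND_BASE=neuralet/smart-social-distancing:master-frontend \
+#     --build-arg CUDA_PLUGIN_TAG=0.3.4-x86_64 .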
+FROM ${FRONTEND_BASE} as frontend
+FROM registry.hub.docker.com/mdegans/gstcudaplugin:${CUDA_PLUGIN_TAG}
+
+# This can't be downloaded directly because a license needs to be accepted
+# (as if those who abuse it will care about that) and a tarball manually
+# extracted. This is very un-fun:
+# https://developer.nvidia.com/deepstream-getting-started#python_bindings
+ARG DS_SOURCES_ROOT='/opt/nvidia/deepstream/deepstream/sources'
+ARG CONFIG_FILE="deepstream.ini"
+
+# copy stuff we need at the start of the build
+COPY requirements.txt /tmp/
+
+# install pip, install requirements, remove pip and deps
+RUN apt-get update && apt-get install -y --no-install-recommends \
+ python3-aiofiles \
+ python3-dev \
+ python3-gi \
+ python3-gst-1.0 \
+ python3-numpy \
+ python3-pil \
+ python3-pip \
+ python3-protobuf \
+ python3-scipy \
+ python3-setuptools \
+ graphviz \
+ && pip3 install --require-hashes -r /tmp/requirements.txt \
+ && apt-get purge -y --autoremove \
+ python3-pip \
+ python3-setuptools \
+ python3-dev \
+    && rm -rf /var/lib/apt/lists/*
+
+# TODO(mdegans) python3-opencv brings in a *ton* of dependencies so
+# it's probably better off removed from the deepstream image
+
+# NOTE(mdegans): these layers are here because docker's multi-line
+# copy syntax is dumb and doesn't support copying folders in a sane way.
+# One way of getting around this is to use a subdir for your
+# project.
+
+WORKDIR /repo
+
+# copy frontend
+COPY --from=frontend /frontend/build /srv/frontend
+
+# copy code
+COPY neuralet-distancing.py README.md ${CONFIG_FILE} ./
+COPY libs ./libs/
+COPY ui ./ui/
+COPY tools ./tools/
+COPY logs ./logs/
+COPY data ./data/
+
+# entrypoint with deepstream.
+EXPOSE 8000
+ENTRYPOINT [ "/usr/bin/python3", "neuralet-distancing.py", "--config", "deepstream.ini" ]
+
diff --git a/deepstream.ini b/deepstream.ini
new file mode 100644
index 00000000..1959dafd
--- /dev/null
+++ b/deepstream.ini
@@ -0,0 +1,41 @@
+[App]
+Host: 0.0.0.0
+Port: 8000
+Resolution: 640,480
+; public uri without the trailing slash
+PublicUrl: http://localhost:8000
+Encoder: nvvideoconvert ! nvv4l2h264enc
+
+[Source_0]
+; VideoPath may be a uri supported by uridecodebin (rtsp, http, etc.)
+; or a local file.
+; TODO(mdegans): camera sources.
+VideoPath: /opt/nvidia/deepstream/deepstream-5.0/samples/streams/sample_720p.h264
+
+[Source_1]
+VideoPath: /opt/nvidia/deepstream/deepstream-5.0/samples/streams/sample_720p.h264
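+
+; A hypothetical RTSP camera source would look like this
+; (illustrative URL only):
+;   [Source_2]
+;   VideoPath: rtsp://192.168.1.10:554/stream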
+
+[Detector]
+; Supported devices: Deepstream
+Device: DeepStream
+; Detector's Name can be "resnet10" or "peoplenet"
+Name: resnet10
+; ImageSize is not needed since it is included in the DeepStream config .ini
+ClassID: 2
+MinScore: 0.25
+
+; TODO(mdegans): remove unused sections and keys from this file
+
+[PostProcessor]
+MaxTrackFrame: 5
+NMSThreshold: 0.98
+; distance threshold for smart distancing (in cm)
+DistThreshold: 150
+; distance measurement method. CenterPointsDistance: compare the centers of pedestrian boxes; FourCornerPointsDistance: compare the four corresponding corner points of pedestrian boxes and take the minimum distance.
+DistMethod: CenterPointsDistance
+
+[Logger]
+; options: csv, json (default is csv if not set)
+Name: csv
+; optional log path (defaults to ~/.smart_distancing/logs/):
+LogDirectory: /repo/data/web_gui/static/data
diff --git a/deepstream.sh b/deepstream.sh
new file mode 100755
index 00000000..fc9bc86a
--- /dev/null
+++ b/deepstream.sh
@@ -0,0 +1,118 @@
+#!/bin/bash
+# Copyright (c) 2020 Michael de Gans
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+set -e
+
+# change the default run port
+readonly PORT="8000"
+# change this to bump the version tag
+readonly VERSION="0.1.0"
+# change this to your docker hub user if you fork this and want to push it
+readonly USER_NAME="neuralet"
+# change this to override the arch (should never be necessary)
+readonly ARCH="$(arch)"
+# frontend dockerfile name
+readonly FRONTEND_DOCKERFILE="frontend.Dockerfile"
+# Dockerfile name
+readonly DOCKERFILE="deepstream.Dockerfile"
+# https://www.cyberciti.biz/faq/bash-get-basename-of-filename-or-directory-name/
+readonly THIS_SCRIPT_BASENAME="${0##*/}"
+# change this to use a newer gst-cuda-plugin version
+readonly CUDA_PLUGIN_VER="0.3.4"
+# https://stackoverflow.com/questions/4774054/reliable-way-for-a-bash-script-to-get-the-full-path-to-itself
+readonly THIS_DIR="$( cd "$(dirname "$0")" > /dev/null 2>&1 ; pwd -P )"
+# the primary group to use
+if [[ $ARCH = "aarch64" ]]; then
+ # on tegra, a user must be in the video group to use the gpu
+ readonly GROUP_ID="$(cut -d: -f3 < <(getent group video))"
+    readonly GPU_ARGS=(
+ "--runtime"
+ "nvidia"
+ )
+else
+ readonly GROUP_ID=$(id -g)
+    readonly GPU_ARGS=(
+ "--gpus"
+ "all"
+ )
+fi
+# the user id to use
+if [[ -z "$SUDO_USER" ]]; then
+ readonly USER_ID=$UID
+else
+ echo "sudo user: $SUDO_USER"
+ readonly USER_ID="$(id -u $SUDO_USER)"
+fi
+
+# this helps tag the image
+GIT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
+# get the docker tag suffix from the git branch
+if [[ $GIT_BRANCH == "master" ]]; then
+    # if we're on master, just use "latest"
+    GIT_BRANCH="latest"
+fi
+readonly GIT_BRANCH
+
+# change this to override the image tag suffix
+readonly TAG_SUFFIX1="deepstream-$VERSION-$ARCH"
+readonly TAG_SUFFIX2="deepstream-$GIT_BRANCH-$ARCH"
+
+function build() {
+    local -r FRONTEND_TAG="$GIT_BRANCH-frontend"
+ set -x
+ docker build -f $FRONTEND_DOCKERFILE \
+ -t "$USER_NAME/smart-social-distancing:$FRONTEND_TAG" \
+ .
+ docker build -f $DOCKERFILE \
+ -t "$USER_NAME/smart-distancing:$TAG_SUFFIX1" \
+ -t "$USER_NAME/smart-distancing:$TAG_SUFFIX2" \
+ --build-arg CUDA_PLUGIN_TAG="$CUDA_PLUGIN_VER-$ARCH" \
+ --build-arg FRONTEND_BASE="$USER_NAME/smart-social-distancing:$FRONTEND_TAG" \
+ .
+}
+
+function run() {
+ set -x
+ exec docker run -it --rm --name smart_distancing \
+ "${GPU_ARGS[@]}" \
+ -v "$THIS_DIR/deepstream.ini:/repo/deepstream.ini" \
+ -v "$THIS_DIR/data:/repo/data" \
+ --user $USER_ID:$GROUP_ID \
+ -p "$PORT:8000" \
+ "$USER_NAME/smart-distancing:$TAG_SUFFIX1" \
+ --verbose
+}
+
+main() {
+    case "$1" in
+        build)
+            build
+            ;;
+        run)
+            run
+            ;;
+        *)
+            echo "Usage: $THIS_SCRIPT_BASENAME {build|run}"
+            ;;
+    esac
+}
+
+main "$1"
\ No newline at end of file
diff --git a/libs/detectors/deepstream/__init__.py b/libs/detectors/deepstream/__init__.py
new file mode 100644
index 00000000..c44e9e1c
--- /dev/null
+++ b/libs/detectors/deepstream/__init__.py
@@ -0,0 +1,46 @@
+# Copyright (c) 2020 Michael de Gans
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""
+The DeepStream detector module includes a DeepStream specific implementation
+of the BaseDetector class and various utility classes and functions.
+"""
+
+# GStreamer needs to be imported before pyds or else there is a crash on Gst.init
+import gi
+gi.require_version('Gst', '1.0')
+gi.require_version('GLib', '2.0')
+from gi.repository import (
+ Gst,
+ GLib,
+)
+from libs.detectors.deepstream._ds_utils import *
+from libs.detectors.deepstream._ds_config import *
+from libs.detectors.deepstream._gst_engine import *
+
+__all__ = [
+ 'bin_to_pdf', # _ds_utils.py
+ 'DsConfig', # _ds_config.py
+ 'ElemConfig', # _ds_config.py
+ 'find_deepstream', # _ds_utils.py
+ 'GstConfig', # _ds_config.py
+ 'GstEngine', # _gst_engine.py
+ 'link_many', # _gst_engine.py
+]
diff --git a/libs/detectors/deepstream/_ds_config.py b/libs/detectors/deepstream/_ds_config.py
new file mode 100644
index 00000000..30565a6d
--- /dev/null
+++ b/libs/detectors/deepstream/_ds_config.py
@@ -0,0 +1,357 @@
+# Copyright (c) 2020 Michael de Gans
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""
+DsConfig and GstConfig wrappers live here (they wrap ConfigEngine).
+"""
+
+import os
+import logging
+import datetime
+
+from math import (
+ log,
+ ceil,
+ sqrt,
+)
+
+import gi
+gi.require_version('Gst', '1.0')
+gi.require_version('GLib', '2.0')
+from gi.repository import (
+ Gst,
+)
+
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Tuple,
+ Iterable,
+ Mapping,
+ Union,
+ List,
+)
+if TYPE_CHECKING:
+ from libs.config_engine import ConfigEngine
+else:
+ ConfigEngine = None
+
+__all__ = [
+ 'DsConfig',
+ 'ElemConfig',
+ 'GstConfig',
+]
+
+from libs.detectors.deepstream._ds_utils import find_deepstream
+
+Path = Union[str, os.PathLike]
+ElemConfig = Mapping[str, Any]
+
+logger = logging.getLogger(__name__)
+
+def calc_rows_and_columns(num_sources: int) -> int:
+ """
+ Calculate rows and columns values from a number of sources.
+
+ Returns:
+ (int) math.ceil(math.sqrt(num_sources))
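+
+    Examples (directly verifiable against the formula above, and runnable
+    via this module's doctest runner):
+        >>> calc_rows_and_columns(0)
+        1
+        >>> calc_rows_and_columns(4)
+        2
+        >>> calc_rows_and_columns(5)
+        3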
+ """
+ if not num_sources:
+ return 1
+ return int(ceil(sqrt(num_sources)))
+
+
+def calc_tile_resolution(out_res: Tuple[int, int], rows_and_columns: int) -> Tuple[int, int]:
+ """
+ Return the optimal resolution for the stream muxer to scale input sources to.
+ (same as the resolution for a tile).
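+
+    Examples:
+        >>> calc_tile_resolution((1920, 1080), 2)
+        (960, 540)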
+ """
+ return out_res[0] // rows_and_columns, out_res[1] // rows_and_columns
+
+
+class GstConfig(object):
+ """
+    GstConfig is a simple class to wrap a ConfigEngine and provide
+    pipeline configuration for a GstEngine.
+
+ Arguments:
+ master_config:
+ The master :obj:`ConfigEngine`_ to use internally.
+ """
+
+ SRC_TYPE = 'uridecodebin'
+ MUXER_TYPE = 'concat' # using this just because it has request pads
+ INFER_TYPE = 'identity'
+ DISTANCE_TYPE = 'identity'
+ BROKER_TYPE = 'identity'
+ OSD_CONVERTER_TYPE = 'identity'
+ TILER_TYPE = 'identity'
+ OSD_TYPE = 'identity'
+ TRACKER_TYPE = 'identity'
+
+ def __init__(self, master_config: ConfigEngine):
+ self.master_config = master_config
+ self.validate()
+
+ @property
+ def src_configs(self) -> List[ElemConfig]:
+ """
+ Returns:
+ A list containing an ElemConfig for each 'Source' Section
+ in self.master_config
+ """
+ ret = []
+ for section, content in self.master_config.config.items():
+ if section.startswith('Source') and 'VideoPath' in content:
+ video_path = content['VideoPath']
+ if os.path.isfile(video_path):
+ video_path = f'file://{os.path.abspath(video_path)}'
+ ret.append({
+ 'uri': video_path,
+ })
+ return ret
+
+ @property
+ def class_ids(self) -> str:
+ """
+ Returns:
+ the class IDs from the master config.
+ """
+ return self.master_config.config['Detector']['ClassID']
+
+ @property
+ def infer_configs(self) -> List[ElemConfig]:
+ """
+ Default implementation.
+
+ Returns:
+ a list with a single empty :obj:`ElemConfig`
+ """
+ return [dict(),]
+
+ def _blank_config(self) -> ElemConfig:
+ """
+ Default implementation.
+
+ Returns:
+ a new empty :obj:`ElemConfig`
+ """
+ return dict()
+
+ muxer_config = property(_blank_config)
+ tracker_config = property(_blank_config)
+ tiler_config = property(_blank_config)
+ osd_config = property(_blank_config)
+ osd_converter_config = property(_blank_config)
+ sink_config = property(_blank_config)
+ distance_config = property(_blank_config)
+ broker_config = property(_blank_config)
+
+ @property
+ def rows_and_columns(self) -> int:
+ """
+ Number of rows and columns for the tiler element.
+
+ Calculated based on the number of sources.
+ """
+ return calc_rows_and_columns(len(self.src_configs))
+
+ @property
+ def tile_resolution(self) -> Tuple[int, int]:
+ """
+ Resolution of an individual video tile.
+
+ Calculated based on the resolution and number of sources.
+ """
+ return calc_tile_resolution(self.out_resolution, self.rows_and_columns)
+
+ @property
+ def out_resolution(self) -> Tuple[int, int]:
+ """
+ Output video resolution as a 2 tuple of width, height.
+
+ Read from self.master_config.config['App']
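+        (e.g. 'Resolution: 640,480' parses to (640, 480))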
+ """
+ return tuple(int(i) for i in self.master_config.config['App']['Resolution'].split(','))
+
+ def validate(self):
+ """
+ Validate `self`. Called by __init__.
+
+ Checks:
+ * there is at least one source
+ * there is at least one inference element
+
+ Raises:
+ ValueError: if `self` is invalid.
+
+ Examples:
+
+        If the master config contains no 'Source' sections, ValueError
+        is raised (a minimal sketch using a stand-in for ConfigEngine):
+
+        >>> class FakeConfig:
+        ...     config = {}
+        >>> GstConfig(FakeConfig())
+        Traceback (most recent call last):
+        ...
+        ValueError: at least one 'Source' section is required in the .ini
+ """
+ if not self.infer_configs:
+ raise ValueError(
+ "at least one 'Detector' section is required in the .ini")
+ if not self.src_configs:
+ raise ValueError(
+ "at least one 'Source' section is required in the .ini")
+
+
+class DsConfig(GstConfig):
+ """
+ DeepStream implementation of GstConfig.
+
+    'batch-size' may be overridden on element configs to match
+ the number of sources in the master config.
+
+ Arguments:
+ max_batch_size (int):
+ The maximum allowed batch size parameter.
+ Defaults to 32, but this should probably be
+ lower on platforms like Jetson Nano for best
+ performance.
+ """
+ SRC_TYPE = 'uridecodebin'
+ MUXER_TYPE = 'nvstreammux'
+ INFER_TYPE = 'nvinfer'
+ DISTANCE_TYPE = 'dsdistance'
+ BROKER_TYPE = 'payloadbroker'
+ OSD_CONVERTER_TYPE = 'nvvideoconvert'
+ TILER_TYPE = 'nvmultistreamtiler'
+ OSD_TYPE = 'nvdsosd'
+ TRACKER_TYPE = 'nvtracker'
+
+ DS_VER, DS_ROOT = find_deepstream()
+ DS_CONF_PATH = os.path.join(DS_ROOT, 'samples', 'configs')
+ # TODO(mdegans): secure hash validation of all configs, models, paths, etc and copy to immutable path
+ # important that the copy is *before* the validation
+ RESNET_CONF = os.path.join(DS_CONF_PATH, 'deepstream-app/config_infer_primary.txt')
+ RESNET_CONF_NANO = os.path.join(DS_CONF_PATH, 'deepstream-app/config_infer_primary_nano.txt')
+ PEOPLENET_CONF = os.path.join(DS_CONF_PATH, 'tlt_pretrained_models/config_infer_primary_peoplenet.txt')
+
+ TRACKER_LIB = 'libnvds_mot_iou.so'
+ INFER_INTERVAL = 1
+
+ def __init__(self, *args, max_batch_size=32, **kwargs):
+ self.max_batch_size = max_batch_size
+ super().__init__(*args, **kwargs)
+
+ @property
+ def muxer_config(self) -> ElemConfig:
+ return {
+ 'width': self.tile_resolution[0],
+ 'height': self.tile_resolution[1],
+ 'batch-size': self.batch_size,
+            'enable-padding': True,  # maintain aspect ratio
+ 'live-source': True,
+ 'attach-sys-ts': True,
+ }
+
+ @property
+ def tracker_config(self) -> ElemConfig:
+ return {
+ 'll-lib-file': os.path.join(self.DS_ROOT, 'lib', self.TRACKER_LIB),
+ 'enable-batch-process': True,
+ }
+
+ @property
+ def tiler_config(self) -> ElemConfig:
+ return {
+ 'rows': self.rows_and_columns,
+ 'columns': self.rows_and_columns,
+ 'width': self.out_resolution[0],
+ 'height': self.out_resolution[1],
+ }
+
+ @property
+ def distance_config(self) -> ElemConfig:
+ return {
+ 'class-id': int(self.class_ids)
+ }
+
+ @property
+ def broker_config(self) -> ElemConfig:
+ return {
+ 'mode': 2, # csv
+ 'basepath': os.path.join(
+ self.master_config.config['Logger']['LogDirectory'],
+ 'default', 'objects_log',
+ datetime.datetime.today().strftime('%Y-%m-%d'),
+ )
+ }
+
+ @property
+ def infer_configs(self) -> List[ElemConfig]:
+ """
+ Return nvinfer configs.
+ """
+ infer_configs = []
+        # TODO(mdegans): support 'Classifier' section as secondary detectors
+ # this might mean parsing and writing the config files since the
+ # unique id is specified in the config.
+ detector_cfg = self.master_config.config['Detector']
+ model_name = detector_cfg['Name']
+ if model_name == 'resnet10':
+            # TODO(mdegans): detect nano and use the optimized nano cfg
+ detector = {
+ 'config-file-path': self.RESNET_CONF,
+ }
+ elif model_name == 'peoplenet':
+ detector = {
+ 'config-file-path': self.PEOPLENET_CONF,
+ }
+ else:
+ raise ValueError('Invalid value for Detector "Name"')
+ detector['batch-size'] = self.batch_size
+ detector['interval'] = self.INFER_INTERVAL
+ infer_configs.append(detector)
+ return infer_configs
+
+ @property
+ def batch_size(self) -> int:
+ """
+ Return the optimal batch size.
+ (next power of two up from the number of sources).
+
+ TODO(mdegans): it's unclear if this is actually optimal
+ and under what circumstances (depends on model, afaik)
+ tests must be run to see if it's better to use the number
+ of sources directly.
+
+ NOTE(mdegans): Nvidia sets it to a static 30 in their config
+        so it may be that a power of two is not optimal here. Some of
+ their test apps use the number of sources. Benchmarking
+ is probably the easiest way to settle this.
+
+ Control the max by setting max_batch_size.
+ """
+ optimal = pow(2, ceil(log(len(self.src_configs))/log(2)))
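+        # e.g. 3 sources -> 2**2 == 4; 5 sources -> 2**3 == 8 (then capped at max_batch_size)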
+ return min(optimal, self.max_batch_size)
+
+if __name__ == "__main__":
+ import doctest
+ doctest.testmod(optionflags=doctest.ELLIPSIS)
diff --git a/libs/detectors/deepstream/_ds_utils.py b/libs/detectors/deepstream/_ds_utils.py
new file mode 100644
index 00000000..e9caf61d
--- /dev/null
+++ b/libs/detectors/deepstream/_ds_utils.py
@@ -0,0 +1,122 @@
+# Copyright (c) 2020 Michael de Gans
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""
+DeepStream common utilities.
+"""
+
+import logging
+import os
+import shutil
+import subprocess
+
+import gi
+gi.require_version('Gst', '1.0')
+gi.require_version('GLib', '2.0')
+from gi.repository import (
+ Gst,
+ GLib,
+)
+
+from typing import (
+ Tuple,
+ Optional,
+)
+
+__all__ = [
+ 'bin_to_pdf',
+ 'find_deepstream',
+]
+
+DS_VERSIONS = ('4.0', '5.0')
+DS_PATH = '/opt/nvidia/deepstream/deepstream-{ver}'
+
+logger = logging.getLogger(__name__)
+
+def find_deepstream() -> Optional[Tuple[str, str]]:
+    """
+    Finds DeepStream.
+
+    Return:
+        A 2 tuple of the DeepStream version and its root path
+        (e.g. ('5.0', '/opt/nvidia/deepstream/deepstream-5.0')),
+        or None if no version is found.
+    """
+    for ver in DS_VERSIONS:
+        ds_dir = DS_PATH.format(ver=ver)
+        if os.path.isdir(ds_dir):
+            return ver, ds_dir
+
+# this is from `mce.pipeline`
+def bin_to_pdf(bin_: Gst.Bin, details: Gst.DebugGraphDetails, filename: str,
+ ) -> Optional[str]:
+ """
+    Copied from ``mce.pipeline``.
+
+    Dump a Gst.Bin to pdf using ``Gst.debug_bin_to_dot_file`` and
+    graphviz. Launches the 'dot' subprocess in the background with
+    Popen. Does not check whether the process completes, but a .dot
+    is created in any case. Has the same arguments as
+    ``Gst.debug_bin_to_dot_file``.
+
+ Arguments:
+ bin:
+ the bin to make a .pdf visualization of
+ details:
+ a Gst.DebugGraphDetails choice (see gstreamer docs)
+ filename:
+ a base filename to use (not full path, with no extension)
+ usually this is the name of the bin you can get with some_bin.name
+
+ Returns:
+ the path to the created file (.dot or .pdf) or None if
+ GST_DEBUG_DUMP_DOT_DIR not found in os.environ
+ """
+ if 'GST_DEBUG_DUMP_DOT_DIR' in os.environ:
+ dot_dir = os.environ['GST_DEBUG_DUMP_DOT_DIR']
+ dot_file = os.path.join(dot_dir, f'{filename}.dot')
+ pdf_file = os.path.join(dot_dir, f'{filename}.pdf')
+ logger.debug(f"writing {bin_.name} to {dot_file}")
+ Gst.debug_bin_to_dot_file(bin_, details, filename)
+ dot_exe = shutil.which('dot')
+ if dot_exe:
+ logger.debug(
+ f"converting {os.path.basename(dot_file)} to "
+ f"{os.path.basename(pdf_file)} in background")
+ command = ('nohup', dot_exe, '-Tpdf', dot_file, f'-o{pdf_file}')
+ logger.debug(
+ f"running: {' '.join(command)}")
+ subprocess.Popen(
+ command,
+ stdout=subprocess.DEVNULL,
+ stderr=subprocess.DEVNULL,
+ preexec_fn=os.setpgrp,
+            )
+            return pdf_file
+        else:
+            logger.warning(
+                'graphviz does not appear to be installed, so cannot convert '
+                f'{dot_file} to pdf. You can install graphviz with '
+                '"sudo apt install graphviz" on Linux for Tegra or Ubuntu.')
+            return dot_file
+    return None
diff --git a/libs/detectors/deepstream/_gst_engine.py b/libs/detectors/deepstream/_gst_engine.py
new file mode 100644
index 00000000..19d7338e
--- /dev/null
+++ b/libs/detectors/deepstream/_gst_engine.py
@@ -0,0 +1,607 @@
+# Copyright (c) 2020 Michael de Gans
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""
+GstEngine lives here (multiprocessing.Process subclass).
+"""
+
+import os
+import functools
+import multiprocessing
+import queue
+import logging
+
+# import gstreamer bindings
+import gi
+gi.require_version('Gst', '1.0')
+gi.require_version('GLib', '2.0')
+from gi.repository import (
+ Gst,
+ GLib,
+)
+from typing import (
+    Any,
+    Callable,
+    Iterable,
+    List,
+    Optional,
+    Sequence,
+    Tuple,
+)
+from libs.detectors.deepstream._ds_utils import bin_to_pdf
+from libs.detectors.deepstream._ds_config import GstConfig
+
+__all__ = [
+ 'GstEngine',
+ 'GstLinkError',
+ 'link_many',
+ 'PadProbeCallback',
+]
+
+PadProbeCallback = Callable[
+ [Gst.Pad, Gst.PadProbeInfo, Any],
+ Gst.PadProbeReturn,
+]
+"""
+Signature of a GStreamer pad probe callback.
+"""
+
+# a documentation template for an element creation function
+# TODO(mdegans): remove after refactoring elem creation methods
+_ELEM_DOC = """
+Create {elem_name} Gst.Element and add to the pipeline.
+
+Returns:
+ bool: False on failure, True on success.
+"""
+
+
+class GstLinkError(RuntimeError):
+ """on failure to link pad or element"""
+
+def link(a: Gst.Element, b: Gst.Element):
+ """
+    Link Gst.Element a to b.
+
+    Use this to avoid checking for a True return value,
+    which is very C, but not very Pythonic.
+
+    (assumes Always availability of src and sink pads)
+
+    Raises:
+        GstLinkError: on failure to link.
+ """
+ if not a.link(b):
+ raise GstLinkError(f'could not link {a.name} to {b.name}')
+
+def link_many(*elements: Gst.Element):
+    """
+    Link many Gst.Element in a linear chain.
+
+    (assumes Always availability of src and sink pads)
+
+    Raises:
+        GstLinkError: on failure to link any pair.
+ """
+ elements = iter(elements)
+ last = next(elements)
+ for current in elements:
+ if not last.link(current):
+ raise GstLinkError(f'could not link {last.name} to {current.name}')
+ last = current
+
+
+class GstEngine(multiprocessing.Process):
+ """
+ GstEngine is an internal engine for GStreamer.
+
+ It is a subclass of multiprocessing.Process to run a GLib.MainLoop in
+ a separate process. There are several reasons for this:
+
+    * GStreamer elements can have memory leaks, so if and when the process
+      crashes, it can be restarted without having to restart the whole app.
+      In general GStreamer is as buggy as it is fast and the quality of elements
+      runs the gamut.
+    * Python callbacks called by GLib.MainLoop can block the whole MainLoop.
+      (The same is true in C, but there you can launch CPU bound stuff in a
+      thread, which is not possible in Python due to the GIL). Running
+      GLib.MainLoop in a separate process and putting the results into a queue
+      if a slot is empty (dropping the results if not) avoids this problem.
+    * Ease of adding and removing new sources. With DeepStream, right now, the
+      *easiest* and most reliable way to do this is to relaunch its process
+      with a modified configuration.
+
+ Arguments:
+ config (:obj:`GstConfig`):
+ GstConfig instance for this engine (wraps sd.core.ConfigEngine).
+ debug (bool, optional):
+ log all bus messages to the debug level
+ (this can mean a lot of spam, but can also be useful if things are
+ misbehaving)
+ blocking (bool, optional):
+ if set to true, attempts to access the .results property will block
+ for .queue_timeout seconds waiting for results. If no results are
+ ready after that, None is returned. If set to false, and a result is
+ not ready, None will be returned immediately.
+
+ Attributes:
+ logger (:obj:`logging.Logger`):
+ Python logger for the class.
+ queue_timeout (int):
+            (default: 10 seconds) timeout for the blocking argument/attribute.
+ feed_name (str):
+ (default: 'default') the feed name portion of the uri.
+ web_root (str):
+ The default web root path.
+ (default: '/repo/data/web_gui')
+ IGNORED_MESSAGES(:obj:`tuple` of :obj:`Gst.MessageType`):
+ Gst.MessageType to be ignored by on_bus_message.
+
+ Examples:
+
+ NOTE: the default GstConfig pipeline is:
+    uridecodebin ! concat ! identity ... identity ! fakesink.
+
+ Real-world subclasses can override GstConfig to set different source,
+ sink, and inference elements. See GstConfig documentation for details.
+
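+    A minimal usage sketch (assuming a valid .ini path; illustrative,
+    not a tested recipe)::
+
+        config = GstConfig(ConfigEngine('deepstream.ini'))
+        engine = GstEngine(config, blocking=True)
+        engine.start()  # spawns the process and runs the GLib.MainLoop
+        ...
+        engine.stop()   # ask the MainLoop to quit cleanly
+        engine.join()
+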
+ """
+
+    IGNORED_MESSAGES = tuple()  # type: Tuple[Gst.MessageType, ...]
+
+ logger = logging.getLogger('GstEngine')
+    # TODO(mdegans): make these properties that warn when a set is attempted
+    # after the process has started, since these are copied at that point
+    # (because this is a separate process) and re-assignment won't work.
+    queue_timeout = 10
+ feed_name = 'default'
+ web_root = '/repo/data/web_gui'
+ # this is to dump .dot and .pdf
+ logdir = '/tmp'
+
+    def __init__(self, config: GstConfig, *args, debug=False, blocking=False, **kwargs):
+ self.logger.debug('__init__')
+ super().__init__(*args, **kwargs)
+ # set debug for optional extra logging
+ self._debug = debug
+
+ # the pipeline configuration
+ self._gst_config = config # type: GstConfig
+
+ # GStreamer main stuff
+ self._main_loop = None # type: GLib.MainLoop
+ self._pipeline = None # type: Gst.Pipeline
+ # GStreamer elements (in order of connection)
+ self._sources = [] # type: List[Gst.Element]
+ self._muxer = None # type: Gst.Element
+ self._muxer_lock = GLib.Mutex()
+ self._infer_elements = [] # type: List[Gst.Element]
+ self._tracker = None # type: Gst.Element
+ self._distance = None # type: Gst.Element
+ self._broker = None # type: Gst.Element
+ self._osd_converter = None # type: Gst.Element
+ self._tiler = None # type: Gst.Element
+ self._tiler_probe_id = None # type: int
+ self._osd = None # type: Gst.Element
+ self._sink = None # type: Gst.Element
+
+ # process communication primitives
+ self._result_queue = multiprocessing.Queue(maxsize=1)
+ self._stop_requested = multiprocessing.Event()
+        # TODO(mdegans): make this a property with proper IPC,
+        # so it can be changed after start()
+        self.blocking = blocking
+
+ def on_bus_message(self, bus: Gst.Bus, message: Gst.Message, *_) -> bool:
+ """
+ Default bus message callback.
+
+ This implementation does the following on each message type:
+
+ Ignored:
+ any Gst.MessageType in GstEngine.IGNORED_MESSAGES
+
+ Logged:
+ Gst.MessageType.STREAM_STATUS
+ Gst.MessageType.STATE_CHANGED
+ Gst.MessageType.WARNING
+ (all others)
+
+ call self._quit():
+ Gst.MessageType.EOS
+ Gst.MessageType.ERROR
+ """
+ # TAG and DURATION_CHANGED seem to be the most common
+ if message.type in self.IGNORED_MESSAGES:
+ pass
+ elif message.type == Gst.MessageType.STREAM_STATUS:
+ status, owner = message.parse_stream_status() # type: Gst.StreamStatusType, Gst.Element
+ self.logger.debug(f"{owner.name}:status:{status.value_name}")
+ elif message.type == Gst.MessageType.STATE_CHANGED:
+ old, new, _ = message.parse_state_changed() # type: Gst.State, Gst.State, Gst.State
+ self.logger.debug(
+ f"{message.src.name}:state-change:"
+ f"{old.value_name}->{new.value_name}")
+ elif message.type == Gst.MessageType.EOS:
+ self.logger.debug(f"Got EOS")
+ self._quit()
+ elif message.type == Gst.MessageType.ERROR:
+ err, errmsg = message.parse_error() # type: GLib.Error, str
+ self.logger.error(f'{err}: {errmsg}')
+ self._quit()
+ elif message.type == Gst.MessageType.WARNING:
+ err, errmsg = message.parse_warning() # type: GLib.Error, str
+ self.logger.warning(f'{err}: {errmsg}')
+ else:
+ if self._debug:
+ self.logger.debug(
+ f"{message.src.name}:{Gst.MessageType.get_name(message.type)}")
+ return True
+
+ def _create_pipeline(self) -> bool:
+ """
+ Attempt to create pipeline bin.
+
+ Returns:
+ bool: False on failure, True on success.
+ """
+ # create the pipeline and check
+ self.logger.debug('creating pipeline')
+ self._pipeline = Gst.Pipeline()
+ if not self._pipeline:
+ self.logger.error('could not create Gst.Pipeline element')
+ return False
+ return True
+
+ # TODO(mdegans): some of these creation methods can probably be combined
+
+ def _create_sources(self) -> bool:
+ # create a source and check
+ for conf in self._gst_config.src_configs:
+ self.logger.debug(f'creating source: {self._gst_config.SRC_TYPE}')
+ src = Gst.ElementFactory.make(self._gst_config.SRC_TYPE) # type: Gst.Element
+ if not src:
+ self.logger.error(f'could not create source of type: {self._gst_config.SRC_TYPE}')
+ return False
+
+ # set properties on the source
+ for k, v in conf.items():
+ src.set_property(k, v)
+ src.set_property('async_handling', True)
+ src.set_property('caps', Gst.Caps.from_string("video/x-raw(ANY)"))
+ src.set_property('expose-all-streams', False)
+
+ # add the source to the pipeline and check
+ self._pipeline.add(src)
+
+ # append the source to the _sources list
+ self._sources.append(src)
+ return True
+ _create_sources.__doc__ = _ELEM_DOC.format(elem_name='`self.config.SRC_TYPE`')
+
+    def _create_element(self, e_type: str) -> Optional[Gst.Element]:
+ """
+ Create a Gst.Element and add to the pipeline.
+
+ Arguments:
+ e_type (str):
+                The FOO_TYPE of element to add, defined on the config class
+                as an attribute, e.g. MUXER_TYPE, SRC_TYPE. This argument is
+                case insensitive (e.g. 'muxer', 'tracker', 'tiler', 'osd').
+
+ Once the element of the corresponding type on the config is
+ made using Gst.ElementFactory.make, it will be added to
+ self._pipeline and assigned to self._e_type.
+
+        Returns:
+            A Gst.Element if successful, otherwise None.
+
+        Raises:
+            AttributeError: if e_type doesn't exist on the config class.
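+
+        Example:
+            `self._create_element('muxer')` makes an element of type
+            `self._gst_config.MUXER_TYPE`, applies the properties from
+            `self._gst_config.muxer_config`, assigns it to `self._muxer`,
+            and adds it to `self._pipeline`.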
+ """
+ # NOTE(mdegans): "type" and "name" are confusing variable names considering
+ # GStreamer's and Python's usage of them. Synonyms anybody?
+ e_type = e_type.lower()
+ e_name = getattr(self._gst_config, f'{e_type.upper()}_TYPE')
+ props = getattr(self._gst_config, f'{e_type}_config') # type: dict
+ self.logger.debug(f'creating {e_type}: {e_name} with props: {props}')
+
+ # make an self.gst_config.E_TYPE_TYPE element
+ elem = Gst.ElementFactory.make(e_name)
+ if not elem:
+ self.logger.error(f'could not create {e_type}: {e_name}')
+ return
+
+ # set properties on the element
+ if props:
+ if self._debug:
+ self.logger.debug(
+ f'{elem.name}:{props}')
+ for k, v in props.items():
+ elem.set_property(k, v)
+
+ # assign the element to self._e_type
+ setattr(self, f'_{e_type}', elem)
+
+ # add the element to the pipeline and check
+ self._pipeline.add(elem)
+
+ return elem
+
+ def _create_infer_elements(self) -> bool:
+ """
+ Create GstConfig.INFER_TYPE elements, add them to the pipeline,
+ and append them to self._infer_elements for ease of access / linking.
+
+ Returns:
+ bool: False on failure, True on success.
+ """
+ self.logger.debug('creating inference elements')
+ for conf in self._gst_config.infer_configs:
+ # create and check inference element
+ elem = Gst.ElementFactory.make(self._gst_config.INFER_TYPE) # type: Gst.Element
+ if not elem:
+ self.logger.error(f"failed to create {self._gst_config.INFER_TYPE} element")
+ return False
+
+ # set properties on inference element
+ for k, v in conf.items():
+ elem.set_property(k, v)
+
+ # add the elements to the pipeline and check
+ self._pipeline.add(elem)
+ # oddly, add returns false even when the log shows success
+
+ # append the element to the list of inference elements
+ self._infer_elements.append(elem)
+ return True
+
+    def _create_sink(self, pipe_string: Optional[str] = None) -> bool:
+ """
+ Create a Gst.Bin sink from a pipeline string
+ """
+ try:
+ #TODO(mdegans): urlparse and path join on the paths
+ # (to validate the uri and avoid "//" and such)
+ public_url = self._gst_config.master_config.config['App']['PublicUrl']
+ playlist_root = f'{public_url}/static/gstreamer/{self.feed_name}'
+ #TODO(mdegans): make the base path a uri for testing
+ video_root = f'{self.web_root}/static/gstreamer/{self.feed_name}'
+ if not pipe_string:
+ encoder = self._gst_config.master_config.config['App']['Encoder']
+ pipe_string = f' {encoder} ! mpegtsmux ! hlssink ' \
+ f'sync=true ' \
+ f'max-files=15 target-duration=5 ' \
+ f'playlist-root={playlist_root} ' \
+ f'location={video_root}/video_%05d.ts ' \
+ f'playlist-location={video_root}/playlist.m3u8'
+ self.logger.debug(f'sink bin string: {pipe_string}')
+ self._sink = Gst.parse_bin_from_description(pipe_string, True)
+ dot_filename = bin_to_pdf(
+ self._sink, Gst.DebugGraphDetails.ALL, f'{self.__class__.__name__}.sink.created')
+ if dot_filename:
+ self.logger.debug(
+ f'.dot file written to {dot_filename}')
+ if not self._sink:
+            # I don't think it's possible to get here unless GStreamer is
+            # broken
+ return False
+ self._pipeline.add(self._sink)
+
+ return True
+ except (GLib.Error, KeyError):
+ self.logger.error("sink creation failed", exc_info=True)
+ return False
+
+    def _create_all(self) -> bool:
+ """
+ Create and link the pipeline from self.config.
+
+ Returns:
+ bool: False on failure, True on success.
+ """
+ create_funcs = (
+ self._create_pipeline,
+ self._create_sources,
+ functools.partial(self._create_element, 'muxer'),
+ functools.partial(self._create_element, 'tracker'),
+ self._create_infer_elements,
+ functools.partial(self._create_element, 'distance'),
+ functools.partial(self._create_element, 'broker'),
+ functools.partial(self._create_element, 'osd_converter'),
+ functools.partial(self._create_element, 'tiler'),
+ functools.partial(self._create_element, 'osd'),
+ self._create_sink,
+ )
+
+ for i, f in enumerate(create_funcs):
+ if not f():
+ self.logger.error(
+ f"Failed to create DsEngine pipeline at step {i}")
+ return False
+ return True
+
+ def _on_source_src_pad_create(self, element: Gst.Element, src_pad: Gst.Pad):
+ """
+ Callback to link sources to the muxer.
+ """
+        # a lock is required so that identical pads are not requested.
+        # GLib.Mutex is required because Python's isn't respected by GLib's MainLoop
+ self._muxer_lock.lock()
+ try:
+ self.logger.debug(f'{element.name} new pad: {src_pad.name}')
+ self.logger.debug(
+ f'{src_pad.name} caps:{src_pad.props.caps}')
+ muxer_sink_pad_name = f'sink_{self._muxer.numsinkpads}'
+ self.logger.debug(f'{self._muxer.name}:requesting pad:{muxer_sink_pad_name}')
+ muxer_sink = self._muxer.get_request_pad(muxer_sink_pad_name)
+            if not muxer_sink:
+                self.logger.error(
+                    f"failed to get request pad from {self._muxer.name}")
+                self._quit()
+                return
+ self.logger.debug(
+ f'{muxer_sink.name} caps:{muxer_sink.props.caps}')
+ ret = src_pad.link(muxer_sink)
+ if not ret == Gst.PadLinkReturn.OK:
+ self.logger.error(
+ f"failed to link source to muxer becase {ret.value_name}")
+ self._quit()
+ finally:
+ self._muxer_lock.unlock()
+
+ def _link_pipeline(self) -> bool:
+ """
+ Attempt to link the entire pipeline.
+
+ Returns:
+ bool: False on failure, True on success.
+ """
+ self.logger.debug('linking pipeline')
+
+ # arrange for the sources to link to the muxer when they are ready
+ # (uridecodebin has "Sometimes" pads so needs to be linked by callback)
+ for source in self._sources: # type: Gst.Element
+ source.connect('pad-added', self._on_source_src_pad_create)
+
+ try:
+ # link the muxer to the first inference element
+ link(self._muxer, self._infer_elements[0])
+ link(self._infer_elements[0], self._tracker)
+ # if there are secondary inference elements
+ if self._infer_elements[1:]:
+ link_many(self._tracker, *self._infer_elements[1:])
+ # link the final inference element to distancing engine
+ link(self._infer_elements[-1], self._distance)
+ else:
+ # link tracker directly to the distancing element
+ link(self._tracker, self._distance)
+ link_many(
+ self._distance,
+ self._broker,
+ self._osd_converter,
+ self._tiler,
+ self._osd,
+ self._sink,
+ )
+ except GstLinkError as err:
+ self.logger.error(f"pipeline link fail because: {err}")
+ return False
+ self.logger.debug('linking pipeline successful')
+ return True
+
+ def stop(self):
+ """Stop the GstEngine process."""
+ self.logger.info('requesting stop')
+ self._stop_requested.set()
+
+ def _quit(self) -> Gst.StateChangeReturn:
+ """
+ Quit the GLib.MainLoop and set the pipeline to NULL.
+
+ Called by _on_stop. A separate function for testing purposes.
+ """
+ self.logger.info(f'{self.__class__.__name__} quitting.')
+ if self._main_loop and self._main_loop.is_running():
+ self._main_loop.quit()
+ if self._pipeline:
+ self._write_pdf('quit')
+ self.logger.debug('shifting pipeline to NULL state')
+ ret = self._pipeline.set_state(Gst.State.NULL)
+ if ret == Gst.StateChangeReturn.ASYNC:
+                # get_state() blocks up to a timeout (in nanoseconds) and
+                # returns a (result, state, pending) tuple
+                ret, _, _ = self._pipeline.get_state(10 * Gst.SECOND)
+ if ret == Gst.StateChangeReturn.SUCCESS:
+ return
+ else:
+ self.logger.error(
+ 'Failed to quit cleanly. Self terminating.')
+            self.terminate()  # send SIGTERM to self
+
+ def _on_stop(self):
+ """
+ Callback to shut down the process if stop() has been called.
+ """
+ if self._stop_requested.is_set():
+ self.logger.info(f'stopping {self.__class__.__name__}')
+ self._quit()
+ # clear stop_requested state
+ self._stop_requested.clear()
+ self.logger.info(f'{self.__class__.__name__} cleanly stopped')
+
+ def _write_pdf(self, suffix: str):
+ # create a debug pdf from the pipeline
+ dot_filename = bin_to_pdf(
+ self._pipeline, Gst.DebugGraphDetails.ALL, f'{self.__class__.__name__}.pipeline.{suffix}')
+ if dot_filename:
+ self.logger.debug(
+ f'.dot file written to {dot_filename}')
+
+ def run(self):
+ """Called on start(). Do not call this directly."""
+ self.logger.debug('run() called. Initializing Gstreamer.')
+
+ # set the .dot file dump path (this must be done prior to Gst.init)
+ if 'GST_DEBUG_DUMP_DOT_DIR' not in os.environ:
+ os.environ['GST_DEBUG_DUMP_DOT_DIR'] = self.logdir
+
+ # initialize GStreamer
+ Gst.init_check(None)
+
+ # create pipeline,
+ # create and add all elements:
+ if not self._create_all():
+ self.logger.debug('could not create pipeline')
+ return self._quit()
+
+ # register bus message callback
+ bus = self._pipeline.get_bus()
+ if not bus:
+ self.logger.error('could not get bus')
+ return self._quit()
+
+ self.logger.debug('registering bus message callback')
+ bus.add_watch(GLib.PRIORITY_DEFAULT, self.on_bus_message, None)
+
+ # link all pipeline elements:
+ if not self._link_pipeline():
+ self.logger.error('could not link pipeline')
+ return self._quit()
+
+ # register callback to check for the stop event when idle.
+ # TODO(mdegans): test to see if a higher priority is needed.
+ self.logger.debug('registering self._on_stop() idle callback with GLib MainLoop')
+ GLib.idle_add(self._on_stop)
+
+ # write a pdf before we attempt to start the pipeline
+ self._write_pdf('linked')
+
+ # set the pipeline to the playing state
+ self.logger.debug('setting pipeline to PLAYING state')
+ self._pipeline.set_state(Gst.State.PLAYING)
+
+        # write a pdf after setting the pipeline to PLAYING
+ self._write_pdf('playing')
+
+ # create and run the main loop.
+ # this has a built-in signal handler for SIGINT
+ self.logger.debug('creating the GLib.MainLoop')
+ self._main_loop = GLib.MainLoop()
+ self.logger.debug('starting the GLib.MainLoop')
+ self._main_loop.run()
+ self.logger.info("complete.")
diff --git a/libs/distance_pb2.py b/libs/distance_pb2.py
new file mode 100644
index 00000000..9940d73a
--- /dev/null
+++ b/libs/distance_pb2.py
@@ -0,0 +1,259 @@
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: distance.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='distance.proto',
+ package='distanceproto',
+ syntax='proto3',
+ serialized_pb=_b('\n\x0e\x64istance.proto\x12\rdistanceproto\"A\n\x05\x42\x61tch\x12\x12\n\nmax_frames\x18\x01 \x01(\r\x12$\n\x06\x66rames\x18\x02 \x03(\x0b\x32\x14.distanceproto.Frame\"h\n\x05\x46rame\x12\x11\n\tframe_num\x18\x01 \x01(\x05\x12\x11\n\tsource_id\x18\x02 \x01(\r\x12%\n\x06people\x18\x03 \x03(\x0b\x32\x15.distanceproto.Person\x12\x12\n\nsum_danger\x18\x04 \x01(\x02\"_\n\x06Person\x12\x0b\n\x03uid\x18\x01 \x01(\x05\x12\x11\n\tis_danger\x18\x02 \x01(\x08\x12\x12\n\ndanger_val\x18\x03 \x01(\x02\x12!\n\x04\x62\x62ox\x18\x04 \x01(\x0b\x32\x13.distanceproto.BBox\"@\n\x04\x42\x42ox\x12\x0c\n\x04left\x18\x01 \x01(\r\x12\x0b\n\x03top\x18\x02 \x01(\r\x12\x0e\n\x06height\x18\x03 \x01(\r\x12\r\n\x05width\x18\x04 \x01(\rb\x06proto3')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+
+
+
+_BATCH = _descriptor.Descriptor(
+ name='Batch',
+ full_name='distanceproto.Batch',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='max_frames', full_name='distanceproto.Batch.max_frames', index=0,
+ number=1, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='frames', full_name='distanceproto.Batch.frames', index=1,
+ number=2, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=33,
+ serialized_end=98,
+)
+
+
+_FRAME = _descriptor.Descriptor(
+ name='Frame',
+ full_name='distanceproto.Frame',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='frame_num', full_name='distanceproto.Frame.frame_num', index=0,
+ number=1, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='source_id', full_name='distanceproto.Frame.source_id', index=1,
+ number=2, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='people', full_name='distanceproto.Frame.people', index=2,
+ number=3, type=11, cpp_type=10, label=3,
+ has_default_value=False, default_value=[],
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='sum_danger', full_name='distanceproto.Frame.sum_danger', index=3,
+ number=4, type=2, cpp_type=6, label=1,
+ has_default_value=False, default_value=float(0),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=100,
+ serialized_end=204,
+)
+
+
+_PERSON = _descriptor.Descriptor(
+ name='Person',
+ full_name='distanceproto.Person',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='uid', full_name='distanceproto.Person.uid', index=0,
+ number=1, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='is_danger', full_name='distanceproto.Person.is_danger', index=1,
+ number=2, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='danger_val', full_name='distanceproto.Person.danger_val', index=2,
+ number=3, type=2, cpp_type=6, label=1,
+ has_default_value=False, default_value=float(0),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='bbox', full_name='distanceproto.Person.bbox', index=3,
+ number=4, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=206,
+ serialized_end=301,
+)
+
+
+_BBOX = _descriptor.Descriptor(
+ name='BBox',
+ full_name='distanceproto.BBox',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='left', full_name='distanceproto.BBox.left', index=0,
+ number=1, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='top', full_name='distanceproto.BBox.top', index=1,
+ number=2, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='height', full_name='distanceproto.BBox.height', index=2,
+ number=3, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ _descriptor.FieldDescriptor(
+ name='width', full_name='distanceproto.BBox.width', index=3,
+ number=4, type=13, cpp_type=3, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ options=None),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=303,
+ serialized_end=367,
+)
+
+_BATCH.fields_by_name['frames'].message_type = _FRAME
+_FRAME.fields_by_name['people'].message_type = _PERSON
+_PERSON.fields_by_name['bbox'].message_type = _BBOX
+DESCRIPTOR.message_types_by_name['Batch'] = _BATCH
+DESCRIPTOR.message_types_by_name['Frame'] = _FRAME
+DESCRIPTOR.message_types_by_name['Person'] = _PERSON
+DESCRIPTOR.message_types_by_name['BBox'] = _BBOX
+
+Batch = _reflection.GeneratedProtocolMessageType('Batch', (_message.Message,), dict(
+ DESCRIPTOR = _BATCH,
+ __module__ = 'distance_pb2'
+ # @@protoc_insertion_point(class_scope:distanceproto.Batch)
+ ))
+_sym_db.RegisterMessage(Batch)
+
+Frame = _reflection.GeneratedProtocolMessageType('Frame', (_message.Message,), dict(
+ DESCRIPTOR = _FRAME,
+ __module__ = 'distance_pb2'
+ # @@protoc_insertion_point(class_scope:distanceproto.Frame)
+ ))
+_sym_db.RegisterMessage(Frame)
+
+Person = _reflection.GeneratedProtocolMessageType('Person', (_message.Message,), dict(
+ DESCRIPTOR = _PERSON,
+ __module__ = 'distance_pb2'
+ # @@protoc_insertion_point(class_scope:distanceproto.Person)
+ ))
+_sym_db.RegisterMessage(Person)
+
+BBox = _reflection.GeneratedProtocolMessageType('BBox', (_message.Message,), dict(
+ DESCRIPTOR = _BBOX,
+ __module__ = 'distance_pb2'
+ # @@protoc_insertion_point(class_scope:distanceproto.BBox)
+ ))
+_sym_db.RegisterMessage(BBox)
+
+
+# @@protoc_insertion_point(module_scope)
\ No newline at end of file
diff --git a/neuralet-distancing.py b/neuralet-distancing.py
index 29f220e3..32379bd7 100644
--- a/neuralet-distancing.py
+++ b/neuralet-distancing.py
@@ -1,62 +1,86 @@
#!/usr/bin/python3
+import os
import argparse
-from multiprocessing import Process
+import logging
+import sys
import threading
+
+from multiprocessing import Process
from libs.config_engine import ConfigEngine
-import logging
+
logger = logging.getLogger(__name__)
-def start_engine(config, video_path):
- import pdb, traceback, sys
+def start_cv_engine(config, video_path):
try:
if video_path:
from libs.core import Distancing as CvEngine
engine = CvEngine(config)
engine.process_video(video_path)
else:
- logger.info('Skipping CVEngine as video_path is not set')
- except:
- extype, value, tb = sys.exc_info()
- traceback.print_exc()
- pdb.post_mortem(tb)
+ logger.error('"VideoPath" not set in .ini [App] section')
+ except Exception:
+        # this runs sys.exc_info() and logs the result
+ logger.error("CvEngine failed.", exc_info=True)
def start_web_gui(config):
from ui.web_gui import WebGUI
ui = WebGUI(config)
ui.start()
-
-def main(config):
- logging.basicConfig(level=logging.INFO)
+def main(config, verbose=False):
+ logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO)
if isinstance(config, str):
config = ConfigEngine(config)
-
video_path = config.get_section_dict("App").get("VideoPath", None)
- process_engine = Process(target=start_engine, args=(config, video_path,))
+
+ # create our inference process
+ if os.path.isdir('/opt/nvidia/deepstream'):
+ from libs.detectors.deepstream import GstEngine, DsConfig
+ process_engine = GstEngine(DsConfig(config), debug=verbose)
+ else:
+ # DeepStream is not available. Let's try CvEngine
+ process_engine = Process(target=start_cv_engine, args=(config, video_path,))
+
+ # create our ui process
process_api = Process(target=start_web_gui, args=(config,))
+ # start both processes
process_api.start()
process_engine.start()
logger.info("Services Started.")
+ # wait forever
forever = threading.Event()
try:
forever.wait()
except KeyboardInterrupt:
logger.info("Received interrupt. Terminating...")
- process_engine.terminate()
- process_engine.join()
- logger.info("CV Engine terminated.")
+ if hasattr(process_engine, 'stop'):
+        # GstEngine shuts down by asking GLib.MainLoop
+        # to quit. SIGTERM does this too, but it would skip
+        # the extra debug steps in GstEngine's quit()
+        # (writing a pipeline .dot file, plus a .pdf render
+        # if graphviz is available).
+        # .stop() falls back to .terminate() if quitting times out.
+ process_engine.stop()
+ process_engine.join()
+ else:
+ process_engine.terminate()
+ process_engine.join()
+
+ logger.info("Inference Engine terminated.")
process_api.terminate()
process_api.join()
logger.info("Web GUI terminated.")
+ return 0
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--config', required=True)
+ parser.add_argument('--verbose', action='store_true')
args = parser.parse_args()
- main(args.config)
+ sys.exit(main(args.config, args.verbose))
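The shutdown branch in main() boils down to one pattern: ask the engine to stop cooperatively when it knows how, otherwise signal it. A hedged sketch of that pattern in isolation; the `shutdown` helper name, the `timeout` grace period, and the `kill()` fallback are illustrative additions, not part of the code above:

```
from multiprocessing import Process

def shutdown(proc, timeout=10.0):
    if hasattr(proc, 'stop'):
        proc.stop()        # cooperative quit (GstEngine: stops the GLib.MainLoop)
    else:
        proc.terminate()   # plain SIGTERM for an ordinary Process
    proc.join(timeout)
    if proc.exitcode is None:  # still running after the grace period
        proc.kill()            # SIGKILL as a last resort (Python 3.7+)
        proc.join()
```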
diff --git a/requirements.in b/requirements.in
new file mode 100644
index 00000000..ca36102c
--- /dev/null
+++ b/requirements.in
@@ -0,0 +1,4 @@
+fastapi
+protobuf==3.12.2
+setuptools
+uvicorn
\ No newline at end of file
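requirements.in lists only the direct dependencies; pip-compile expands them into the fully pinned, hash-locked requirements.txt below. A sketch of regenerating the lock file programmatically, assuming pip-tools is installed (the plain `pip-compile` command from the generated file's header works equally well):

```
import subprocess

# Rebuild requirements.txt from requirements.in with pinned versions
# and --hash lines, matching the header of the generated file.
subprocess.run(
    [
        "python3", "-m", "piptools", "compile",
        "--allow-unsafe",      # allow pinning setuptools in the output
        "--generate-hashes",   # emit sha256 hashes for pip's hash checking
        "requirements.in",
    ],
    check=True,
)
```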
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 00000000..6faa6b26
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,128 @@
+#
+# This file is autogenerated by pip-compile
+# To update, run:
+#
+# pip-compile --allow-unsafe --generate-hashes
+#
+click==7.1.2 \
+ --hash=sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a \
+ --hash=sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc \
+ # via uvicorn
+dataclasses==0.7 \
+ --hash=sha256:3459118f7ede7c8bea0fe795bff7c6c2ce287d01dd226202f7c9ebc0610a7836 \
+ --hash=sha256:494a6dcae3b8bcf80848eea2ef64c0cc5cd307ffc263e17cdf42f3e5420808e6 \
+ # via pydantic
+fastapi==0.55.1 \
+ --hash=sha256:912bc1a1b187146fd74dd45e17ea10aede3d962c921142c412458e911c50dc4c \
+ --hash=sha256:b1a96ea772f10cd0235eb09d6e282b1f5e6135dad5121ed80d6beb8fa932e075 \
+ # via -r requirements.in
+h11==0.9.0 \
+ --hash=sha256:33d4bca7be0fa039f4e84d50ab00531047e53d6ee8ffbc83501ea602c169cae1 \
+ --hash=sha256:4bc6d6a1238b7615b266ada57e0618568066f57dd6fa967d1290ec9309b2f2f1 \
+ # via uvicorn
+httptools==0.1.1 \
+ --hash=sha256:0a4b1b2012b28e68306575ad14ad5e9120b34fccd02a81eb08838d7e3bbb48be \
+ --hash=sha256:3592e854424ec94bd17dc3e0c96a64e459ec4147e6d53c0a42d0ebcef9cb9c5d \
+ --hash=sha256:41b573cf33f64a8f8f3400d0a7faf48e1888582b6f6e02b82b9bd4f0bf7497ce \
+ --hash=sha256:56b6393c6ac7abe632f2294da53f30d279130a92e8ae39d8d14ee2e1b05ad1f2 \
+ --hash=sha256:86c6acd66765a934e8730bf0e9dfaac6fdcf2a4334212bd4a0a1c78f16475ca6 \
+ --hash=sha256:96da81e1992be8ac2fd5597bf0283d832287e20cb3cfde8996d2b00356d4e17f \
+ --hash=sha256:96eb359252aeed57ea5c7b3d79839aaa0382c9d3149f7d24dd7172b1bcecb009 \
+ --hash=sha256:a2719e1d7a84bb131c4f1e0cb79705034b48de6ae486eb5297a139d6a3296dce \
+ --hash=sha256:ac0aa11e99454b6a66989aa2d44bca41d4e0f968e395a0a8f164b401fefe359a \
+ --hash=sha256:bc3114b9edbca5a1eb7ae7db698c669eb53eb8afbbebdde116c174925260849c \
+ --hash=sha256:fa3cd71e31436911a44620473e873a256851e1f53dee56669dae403ba41756a4 \
+ --hash=sha256:fea04e126014169384dee76a153d4573d90d0cbd1d12185da089f73c78390437 \
+ # via uvicorn
+protobuf==3.12.2 \
+ --hash=sha256:304e08440c4a41a0f3592d2a38934aad6919d692bb0edfb355548786728f9a5e \
+ --hash=sha256:49ef8ab4c27812a89a76fa894fe7a08f42f2147078392c0dee51d4a444ef6df5 \
+ --hash=sha256:50b5fee674878b14baea73b4568dc478c46a31dd50157a5b5d2f71138243b1a9 \
+ --hash=sha256:5524c7020eb1fb7319472cb75c4c3206ef18b34d6034d2ee420a60f99cddeb07 \
+ --hash=sha256:612bc97e42b22af10ba25e4140963fbaa4c5181487d163f4eb55b0b15b3dfcd2 \
+ --hash=sha256:6f349adabf1c004aba53f7b4633459f8ca8a09654bf7e69b509c95a454755776 \
+ --hash=sha256:85b94d2653b0fdf6d879e39d51018bf5ccd86c81c04e18a98e9888694b98226f \
+ --hash=sha256:87535dc2d2ef007b9d44e309d2b8ea27a03d2fa09556a72364d706fcb7090828 \
+ --hash=sha256:a7ab28a8f1f043c58d157bceb64f80e4d2f7f1b934bc7ff5e7f7a55a337ea8b0 \
+ --hash=sha256:a96f8fc625e9ff568838e556f6f6ae8eca8b4837cdfb3f90efcb7c00e342a2eb \
+ --hash=sha256:b5a114ea9b7fc90c2cc4867a866512672a47f66b154c6d7ee7e48ddb68b68122 \
+ --hash=sha256:be04fe14ceed7f8641e30f36077c1a654ff6f17d0c7a5283b699d057d150d82a \
+ --hash=sha256:bff02030bab8b969f4de597543e55bd05e968567acb25c0a87495a31eb09e925 \
+ --hash=sha256:c9ca9f76805e5a637605f171f6c4772fc4a81eced4e2f708f79c75166a2c99ea \
+ --hash=sha256:e1464a4a2cf12f58f662c8e6421772c07947266293fb701cb39cd9c1e183f63c \
+ --hash=sha256:e72736dd822748b0721f41f9aaaf6a5b6d5cfc78f6c8690263aef8bba4457f0e \
+ --hash=sha256:eafe9fa19fcefef424ee089fb01ac7177ff3691af7cc2ae8791ae523eb6ca907 \
+ --hash=sha256:f4b73736108a416c76c17a8a09bc73af3d91edaa26c682aaa460ef91a47168d3 \
+ # via -r requirements.in
+pydantic==1.5.1 \
+ --hash=sha256:0a1cdf24e567d42dc762d3fed399bd211a13db2e8462af9dfa93b34c41648efb \
+ --hash=sha256:2007eb062ed0e57875ce8ead12760a6e44bf5836e6a1a7ea81d71eeecf3ede0f \
+ --hash=sha256:20a15a303ce1e4d831b4e79c17a4a29cb6740b12524f5bba3ea363bff65732bc \
+ --hash=sha256:2a6904e9f18dea58f76f16b95cba6a2f20b72d787abd84ecd67ebc526e61dce6 \
+ --hash=sha256:3714a4056f5bdbecf3a41e0706ec9b228c9513eee2ad884dc2c568c4dfa540e9 \
+ --hash=sha256:473101121b1bd454c8effc9fe66d54812fdc128184d9015c5aaa0d4e58a6d338 \
+ --hash=sha256:68dece67bff2b3a5cc188258e46b49f676a722304f1c6148ae08e9291e284d98 \
+ --hash=sha256:70f27d2f0268f490fe3de0a9b6fca7b7492b8fd6623f9fecd25b221ebee385e3 \
+ --hash=sha256:8433dbb87246c0f562af75d00fa80155b74e4f6924b0db6a2078a3cd2f11c6c4 \
+ --hash=sha256:8be325fc9da897029ee48d1b5e40df817d97fe969f3ac3fd2434ba7e198c55d5 \
+ --hash=sha256:93b9f265329d9827f39f0fca68f5d72cc8321881cdc519a1304fa73b9f8a75bd \
+ --hash=sha256:9be755919258d5d168aeffbe913ed6e8bd562e018df7724b68cabdee3371e331 \
+ --hash=sha256:ab863853cb502480b118187d670f753be65ec144e1654924bec33d63bc8b3ce2 \
+ --hash=sha256:b96ce81c4b5ca62ab81181212edfd057beaa41411cd9700fbcb48a6ba6564b4e \
+ --hash=sha256:da8099fca5ee339d5572cfa8af12cf0856ae993406f0b1eb9bb38c8a660e7416 \
+ --hash=sha256:e2c753d355126ddd1eefeb167fa61c7037ecd30b98e7ebecdc0d1da463b4ea09 \
+ --hash=sha256:f0018613c7a0d19df3240c2a913849786f21b6539b9f23d85ce4067489dfacfa \
+ # via fastapi
+six==1.15.0 \
+ --hash=sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259 \
+ --hash=sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced \
+ # via protobuf
+starlette==0.13.2 \
+ --hash=sha256:6169ee78ded501095d1dda7b141a1dc9f9934d37ad23196e180150ace2c6449b \
+ --hash=sha256:a9bb130fa7aa736eda8a814b6ceb85ccf7a209ed53843d0d61e246b380afa10f \
+ # via fastapi
+uvicorn==0.11.5 \
+ --hash=sha256:50577d599775dac2301bac8bd5b540d19a9560144143c5bdab13cba92783b6e7 \
+ --hash=sha256:596eaa8645b6dbc24d6610e335f8ddf5f925b4c4b86fdc7146abb0bf0da65d17 \
+ # via -r requirements.in
+uvloop==0.14.0 \
+ --hash=sha256:08b109f0213af392150e2fe6f81d33261bb5ce968a288eb698aad4f46eb711bd \
+ --hash=sha256:123ac9c0c7dd71464f58f1b4ee0bbd81285d96cdda8bc3519281b8973e3a461e \
+ --hash=sha256:4315d2ec3ca393dd5bc0b0089d23101276778c304d42faff5dc4579cb6caef09 \
+ --hash=sha256:4544dcf77d74f3a84f03dd6278174575c44c67d7165d4c42c71db3fdc3860726 \
+ --hash=sha256:afd5513c0ae414ec71d24f6f123614a80f3d27ca655a4fcf6cabe50994cc1891 \
+ --hash=sha256:b4f591aa4b3fa7f32fb51e2ee9fea1b495eb75b0b3c8d0ca52514ad675ae63f7 \
+ --hash=sha256:bcac356d62edd330080aed082e78d4b580ff260a677508718f88016333e2c9c5 \
+ --hash=sha256:e7514d7a48c063226b7d06617cbb12a14278d4323a065a8d46a7962686ce2e95 \
+ --hash=sha256:f07909cd9fc08c52d294b1570bba92186181ca01fe3dc9ffba68955273dd7362 \
+ # via uvicorn
+websockets==8.1 \
+ --hash=sha256:0e4fb4de42701340bd2353bb2eee45314651caa6ccee80dbd5f5d5978888fed5 \
+ --hash=sha256:1d3f1bf059d04a4e0eb4985a887d49195e15ebabc42364f4eb564b1d065793f5 \
+ --hash=sha256:20891f0dddade307ffddf593c733a3fdb6b83e6f9eef85908113e628fa5a8308 \
+ --hash=sha256:295359a2cc78736737dd88c343cd0747546b2174b5e1adc223824bcaf3e164cb \
+ --hash=sha256:2db62a9142e88535038a6bcfea70ef9447696ea77891aebb730a333a51ed559a \
+ --hash=sha256:3762791ab8b38948f0c4d281c8b2ddfa99b7e510e46bd8dfa942a5fff621068c \
+ --hash=sha256:3db87421956f1b0779a7564915875ba774295cc86e81bc671631379371af1170 \
+ --hash=sha256:3ef56fcc7b1ff90de46ccd5a687bbd13a3180132268c4254fc0fa44ecf4fc422 \
+ --hash=sha256:4f9f7d28ce1d8f1295717c2c25b732c2bc0645db3215cf757551c392177d7cb8 \
+ --hash=sha256:5c01fd846263a75bc8a2b9542606927cfad57e7282965d96b93c387622487485 \
+ --hash=sha256:5c65d2da8c6bce0fca2528f69f44b2f977e06954c8512a952222cea50dad430f \
+ --hash=sha256:751a556205d8245ff94aeef23546a1113b1dd4f6e4d102ded66c39b99c2ce6c8 \
+ --hash=sha256:7ff46d441db78241f4c6c27b3868c9ae71473fe03341340d2dfdbe8d79310acc \
+ --hash=sha256:965889d9f0e2a75edd81a07592d0ced54daa5b0785f57dc429c378edbcffe779 \
+ --hash=sha256:9b248ba3dd8a03b1a10b19efe7d4f7fa41d158fdaa95e2cf65af5a7b95a4f989 \
+ --hash=sha256:9bef37ee224e104a413f0780e29adb3e514a5b698aabe0d969a6ba426b8435d1 \
+ --hash=sha256:c1ec8db4fac31850286b7cd3b9c0e1b944204668b8eb721674916d4e28744092 \
+ --hash=sha256:c8a116feafdb1f84607cb3b14aa1418424ae71fee131642fc568d21423b51824 \
+ --hash=sha256:ce85b06a10fc65e6143518b96d3dca27b081a740bae261c2fb20375801a9d56d \
+ --hash=sha256:d705f8aeecdf3262379644e4b55107a3b55860eb812b673b28d0fbc347a60c55 \
+ --hash=sha256:e898a0863421650f0bebac8ba40840fc02258ef4714cb7e1fd76b6a6354bda36 \
+ --hash=sha256:f8a7bff6e8664afc4e6c28b983845c5bc14965030e3fb98789734d416af77c4b \
+ # via uvicorn
+
+# The following packages are considered to be unsafe in a requirements file:
+setuptools==47.1.1 \
+ --hash=sha256:145fa62b9d7bb544fce16e9b5a9bf4ab2032d2f758b7cd674af09a92736aff74 \
+ --hash=sha256:74f33f44290f95c5c4a7c13ccc9d6d1a16837fe9dce0acf411dd244e7de95143 \
+ # via -r requirements.in, protobuf
diff --git a/test.Dockerfile b/test.Dockerfile
new file mode 100644
index 00000000..3761812c
--- /dev/null
+++ b/test.Dockerfile
@@ -0,0 +1,3 @@
+FROM ubuntu:latest
+
+COPY ds_pybind_v0.9.tbz2 ./
diff --git a/x86.Dockerfile b/x86_64.Dockerfile
similarity index 100%
rename from x86.Dockerfile
rename to x86_64.Dockerfile