diff --git a/src/obstacle_detection/models/yolov8n.pt b/src/obstacle_detection/models/yolov8n.pt
new file mode 100644
index 0000000..0db4ca4
Binary files /dev/null and b/src/obstacle_detection/models/yolov8n.pt differ
diff --git a/src/obstacle_detection/models/yolov8n_openvino_model/metadata.yaml b/src/obstacle_detection/models/yolov8n_openvino_model/metadata.yaml
new file mode 100644
index 0000000..3cee20b
--- /dev/null
+++ b/src/obstacle_detection/models/yolov8n_openvino_model/metadata.yaml
@@ -0,0 +1,93 @@
+description: Ultralytics YOLOv8n model trained on coco.yaml
+author: Ultralytics
+date: '2024-11-22T04:09:47.358215'
+version: 8.3.36
+license: AGPL-3.0 License (https://ultralytics.com/license)
+docs: https://docs.ultralytics.com
+stride: 32
+task: detect
+batch: 1
+imgsz:
+- 640
+- 640
+names:
+ 0: person
+ 1: bicycle
+ 2: car
+ 3: motorcycle
+ 4: airplane
+ 5: bus
+ 6: train
+ 7: truck
+ 8: boat
+ 9: traffic light
+ 10: fire hydrant
+ 11: stop sign
+ 12: parking meter
+ 13: bench
+ 14: bird
+ 15: cat
+ 16: dog
+ 17: horse
+ 18: sheep
+ 19: cow
+ 20: elephant
+ 21: bear
+ 22: zebra
+ 23: giraffe
+ 24: backpack
+ 25: umbrella
+ 26: handbag
+ 27: tie
+ 28: suitcase
+ 29: frisbee
+ 30: skis
+ 31: snowboard
+ 32: sports ball
+ 33: kite
+ 34: baseball bat
+ 35: baseball glove
+ 36: skateboard
+ 37: surfboard
+ 38: tennis racket
+ 39: bottle
+ 40: wine glass
+ 41: cup
+ 42: fork
+ 43: knife
+ 44: spoon
+ 45: bowl
+ 46: banana
+ 47: apple
+ 48: sandwich
+ 49: orange
+ 50: broccoli
+ 51: carrot
+ 52: hot dog
+ 53: pizza
+ 54: donut
+ 55: cake
+ 56: chair
+ 57: couch
+ 58: potted plant
+ 59: bed
+ 60: dining table
+ 61: toilet
+ 62: tv
+ 63: laptop
+ 64: mouse
+ 65: remote
+ 66: keyboard
+ 67: cell phone
+ 68: microwave
+ 69: oven
+ 70: toaster
+ 71: sink
+ 72: refrigerator
+ 73: book
+ 74: clock
+ 75: vase
+ 76: scissors
+ 77: teddy bear
+ 78: hair drier
+ 79: toothbrush
diff --git a/src/obstacle_detection/models/yolov8n_openvino_model/yolov8n-simplified.onnx b/src/obstacle_detection/models/yolov8n_openvino_model/yolov8n-simplified.onnx
new file mode 100644
index 0000000..5c3e968
Binary files /dev/null and b/src/obstacle_detection/models/yolov8n_openvino_model/yolov8n-simplified.onnx differ
diff --git a/src/obstacle_detection/models/yolov8n_openvino_model/yolov8n.bin b/src/obstacle_detection/models/yolov8n_openvino_model/yolov8n.bin
new file mode 100644
index 0000000..56e493f
Binary files /dev/null and b/src/obstacle_detection/models/yolov8n_openvino_model/yolov8n.bin differ
diff --git a/src/obstacle_detection/models/yolov8n_openvino_model/yolov8n.json b/src/obstacle_detection/models/yolov8n_openvino_model/yolov8n.json
new file mode 100644
index 0000000..6818a84
--- /dev/null
+++ b/src/obstacle_detection/models/yolov8n_openvino_model/yolov8n.json
@@ -0,0 +1,104 @@
+{
+ "model": {
+ "xml": "yolov8n.xml",
+ "bin": "yolov8n.bin"
+ },
+ "nn_config": {
+ "output_format": "detection",
+ "NN_family": "YOLO",
+ "input_size": "640x640",
+ "NN_specific_metadata": {
+ "classes": 80,
+ "coordinates": 4,
+ "anchors": [],
+ "anchor_masks": {},
+ "iou_threshold": 0.5,
+ "confidence_threshold": 0.5
+ }
+ },
+ "mappings": {
+ "labels": [
+ "person",
+ "bicycle",
+ "car",
+ "motorbike",
+ "aeroplane",
+ "bus",
+ "train",
+ "truck",
+ "boat",
+ "traffic light",
+ "fire hydrant",
+ "stop sign",
+ "parking meter",
+ "bench",
+ "bird",
+ "cat",
+ "dog",
+ "horse",
+ "sheep",
+ "cow",
+ "elephant",
+ "bear",
+ "zebra",
+ "giraffe",
+ "backpack",
+ "umbrella",
+ "handbag",
+ "tie",
+ "suitcase",
+ "frisbee",
+ "skis",
+ "snowboard",
+ "sports ball",
+ "kite",
+ "baseball bat",
+ "baseball glove",
+ "skateboard",
+ "surfboard",
+ "tennis racket",
+ "bottle",
+ "wine glass",
+ "cup",
+ "fork",
+ "knife",
+ "spoon",
+ "bowl",
+ "banana",
+ "apple",
+ "sandwich",
+ "orange",
+ "broccoli",
+ "carrot",
+ "hot dog",
+ "pizza",
+ "donut",
+ "cake",
+ "chair",
+ "sofa",
+ "pottedplant",
+ "bed",
+ "diningtable",
+ "toilet",
+ "tvmonitor",
+ "laptop",
+ "mouse",
+ "remote",
+ "keyboard",
+ "cell phone",
+ "microwave",
+ "oven",
+ "toaster",
+ "sink",
+ "refrigerator",
+ "book",
+ "clock",
+ "vase",
+ "scissors",
+ "teddy bear",
+ "hair drier",
+ "toothbrush"
+ ]
+ },
+ "version": 1
+}
\ No newline at end of file
diff --git a/src/obstacle_detection/models/yolov8n_openvino_model/yolov8n.xml b/src/obstacle_detection/models/yolov8n_openvino_model/yolov8n.xml
new file mode 100644
index 0000000..875736b
--- /dev/null
+++ b/src/obstacle_detection/models/yolov8n_openvino_model/yolov8n.xml
@@ -0,0 +1,11104 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 16
+ 3
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 3
+ 640
+ 640
+
+
+ 16
+ 3
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 16
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 16
+ 320
+ 320
+
+
+ 1
+ 16
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 16
+ 320
+ 320
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 32
+ 16
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 16
+ 320
+ 320
+
+
+ 32
+ 16
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 160
+ 160
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 32
+ 160
+ 160
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 32
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 160
+ 160
+
+
+ 32
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 160
+ 160
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 32
+ 160
+ 160
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 160
+ 160
+
+
+
+ 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 16
+ 16
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 16
+ 160
+ 160
+
+
+ 16
+ 16
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 16
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 16
+ 160
+ 160
+
+
+ 1
+ 16
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 16
+ 160
+ 160
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 16
+ 16
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 16
+ 160
+ 160
+
+
+ 16
+ 16
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 16
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 16
+ 160
+ 160
+
+
+ 1
+ 16
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 16
+ 160
+ 160
+
+
+
+
+
+
+
+
+ 1
+ 16
+ 160
+ 160
+
+
+ 1
+ 16
+ 160
+ 160
+
+
+
+
+
+
+
+
+ 1
+ 16
+ 160
+ 160
+
+
+ 1
+ 16
+ 160
+ 160
+
+
+ 1
+ 16
+ 160
+ 160
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 32
+ 48
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 48
+ 160
+ 160
+
+
+ 32
+ 48
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 160
+ 160
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 32
+ 160
+ 160
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 32
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 160
+ 160
+
+
+ 64
+ 32
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+ 64
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+
+ 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 32
+ 32
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 32
+ 32
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 32
+ 32
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 32
+ 32
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 32
+ 32
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 32
+ 32
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 32
+ 32
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 32
+ 32
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 80
+ 80
+
+
+ 64
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 128
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+ 128
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 128
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+ 128
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+
+ 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 128
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 40
+ 40
+
+
+ 128
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 256
+ 128
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+ 256
+ 128
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+ 1
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 256
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+ 256
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+ 1
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+
+ 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 128
+ 128
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 128
+ 128
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 128
+ 128
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 128
+ 128
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 256
+ 384
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 384
+ 20
+ 20
+
+
+ 256
+ 384
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+ 1
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 128
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+ 128
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 256
+ 512
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 512
+ 20
+ 20
+
+
+ 256
+ 512
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+ 1
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+
+
+
+
+
+
+ 4
+
+
+
+
+
+
+
+
+
+
+
+
+ 4
+
+
+ 4
+
+
+
+
+
+
+
+
+
+
+
+
+ 4
+
+
+ 1
+
+
+
+
+
+
+
+ 4
+
+
+
+
+
+
+
+
+ 4
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 4
+
+
+ 1
+
+
+ 1
+
+
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+ 2
+
+
+ 2
+
+
+ 2
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 40
+ 40
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 128
+ 384
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 384
+ 40
+ 40
+
+
+ 128
+ 384
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+
+ 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 128
+ 192
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 192
+ 40
+ 40
+
+
+ 128
+ 192
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+
+
+
+
+
+
+ 4
+
+
+
+
+
+
+
+
+
+
+
+
+ 4
+
+
+ 4
+
+
+
+
+
+
+
+
+
+
+
+
+ 4
+
+
+ 1
+
+
+
+
+
+
+
+ 4
+
+
+
+
+
+
+
+
+ 4
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 4
+
+
+ 1
+
+
+ 1
+
+
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+ 2
+
+
+ 2
+
+
+ 2
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 80
+ 80
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 192
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 192
+ 80
+ 80
+
+
+ 64
+ 192
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+
+ 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 32
+ 32
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 32
+ 32
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 32
+ 32
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 32
+ 32
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 1
+ 32
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+
+
+
+
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+ 1
+ 32
+ 80
+ 80
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 96
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 96
+ 80
+ 80
+
+
+ 64
+ 96
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+ 64
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 80
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+ 80
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 80
+ 80
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 80
+ 80
+ 80
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 80
+ 80
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 80
+ 80
+
+
+ 80
+ 80
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 80
+ 80
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 80
+ 80
+ 80
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 80
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 80
+ 80
+
+
+ 80
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 80
+ 80
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+ 1
+ 80
+ 80
+ 80
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 144
+ 80
+ 80
+
+
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 80
+ 80
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 128
+ 192
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 192
+ 40
+ 40
+
+
+ 128
+ 192
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+
+ 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 128
+ 192
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 192
+ 40
+ 40
+
+
+ 128
+ 192
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 128
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+ 64
+ 128
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 64
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 80
+ 128
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+ 80
+ 128
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 40
+ 40
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 80
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 80
+ 80
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 40
+ 40
+
+
+ 80
+ 80
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 40
+ 40
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 80
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 80
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 40
+ 40
+
+
+ 80
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 40
+ 40
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 40
+ 40
+
+
+ 1
+ 80
+ 40
+ 40
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 144
+ 40
+ 40
+
+
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 128
+ 128
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 40
+ 40
+
+
+ 128
+ 128
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 256
+ 384
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 384
+ 20
+ 20
+
+
+ 256
+ 384
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+ 1
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+
+ 2
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 128
+ 128
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 128
+ 128
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 128
+ 128
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 128
+ 128
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 1
+ 128
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+
+
+
+
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+ 1
+ 128
+ 20
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 256
+ 384
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 384
+ 20
+ 20
+
+
+ 256
+ 384
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+ 1
+ 256
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 256
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+ 64
+ 256
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 20
+ 20
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 20
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 20
+ 20
+
+
+ 64
+ 64
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 20
+ 20
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 64
+ 20
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 64
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 20
+ 20
+
+
+ 64
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 20
+ 20
+
+
+ 1
+ 64
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 80
+ 256
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 256
+ 20
+ 20
+
+
+ 80
+ 256
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 20
+ 20
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 80
+ 20
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 80
+ 80
+ 3
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 20
+ 20
+
+
+ 80
+ 80
+ 3
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 20
+ 20
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+ 1
+ 80
+ 20
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 80
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 20
+ 20
+
+
+ 80
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 20
+ 20
+
+
+ 1
+ 80
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 20
+ 20
+
+
+ 1
+ 80
+ 20
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 144
+ 20
+ 20
+
+
+ 3
+
+
+
+
+
+
+
+
+ 1
+ 144
+ 6400
+
+
+ 1
+ 144
+ 1600
+
+
+ 1
+ 144
+ 400
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 144
+ 8400
+
+
+
+ 2
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 64
+ 8400
+
+
+ 4
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 4
+ 16
+ 8400
+
+
+ 4
+
+
+
+
+
+
+
+
+ 1
+ 8400
+ 4
+ 16
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 4
+
+
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 3
+
+
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+
+
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 8400
+ 4
+ 16
+
+
+ 2
+
+
+
+
+
+
+
+
+ 33600
+ 16
+
+
+
+
+
+
+
+
+ 33600
+ 16
+
+
+ 4
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 8400
+ 4
+ 16
+
+
+ 4
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 16
+ 1
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 16
+ 4
+ 8400
+
+
+ 1
+ 16
+ 1
+ 1
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 1
+ 4
+ 8400
+
+
+ 3
+
+
+
+
+
+
+
+ 1
+ 80
+ 8400
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 80
+ 8400
+
+
+ 1
+
+
+
+
+
+
+
+
+ 1
+ 4
+ 8400
+
+
+ 1
+ 1
+ 8400
+
+
+ 1
+ 80
+ 8400
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 85
+ 8400
+
+
+
+ 3
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 85
+ 400
+
+
+ 4
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 85
+ 1600
+
+
+ 4
+
+
+
+
+
+
+
+
+
+
+
+
+ 1
+ 85
+ 6400
+
+
+ 4
+
+
+
+
+
+
+
+ 1
+ 85
+ 80
+ 80
+
+
+
+
+
+
+ 1
+ 85
+ 40
+ 40
+
+
+
+
+
+
+ 1
+ 85
+ 20
+ 20
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/obstacle_detection/models/yolov8n_openvino_model/yolov8n_openvino_2022.1_6shave.blob b/src/obstacle_detection/models/yolov8n_openvino_model/yolov8n_openvino_2022.1_6shave.blob
new file mode 100644
index 0000000..8d347f8
Binary files /dev/null and b/src/obstacle_detection/models/yolov8n_openvino_model/yolov8n_openvino_2022.1_6shave.blob differ
diff --git a/src/obstacle_detection/obstacle_detection/__init__.py b/src/obstacle_detection/obstacle_detection/__init__.py
new file mode 100644
index 0000000..16c3061
--- /dev/null
+++ b/src/obstacle_detection/obstacle_detection/__init__.py
@@ -0,0 +1,4 @@
+from .camera_localizer import CameraLocalizer, CameraLocalizerDetection
+from .camera_localizer_node import CameraLocalizerNode
+
+__all__ = ['CameraLocalizer', 'CameraLocalizerDetection', 'CameraLocalizerNode']
\ No newline at end of file
diff --git a/src/obstacle_detection/obstacle_detection/camera_localizer.py b/src/obstacle_detection/obstacle_detection/camera_localizer.py
new file mode 100644
index 0000000..a33dc6b
--- /dev/null
+++ b/src/obstacle_detection/obstacle_detection/camera_localizer.py
@@ -0,0 +1,434 @@
+from typing import List
+import depthai as dai
+import numpy as np
+from pathlib import Path
+import threading
+import time
+import cv2
+from dataclasses import dataclass
+
+
@dataclass
class CameraLocalizerDetection:
    """One spatial detection produced by CameraLocalizer.

    Angles are in degrees (computed via arctan2 of spatial coordinates);
    distances/heights are in millimeters (DepthAI spatial coordinates are mm).
    Bounding-box fields are the detector's normalized [0, 1] coordinates —
    consumers multiply by the frame size to get pixels.
    """
    angle: float       # horizontal angle from the camera's optical axis, degrees
    height: float      # estimated real-world object height, mm
    distance: float    # ground-plane (x-z hypotenuse) distance to the object, mm
    xmin: float        # normalized bounding-box left edge, [0, 1]
    xmax: float        # normalized bounding-box right edge, [0, 1]
    ymin: float        # normalized bounding-box top edge, [0, 1]
    ymax: float        # normalized bounding-box bottom edge, [0, 1]
    spatial_x: float   # camera-space X, mm (sign convention per DepthAI — confirm)
    spatial_y: float   # camera-space Y, mm (sign convention per DepthAI — confirm)
    spatial_z: float   # camera-space Z (depth), mm
    confidence: float  # detector confidence in [0, 1]
+
class CameraLocalizer:
    """Spatial obstacle detector built on a DepthAI (OAK-D style) camera.

    Runs a YOLO spatial-detection network on-device, fusing the RGB preview
    with stereo depth, and derives per-detection horizontal angle (degrees),
    estimated object height (mm) and ground-plane distance (mm).

    Two modes:
      - blocking: runs in the caller's thread and shows an OpenCV window;
      - background: runs in a worker thread; results are read via
        getDetections(), getFilteredDetections() and get_current_frame().
    """

    # 130 mm = 13 cm = max height we can roll over; detections taller than
    # this are also copied into filtered_detections.
    MAX_HEIGHT = 130

    def __init__(
        self,
        model_path: str,
        openvino_version: dai.OpenVINO.Version = dai.OpenVINO.VERSION_2022_1,
    ) -> None:
        """
        Initializes the CameraLocalizer.

        Arguments:
        - model_path: str - the relative path of the blob model
        - openvino_version: dai.OpenVINO.Version - the version to use for loading the openvino model

        Raises:
        - FileNotFoundError - if no file exists at the resolved model_path
        """
        self.openvino_version = openvino_version

        # Store blob path, resolved to an absolute path so device-side loading
        # does not depend on the current working directory.
        self.blob_path = str(Path(model_path).resolve().absolute())

        # Verify blob file exists
        if not Path(self.blob_path).exists():
            raise FileNotFoundError(f"Model blob not found at: {self.blob_path}")

        self.pipeline = None
        self.lock = None  # created only in start(blocking=False)
        self._current_filtered_detections = []  # internal accumulator (per frame)
        self._current_detections = []  # internal accumulator (per frame)
        self.detections = []  # published snapshot, read via getDetections()
        self.filtered_detections = []  # published snapshot, read via getFilteredDetections()

        self.thread = None
        self.blocking = False
        self.current_frame = None  # latest preview frame (background mode only)
        self._create_pipeline()

    def _create_pipeline(self) -> None:
        """
        Create the pipeline, initializing inputs/outputs
        for the pipeline.

        If self.pipeline has already been created, this method
        does nothing. Else, this will set our pipeline to a
        dai.Pipeline object.
        """
        if self.pipeline is not None:
            return
        syncNN = True
        self.pipeline = dai.Pipeline()
        self.pipeline.setOpenVINOVersion(self.openvino_version)

        # Define sources and outputs
        camRgb = self.pipeline.create(dai.node.ColorCamera)
        spatialDetectionNetwork = self.pipeline.create(
            dai.node.YoloSpatialDetectionNetwork
        )
        monoLeft = self.pipeline.create(dai.node.MonoCamera)
        monoRight = self.pipeline.create(dai.node.MonoCamera)
        stereo = self.pipeline.create(dai.node.StereoDepth)
        nnNetworkOut = self.pipeline.create(dai.node.XLinkOut)

        xoutRgb = self.pipeline.create(dai.node.XLinkOut)
        xoutNN = self.pipeline.create(dai.node.XLinkOut)
        xoutDepth = self.pipeline.create(dai.node.XLinkOut)

        xoutRgb.setStreamName("rgb")
        xoutNN.setStreamName("detections")
        xoutDepth.setStreamName("depth")
        nnNetworkOut.setStreamName("nnNetwork")

        # YOLOv8 specific configuration
        camRgb.setPreviewSize(640, 640)  # YOLOv8 default input size
        camRgb.setResolution(dai.ColorCameraProperties.SensorResolution.THE_1080_P)
        camRgb.setInterleaved(False)
        camRgb.setColorOrder(dai.ColorCameraProperties.ColorOrder.BGR)
        camRgb.setFps(30)

        monoLeft.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
        monoLeft.setCamera("left")
        monoRight.setResolution(dai.MonoCameraProperties.SensorResolution.THE_400_P)
        monoRight.setCamera("right")

        # setting node configs
        stereo.setDefaultProfilePreset(dai.node.StereoDepth.PresetMode.HIGH_DENSITY)
        # Align depth map to the perspective of RGB camera, on which inference is done
        stereo.setDepthAlign(dai.CameraBoardSocket.CAM_A)
        stereo.setOutputSize(
            monoLeft.getResolutionWidth(), monoLeft.getResolutionHeight()
        )
        stereo.setSubpixel(True)

        spatialDetectionNetwork.setBlobPath(self.blob_path)  # Use the blob path
        spatialDetectionNetwork.setConfidenceThreshold(0.8)
        # Shrink the depth-averaging ROI to half of each bounding box to reduce
        # background depth bleeding into the spatial coordinates.
        spatialDetectionNetwork.setBoundingBoxScaleFactor(0.5)
        spatialDetectionNetwork.setDepthLowerThreshold(100)   # mm
        spatialDetectionNetwork.setDepthUpperThreshold(5000)  # mm

        # YOLOv8 specific parameters
        spatialDetectionNetwork.setNumClasses(80)  # COCO dataset classes
        spatialDetectionNetwork.setCoordinateSize(4)
        spatialDetectionNetwork.setIouThreshold(0.5)

        # YOLOv8 doesn't use anchors or masks
        spatialDetectionNetwork.setAnchors([])
        spatialDetectionNetwork.setAnchorMasks({})

        # Linking
        monoLeft.out.link(stereo.left)
        monoRight.out.link(stereo.right)

        camRgb.preview.link(spatialDetectionNetwork.input)
        if syncNN:
            # Route frames through the NN node so frames and detections stay in sync.
            spatialDetectionNetwork.passthrough.link(xoutRgb.input)
        else:
            camRgb.preview.link(xoutRgb.input)

        spatialDetectionNetwork.out.link(xoutNN.input)

        stereo.depth.link(spatialDetectionNetwork.inputDepth)
        spatialDetectionNetwork.passthroughDepth.link(xoutDepth.input)
        spatialDetectionNetwork.outNetwork.link(nnNetworkOut.input)

    def start(self, blocking: bool = True) -> None:
        """
        Start the camera.

        Arguments:
        - blocking: bool - if True, this camera will block the main thread.
          Else, this camera will run in a separate thread, and its detections will be visible
          via the getDetections method. Note that, when starting in a separate thread,
          it may take 3-10 seconds to get the camera opened and warmed up.
        """
        self.run = True
        self.blocking = blocking
        if blocking:
            self._run()
        else:
            # clear internals/publics
            self.detections = []
            self._current_detections = []
            self.filtered_detections = []
            self._current_filtered_detections = []

            # create/start worker thread (non-daemon: call stop() to exit cleanly)
            self.lock = threading.Lock()
            self.thread = threading.Thread(target=self._run)
            self.thread.start()

    def _run(self) -> None:
        """
        Helper to take care of running the camera.

        Opens the DepthAI device and loops until self.run is cleared (or 'q'
        is pressed in blocking mode). In blocking mode, detections are drawn
        on screen; in background mode, they are published (under self.lock)
        into self.detections / self.filtered_detections / self.current_frame.
        """
        maxSize = 2  # device output-queue depth; also reused as the depth subsample stride below
        fontColor = (0, 0, 255)

        with dai.Device(self.pipeline) as device:
            # Output queues will be used to get the rgb frames and nn data from the outputs defined above
            previewQueue = device.getOutputQueue(
                name="rgb", maxSize=maxSize, blocking=False
            )
            detectionNNQueue = device.getOutputQueue(
                name="detections", maxSize=maxSize, blocking=False
            )
            depthQueue = device.getOutputQueue(
                name="depth", maxSize=maxSize, blocking=False
            )
            networkQueue = device.getOutputQueue(
                name="nnNetwork", maxSize=maxSize, blocking=False
            )

            startTime = time.monotonic()
            counter = 0
            fps = 0
            color = (255, 255, 255)
            printOutputLayersOnce = True

            while self.run:
                inPreview = previewQueue.get()
                inDet = detectionNNQueue.get()
                depth = depthQueue.get()
                inNN = networkQueue.get()

                # Log the network's output layer names once, for debugging.
                if printOutputLayersOnce:
                    toPrint = "Output layer names:"
                    for ten in inNN.getAllLayerNames():
                        toPrint = f"{toPrint} {ten},"
                    print(toPrint)
                    printOutputLayersOnce = False

                frame = inPreview.getCvFrame()
                depthFrame = depth.getFrame()  # depthFrame values are in millimeters

                # Row-subsample the depth map to cheaply estimate its dynamic
                # range for colorization.
                depth_downscaled = depthFrame[::maxSize]
                if np.all(depth_downscaled == 0):
                    min_depth = 0  # Set a default minimum depth value when all elements are zero
                else:
                    min_depth = np.percentile(
                        depth_downscaled[depth_downscaled != 0], 1
                    )
                max_depth = np.percentile(depth_downscaled, 99)
                depthFrameColor = np.interp(
                    depthFrame, (min_depth, max_depth), (0, 255)
                ).astype(np.uint8)

                depthFrameColor = cv2.applyColorMap(depthFrameColor, cv2.COLORMAP_HOT)

                # Rolling FPS estimate, refreshed roughly once per second.
                counter += 1
                current_time = time.monotonic()
                if (current_time - startTime) > 1:
                    fps = counter / (current_time - startTime)
                    counter = 0
                    startTime = current_time

                detections = inDet.detections

                # If the frame is available, draw bounding boxes on it and show the frame
                height = frame.shape[0]
                width = frame.shape[1]
                for detection in detections:
                    # ROI used on-device for depth averaging, in depth-frame pixels.
                    # NOTE(review): this ROI is shrunk by the 0.5 bounding-box
                    # scale factor set in _create_pipeline, so the height
                    # estimate below covers only half the box — confirm intended.
                    roiData = detection.boundingBoxMapping
                    roi = roiData.roi
                    roi = roi.denormalize(
                        depthFrameColor.shape[1], depthFrameColor.shape[0]
                    )
                    topLeft = roi.topLeft()
                    bottomRight = roi.bottomRight()
                    xmin = int(topLeft.x)
                    ymin = int(topLeft.y)
                    xmax = int(bottomRight.x)
                    ymax = int(bottomRight.y)

                    if detection.spatialCoordinates.z != 0:
                        dep = detection.spatialCoordinates.z
                    else:
                        dep = 1e-6  # epsilon to avoid division by 0

                    # angles from the center
                    horizontal_angle = (
                        np.arctan2(detection.spatialCoordinates.x, dep) * 180 / np.pi
                    )
                    vertical_angle = (
                        np.arctan2(detection.spatialCoordinates.y, dep) * 180 / np.pi
                    )

                    # DFOV / HFOV / VFOV = 120° / 95° / 70°
                    # FOV = 2 x working distance x tan (AFOV/2) - somehow this gives 2* what we need, so ignore 2x
                    # NOTE(review): tan(70°) of the FULL vertical FOV is used, not
                    # tan(35°) of the half-angle — calibration fudge; verify.
                    dist = detection.spatialCoordinates.z  # in mm
                    deg2rad = np.pi / 180
                    vfov = np.tan(70 * deg2rad) * dist  # in mm
                    # BUGFIX: ymin/ymax are in depth-frame pixel coordinates, so
                    # normalize by the depth frame's HEIGHT. The original divided
                    # by frame.shape[1] — the RGB preview's WIDTH.
                    hPercent = (ymax - ymin) / depthFrameColor.shape[0]  # px / px = percent
                    h = hPercent * vfov

                    # only visualize if running in blocking mode
                    if self.blocking:
                        cv2.rectangle(
                            depthFrameColor, (xmin, ymin), (xmax, ymax), color, 1
                        )

                        # Denormalize bounding box
                        x1 = int(detection.xmin * width)
                        x2 = int(detection.xmax * width)
                        y1 = int(detection.ymin * height)
                        y2 = int(detection.ymax * height)

                        increment = 15
                        start = y1 + 20

                        def putTexts(texts: List[str]):
                            # Stack the given lines of text inside the box.
                            for idx, text in enumerate(texts):
                                cv2.putText(
                                    frame,
                                    text,
                                    (x1 + 10, start + increment * idx),
                                    cv2.FONT_HERSHEY_TRIPLEX,
                                    0.5,
                                    fontColor,
                                )

                        # H is the horizontal angle (angle from center of camera to object's horizontal position)
                        # V is the vertical angle (angle from center of camera to object's vertical position)
                        putTexts(
                            [
                                "rock",
                                "{:.2f}".format(detection.confidence * 100),
                                f"X: {int(detection.spatialCoordinates.x)} mm",
                                f"Y: {int(detection.spatialCoordinates.y)} mm",
                                f"Z: {int(detection.spatialCoordinates.z)} mm",
                                f"H*: {int(horizontal_angle)} deg",
                                f"V*: {int(vertical_angle)} deg",
                                f"HE: {int(h)} mm",  # height in mm; max we can roll over is ~130mm
                            ]
                        )

                        # BUGFIX: thickness was cv2.FONT_HERSHEY_SIMPLEX (== 0),
                        # a font enum accidentally passed as line thickness.
                        cv2.rectangle(frame, (x1, y1), (x2, y2), color, 1)

                    else:  # running in bg
                        with self.lock:
                            # we only want the distance using the x and z, not using y
                            # so we just use pythagorean theorem
                            xz_dist = np.sqrt(detection.spatialCoordinates.x**2 + detection.spatialCoordinates.z**2)
                            dec = CameraLocalizerDetection(
                                np.arctan2(detection.spatialCoordinates.x, dep) * 180 / np.pi,
                                h,
                                distance=xz_dist,
                                xmin=detection.xmin,
                                xmax=detection.xmax,
                                ymin=detection.ymin,
                                ymax=detection.ymax,
                                spatial_x=detection.spatialCoordinates.x,
                                spatial_y=detection.spatialCoordinates.y,
                                spatial_z=detection.spatialCoordinates.z,
                                confidence=detection.confidence
                            )
                            self._current_detections.append(dec)
                            # Anything taller than MAX_HEIGHT is an obstacle we
                            # cannot roll over.
                            if dec.height > CameraLocalizer.MAX_HEIGHT:
                                self._current_filtered_detections.append(dec)

                if self.blocking:
                    cv2.putText(
                        frame,
                        "NN fps: {:.2f}".format(fps),
                        (2, frame.shape[0] - 4),
                        cv2.FONT_HERSHEY_TRIPLEX,
                        0.4,
                        color,
                    )
                    cv2.imshow("rgb", frame)

                    if cv2.waitKey(1) == ord("q"):
                        break
                else:
                    # update our publicly accessible lists
                    with self.lock:
                        self.detections = self._current_detections
                        self.filtered_detections = self._current_filtered_detections

                        # clear the internals
                        self._current_detections = []
                        self._current_filtered_detections = []
                        # BUGFIX: store the frame snapshot while still holding
                        # the lock so get_current_frame() never races the write.
                        frame = inPreview.getCvFrame()
                        self.current_frame = frame.copy()

    def get_current_frame(self):
        """
        Returns the most recent camera frame.

        Only meaningful in background mode; returns None when running in
        blocking mode (no lock exists) or before the first frame arrives.
        """
        if self.lock:
            with self.lock:
                return self.current_frame
        return None

    def getDetections(self) -> List[CameraLocalizerDetection]:
        """
        Retrieve the latest detections.

        Raises if the camera was not started in non-blocking mode.
        """
        if self.lock:
            with self.lock:
                return self.detections
        else:
            raise Exception("You forgot to set it to nonblocking!!!!")

    def getFilteredDetections(self) -> List[CameraLocalizerDetection]:
        """
        Retrieve the latest detections, filtered by height (> MAX_HEIGHT).

        Raises if the camera was not started in non-blocking mode.
        """
        if self.lock:
            with self.lock:
                return self.filtered_detections
        else:
            raise Exception("You forgot to set it to nonblocking!!!!")

    def stop(self):
        """
        Stops execution of the thread that is running in the background.
        """
        if not self.blocking:
            self.run = False
            # BUGFIX: guard against stop() being called before start():
            # self.thread is None until start(blocking=False) runs.
            if self.thread is not None:
                self.thread.join()
        else:
            raise Exception(
                "You appear to have called stop when it was never running in the background :<"
            )
\ No newline at end of file
diff --git a/src/obstacle_detection/obstacle_detection/camera_localizer_node.py b/src/obstacle_detection/obstacle_detection/camera_localizer_node.py
new file mode 100644
index 0000000..2b41110
--- /dev/null
+++ b/src/obstacle_detection/obstacle_detection/camera_localizer_node.py
@@ -0,0 +1,118 @@
+import rclpy
+from rclpy.node import Node
+from .camera_localizer import CameraLocalizer, CameraLocalizerDetection
+from std_msgs.msg import Float32MultiArray
+from sensor_msgs.msg import Image
+from threading import Thread
+import numpy as np
+import cv2
+from cv_bridge import CvBridge
+import os
+from glob import glob
+from ament_index_python.packages import get_package_share_directory
+
class CameraLocalizerNode(Node):
    """ROS2 wrapper around CameraLocalizer.

    Publishes:
      - 'detections' (std_msgs/Float32MultiArray): one message per detection,
        data = [angle_deg, height_mm, distance_mm]
      - 'camera_image' (sensor_msgs/Image): latest annotated BGR preview frame
    """

    def __init__(self):
        super().__init__('camera_localizer_node')
        self.publisher_ = self.create_publisher(Float32MultiArray, 'detections', 10)
        self.image_publisher_ = self.create_publisher(Image, 'camera_image', 10)
        self.bridge = CvBridge()

        # Locate the model blob installed under this package's share directory.
        package_share_dir = get_package_share_directory('obstacle_detection')
        models_dir = os.path.join(package_share_dir, 'models')

        # Find all .blob files in the models directory
        blob_files = glob(os.path.join(models_dir, '**/*.blob'), recursive=True)

        if not blob_files:
            self.get_logger().error(f'No .blob files found in {models_dir}')
            raise FileNotFoundError(f'No blob model files found in {models_dir}')

        # Use the first blob file found
        model_path = blob_files[0]
        self.get_logger().info(f'Using model: {model_path}')

        self.camera_localizer = CameraLocalizer(model_path=model_path)

        # Start the camera in a separate thread.
        # NOTE(review): CameraLocalizer.start(blocking=False) already spawns
        # its own worker thread and returns immediately, so this wrapper
        # thread exits right away — it is kept for interface compatibility.
        self.camera_thread = Thread(target=self.run_camera_localizer)
        self.camera_thread.start()

        # Create a timer to periodically publish detections (10 Hz)
        self.timer = self.create_timer(0.1, self.timer_callback)

    def run_camera_localizer(self):
        """Start the camera localizer in non-blocking (background) mode."""
        self.camera_localizer.start(blocking=False)

    def timer_callback(self):
        """Annotate and publish the latest frame, then publish each detection."""
        # NOTE(review): getDetections() raises until start() has created the
        # localizer's lock; with camera warm-up (~3-10 s) the first ticks may
        # race that — confirm acceptable.
        detections = self.camera_localizer.getDetections()
        frame = self.camera_localizer.get_current_frame()

        if frame is not None:
            # BUGFIX: draw on a private copy. get_current_frame() returns the
            # localizer's stored buffer, so drawing directly on it would
            # accumulate annotations across callbacks on the same frame.
            frame = frame.copy()
            height, width = frame.shape[:2]
            color = (255, 255, 255)
            fontColor = (0, 0, 255)

            for detection in detections:
                # Denormalize bounding box from [0, 1] to pixel coordinates.
                x1 = int(detection.xmin * width)
                x2 = int(detection.xmax * width)
                y1 = int(detection.ymin * height)
                y2 = int(detection.ymax * height)

                # Draw bounding box.
                # BUGFIX: thickness was cv2.FONT_HERSHEY_SIMPLEX (== 0), a
                # font enum accidentally passed as line thickness.
                cv2.rectangle(frame, (x1, y1), (x2, y2), color, 1)

                # Add text information inside the box, one line per field.
                increment = 15
                start = y1 + 20
                texts = [
                    "rock",
                    f"{detection.confidence * 100:.2f}",
                    f"X: {int(detection.spatial_x)} mm",
                    f"Y: {int(detection.spatial_y)} mm",
                    f"Z: {int(detection.spatial_z)} mm",
                    f"H*: {int(detection.angle)} deg",
                    f"HE: {int(detection.height)} mm",
                ]

                for idx, text in enumerate(texts):
                    cv2.putText(
                        frame,
                        text,
                        (x1 + 10, start + increment * idx),
                        cv2.FONT_HERSHEY_TRIPLEX,
                        0.5,
                        fontColor,
                    )

            # Convert to ROS Image message and publish
            image_msg = self.bridge.cv2_to_imgmsg(frame, encoding='bgr8')
            self.image_publisher_.publish(image_msg)

        # Publish detection data: one [angle, height, distance] triple per message.
        for detection in detections:
            msg = Float32MultiArray()
            msg.data = [detection.angle, detection.height, detection.distance]
            self.publisher_.publish(msg)

    def destroy_node(self):
        """Stop the background camera thread before tearing the node down."""
        self.camera_localizer.stop()
        super().destroy_node()
+
def main(args=None):
    """Entry point: spin a CameraLocalizerNode until interrupted, then clean up."""
    rclpy.init(args=args)
    localizer_node = CameraLocalizerNode()
    try:
        rclpy.spin(localizer_node)
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop the node; exit quietly.
        pass
    finally:
        # Always release the camera thread and the ROS context, even on error.
        localizer_node.destroy_node()
        rclpy.shutdown()

if __name__ == '__main__':
    main()
\ No newline at end of file
diff --git a/src/obstacle_detection/package.xml b/src/obstacle_detection/package.xml
new file mode 100644
index 0000000..417bc88
--- /dev/null
+++ b/src/obstacle_detection/package.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
+<package format="3">
+  <name>obstacle_detection</name>
+  <version>0.0.0</version>
+  <description>ROS2 node wrapper for obstacle detection</description>
+  <maintainer email="Gill.Teghbir@gmail.com">SJSU Robotics Team</maintainer>
+  <license>Apache-2.0</license>
+
+  <depend>rclpy</depend>
+  <depend>sensor_msgs</depend>
+  <depend>visualization_msgs</depend>
+  <depend>geometry_msgs</depend>
+  <depend>cv_bridge</depend>
+  <depend>std_msgs</depend>
+  <depend>openvino</depend>
+
+  <test_depend>ament_copyright</test_depend>
+  <test_depend>ament_flake8</test_depend>
+  <test_depend>ament_pep257</test_depend>
+  <test_depend>python3-pytest</test_depend>
+
+  <export>
+    <build_type>ament_python</build_type>
+  </export>
+</package>
\ No newline at end of file
diff --git a/src/obstacle_detection_node/obstacle_detection_node/__init__.py b/src/obstacle_detection/resource/obstacle_detection
similarity index 100%
rename from src/obstacle_detection_node/obstacle_detection_node/__init__.py
rename to src/obstacle_detection/resource/obstacle_detection
diff --git a/src/obstacle_detection/setup.cfg b/src/obstacle_detection/setup.cfg
new file mode 100644
index 0000000..1843e42
--- /dev/null
+++ b/src/obstacle_detection/setup.cfg
@@ -0,0 +1,4 @@
+[develop]
+script_dir=$base/lib/obstacle_detection
+[install]
+install_scripts=$base/lib/obstacle_detection
diff --git a/src/obstacle_detection_node/setup.py b/src/obstacle_detection/setup.py
similarity index 52%
rename from src/obstacle_detection_node/setup.py
rename to src/obstacle_detection/setup.py
index d1d8d73..8979e1b 100644
--- a/src/obstacle_detection_node/setup.py
+++ b/src/obstacle_detection/setup.py
@@ -1,6 +1,8 @@
from setuptools import find_packages, setup
+import os
+from glob import glob
-package_name = 'obstacle_detection_node'
+package_name = 'obstacle_detection'
setup(
name=package_name,
@@ -10,16 +12,19 @@
('share/ament_index/resource_index/packages',
['resource/' + package_name]),
('share/' + package_name, ['package.xml']),
+ (os.path.join('share', package_name, 'models'),
+ glob('models/**/*.*', recursive=True)),
],
install_requires=['setuptools'],
zip_safe=True,
- maintainer='tegh',
- maintainer_email='tegh@todo.todo',
- description='TODO: Package description',
- license='TODO: License declaration',
+ maintainer='SJSU Robotics Team',
+ maintainer_email='Gill.Teghbir@gmail.com',
+ description='ROS2 node wrapper for obstacle detection',
+ license='Apache-2.0',
tests_require=['pytest'],
entry_points={
'console_scripts': [
+ 'camera_localizer_node = obstacle_detection.camera_localizer_node:main',
],
},
-)
+)
\ No newline at end of file
diff --git a/src/obstacle_detection_node/test/test_copyright.py b/src/obstacle_detection/test/test_copyright.py
similarity index 100%
rename from src/obstacle_detection_node/test/test_copyright.py
rename to src/obstacle_detection/test/test_copyright.py
diff --git a/src/obstacle_detection_node/test/test_flake8.py b/src/obstacle_detection/test/test_flake8.py
similarity index 100%
rename from src/obstacle_detection_node/test/test_flake8.py
rename to src/obstacle_detection/test/test_flake8.py
diff --git a/src/obstacle_detection_node/test/test_pep257.py b/src/obstacle_detection/test/test_pep257.py
similarity index 100%
rename from src/obstacle_detection_node/test/test_pep257.py
rename to src/obstacle_detection/test/test_pep257.py
diff --git a/src/obstacle_detection_node/package.xml b/src/obstacle_detection_node/package.xml
deleted file mode 100644
index 9a3004c..0000000
--- a/src/obstacle_detection_node/package.xml
+++ /dev/null
@@ -1,18 +0,0 @@
-<?xml version="1.0"?>
-<?xml-model href="http://download.ros.org/schema/package_format3.xsd" schematypens="http://www.w3.org/2001/XMLSchema"?>
-<package format="3">
-  <name>obstacle_detection_node</name>
-  <version>0.0.0</version>
-  <description>TODO: Package description</description>
-  <maintainer email="tegh@todo.todo">tegh</maintainer>
-  <license>TODO: License declaration</license>
-
-  <test_depend>ament_copyright</test_depend>
-  <test_depend>ament_flake8</test_depend>
-  <test_depend>ament_pep257</test_depend>
-  <test_depend>python3-pytest</test_depend>
-
-  <export>
-    <build_type>ament_python</build_type>
-  </export>
-</package>
diff --git a/src/obstacle_detection_node/resource/obstacle_detection_node b/src/obstacle_detection_node/resource/obstacle_detection_node
deleted file mode 100644
index e69de29..0000000
diff --git a/src/obstacle_detection_node/setup.cfg b/src/obstacle_detection_node/setup.cfg
deleted file mode 100644
index 3bdb4a4..0000000
--- a/src/obstacle_detection_node/setup.cfg
+++ /dev/null
@@ -1,4 +0,0 @@
-[develop]
-script_dir=$base/lib/obstacle_detection_node
-[install]
-install_scripts=$base/lib/obstacle_detection_node