real-time.py
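
"""Real-time demo: hand detection followed by gesture and fingertip recognition.

Grabs webcam frames, detects the hand with a SOLO or YOLO detector, then runs
the unified Fingertips network to classify the gesture and regress fingertip
positions. Press Esc to quit.
"""
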
import cv2  # requires OpenCV; run in the base python environment
import numpy as np

from unified_detector import Fingertips
from hand_detector.detector import SOLO, YOLO
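
# choose the hand detector: 'solo' or 'yolo'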
hand_detection_method = 'yolo'

if hand_detection_method == 'solo':
    hand = SOLO(weights='weights/solo.h5', threshold=0.8)
elif hand_detection_method == 'yolo':
    hand = YOLO(weights='weights/yolo.h5', threshold=0.8)
else:
    assert False, "'" + hand_detection_method + "' hand detection does not exist. Use either 'solo' or 'yolo' as the hand detection method."
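
# unified network for gesture classification and fingertip regression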
fingertips = Fingertips(weights='weights/classes8.h5')

cam = cv2.VideoCapture(0)
print('AirCtrl Demo')

while True:
    ret, image = cam.read()

    if ret is False:
        break

    # hand detection
    tl, br = hand.detect(image=image)
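    # tl, br: top-left and bottom-right corners of the hand bounding box,
    # or None when no hand is detected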

    if tl is not None and br is not None:
        cropped_image = image[tl[1]:br[1], tl[0]:br[0]]
        height, width, _ = cropped_image.shape

        # gesture classification and fingertips regression
        prob, pos = fingertips.classify(image=cropped_image)
        pos = np.mean(pos, 0)
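        # prob holds one confidence per fingertip; pos holds (x, y) pairs
        # normalized to the crop (np.mean appears to average multiple
        # positional estimates from the network into one)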

        # post-processing
        prob = np.asarray([(p >= 0.5) * 1.0 for p in prob])
        for i in range(0, len(pos), 2):
            pos[i] = pos[i] * width + tl[0]
            pos[i + 1] = pos[i + 1] * height + tl[1]
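        # prob is now binarized at 0.5 and pos is mapped from crop-relative
        # values back to full-frame pixel coordinates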

        # drawing
        index = 0
        color = [(15, 15, 240), (15, 240, 155), (240, 155, 15), (240, 15, 155), (240, 15, 240)]
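        # one BGR color per fingertip class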
        image = cv2.rectangle(image, (tl[0], tl[1]), (br[0], br[1]), (235, 26, 158), 2)
        for c, p in enumerate(prob):
            if p > 0.5:
                image = cv2.circle(image, (int(pos[index]), int(pos[index + 1])), radius=12,
                                   color=color[c], thickness=-2)
                index = index + 2
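
    # press Esc (key code 27) to quit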
    if cv2.waitKey(1) & 0xff == 27:
        break

    # display image
    cv2.imshow('AirCtrl Demo', image)

cam.release()
cv2.destroyAllWindows()