feat(hand-recognition)
liumengyuan1997 committed Aug 23, 2024
1 parent 3441954 commit 74c3941
Showing 9 changed files with 148 additions and 33 deletions.
4 changes: 4 additions & 0 deletions Makefile
@@ -108,6 +108,10 @@ deploy-face-landmarks:
$(FACE_MODELS_DIR)/blazeface-front.json \
$(FACE_MODELS_DIR)/emotion.bin \
$(FACE_MODELS_DIR)/emotion.json \
$(FACE_MODELS_DIR)/handlandmark-lite.bin \
$(FACE_MODELS_DIR)/handlandmark-lite.json \
$(FACE_MODELS_DIR)/handtrack.bin \
$(FACE_MODELS_DIR)/handtrack.json \
$(DEPLOY_DIR)
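
The four new entries deploy the hand-detector (handtrack) and hand-landmark (handlandmark-lite) models next to the existing face models, so the relative modelPath values configured later in this commit can be resolved at runtime. A minimal sketch of how @vladmandic/human would load them, assuming the models are served from the deploy directory (the base path below is illustrative):

import { Human } from '@vladmandic/human';

const human = new Human({
    modelBasePath: '/libs/', // hypothetical URL that $(DEPLOY_DIR) is served from
    hand: {
        enabled: true,
        detector: { modelPath: 'handtrack.json' }, // resolved against modelBasePath
        skeleton: { modelPath: 'handlandmark-lite.json' }
    }
});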

deploy-css:
4 changes: 4 additions & 0 deletions config.js
@@ -248,6 +248,10 @@ var config = {
// // Specifies whether there is a notification when you are the next speaker in line.
// disableNextSpeakerNotification: false,

// // Specifies whether raised hand recognition through the video stream is enabled.
// // The faceLandmarks configuration needs to be enabled for this to work.
// disableRaisedHandRecognition: false,

// // Specifies whether the raised hand will hide when someone becomes a dominant speaker or not.
// disableRemoveRaisedHandOnFocus: false,
// },
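
Raised-hand recognition is on by default and a deployment opts out with the new flag. A minimal sketch of the relevant config.js sections, assuming face-landmarks detection is enabled as the comment above requires (values are illustrative; the faceLandmarks keys are the ones this diff reads):

var config = {
    faceLandmarks: {
        enableFaceCentering: true,
        enableFaceExpressionsDetection: false
    },
    raisedHands: {
        // Set to true to turn off raised hand recognition through the video stream.
        disableRaisedHandRecognition: false
    }
};
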
1 change: 1 addition & 0 deletions react/features/base/config/configType.ts
@@ -491,6 +491,7 @@ export interface IConfig {
disableLowerHandByModerator?: boolean;
disableLowerHandNotification?: boolean;
disableNextSpeakerNotification?: boolean;
disableRaisedHandRecognition?: boolean;
disableRemoveRaisedHandOnFocus?: boolean;
};
readOnlyName?: boolean;
10 changes: 10 additions & 0 deletions react/features/base/config/functions.any.ts
@@ -122,6 +122,16 @@ export function getDisableNextSpeakerNotification(state: IReduxState) {
return state['features/base/config']?.raisedHands?.disableNextSpeakerNotification || false;
}

/**
* Selector used to get the disableRaisedHandRecognition.
*
* @param {Object} state - The global state.
* @returns {boolean}
*/
export function getDisableRaisedHandRecognition(state: IReduxState) {
return Boolean(state['features/base/config']?.raisedHands?.disableRaisedHandRecognition);
}
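
The selector coerces the flag to a boolean and defaults to false, so recognition stays enabled unless a deployment sets it. Hypothetical usage with react-redux (the component wiring is illustrative; the detector in this commit calls the selector directly with getState()):

import { useSelector } from 'react-redux';

import { getDisableRaisedHandRecognition } from '../base/config/functions.any';

// Inside a function component:
const raisedHandRecognitionDisabled = useSelector(getDisableRaisedHandRecognition);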

/**
* Selector used to get the endpoint used for fetching the recording.
*
27 changes: 23 additions & 4 deletions react/features/face-landmarks/FaceLandmarksDetector.ts
@@ -1,7 +1,10 @@
import 'image-capture';
import './createImageBitmap';
import { IStore } from '../app/types';
import { getDisableRaisedHandRecognition } from '../base/config/functions.any';
import { isMobileBrowser } from '../base/environment/utils';
import { raiseHand } from '../base/participants/actions';
import { getLocalParticipant, hasRaisedHand } from '../base/participants/functions';
import { getLocalVideoTrack } from '../base/tracks/functions';
import { getBaseUrl } from '../base/util/helpers';

@@ -11,12 +14,13 @@ import {
newFaceBox
} from './actions';
import {
DETECT,
DETECTION_TYPES,
DETECT_FACE,
FACE_LANDMARKS_DETECTION_ERROR_THRESHOLD,
INIT_WORKER,
NO_DETECTION,
NO_FACE_DETECTION_THRESHOLD,
RAISED_HAND_DURATION,
WEBHOOK_SEND_TIME_INTERVAL
} from './constants';
import {
@@ -35,6 +39,7 @@ class FaceLandmarksDetector {
private worker: Worker | null = null;
private lastFaceExpression: string | null = null;
private lastFaceExpressionTimestamp: number | null = null;
private lastRaisedHandTimestamp: number | null = null;
private webhookSendInterval: number | null = null;
private detectionInterval: number | null = null;
private recognitionActive = false;
@@ -107,8 +112,21 @@
workerUrl = window.URL.createObjectURL(workerBlob);
this.worker = new Worker(workerUrl, { name: 'Face Landmarks Worker' });
this.worker.onmessage = ({ data }: MessageEvent<any>) => {
const { faceExpression, faceBox, faceCount } = data;
const { faceExpression, faceBox, faceCount, raisedHand } = data;
const messageTimestamp = Date.now();
const localParticipant = getLocalParticipant(getState());

if (raisedHand && !hasRaisedHand(localParticipant)) {
if (!this.lastRaisedHandTimestamp) {
this.lastRaisedHandTimestamp = messageTimestamp;
}
if (messageTimestamp - this.lastRaisedHandTimestamp >= RAISED_HAND_DURATION) {
dispatch(raiseHand(true));
this.lastRaisedHandTimestamp = null;
}
} else {
this.lastRaisedHandTimestamp = null;
}

// If the number of faces detected is different from 1, we do not take that detection into consideration
if (faceCount !== 1) {
@@ -155,7 +173,8 @@
const { faceLandmarks } = state['features/base/config'];
const detectionTypes = [
faceLandmarks?.enableFaceCentering && DETECTION_TYPES.FACE_BOX,
faceLandmarks?.enableFaceExpressionsDetection && DETECTION_TYPES.FACE_EXPRESSIONS
faceLandmarks?.enableFaceExpressionsDetection && DETECTION_TYPES.FACE_EXPRESSIONS,
!getDisableRaisedHandRecognition(getState()) && DETECTION_TYPES.RAISED_HAND
].filter(Boolean);

this.worker.postMessage({
@@ -341,7 +360,7 @@
}

this.worker.postMessage({
type: DETECT_FACE,
type: DETECT,
image,
threshold: faceCenteringThreshold
});
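
The new onmessage branch debounces the per-frame raisedHand flag coming from the worker: the first positive frame arms lastRaisedHandTimestamp, any negative frame clears it, and raiseHand(true) is dispatched only once the flag has stayed positive for RAISED_HAND_DURATION; the hasRaisedHand guard avoids re-dispatching while the hand is already raised. A standalone sketch of the same pattern (class and method names are hypothetical):

class RaisedHandDebouncer {
    private since: number | null = null;

    constructor(private readonly durationMs: number) {}

    // Feed one detection result per frame; returns true at most once
    // per continuous raised-hand episode.
    update(raised: boolean, now: number = Date.now()): boolean {
        if (!raised) {
            this.since = null; // any drop resets the timer

            return false;
        }

        if (this.since === null) {
            this.since = now; // first positive frame arms the timer
        }

        if (now - this.since >= this.durationMs) {
            this.since = null; // fire once, then re-arm

            return true;
        }

        return false;
    }
}

const debouncer = new RaisedHandDebouncer(2000); // RAISED_HAND_DURATION
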
112 changes: 88 additions & 24 deletions react/features/face-landmarks/FaceLandmarksHelper.ts
@@ -1,13 +1,19 @@
import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
import { Config, FaceResult, Human } from '@vladmandic/human';

import { DETECTION_TYPES, FACE_DETECTION_SCORE_THRESHOLD, FACE_EXPRESSIONS_NAMING_MAPPING } from './constants';
import { Config, FaceResult, HandResult, Human } from '@vladmandic/human';

import {
DETECTION_TYPES,
FACE_DETECTION_SCORE_THRESHOLD,
FACE_EXPRESSIONS_NAMING_MAPPING,
HAND_DETECTION_SCORE_THRESHOLD
} from './constants';
import { DetectInput, DetectOutput, FaceBox, FaceExpression, InitInput } from './types';

export interface IFaceLandmarksHelper {
detect: ({ image, threshold }: DetectInput) => Promise<DetectOutput>;
getDetectionInProgress: () => boolean;
getDetections: (image: ImageBitmap | ImageData) => Promise<Array<FaceResult>>;
getDetections: (image: ImageBitmap | ImageData) =>
Promise<{ faceDetections: FaceResult[]; handDetections: HandResult[]; }>;
getFaceBox: (detections: Array<FaceResult>, threshold: number) => FaceBox | undefined;
getFaceCount: (detections: Array<FaceResult>) => number;
getFaceExpression: (detections: Array<FaceResult>) => FaceExpression | undefined;
@@ -19,7 +25,7 @@ export interface IFaceLandmarksHelper {
*/
export class HumanHelper implements IFaceLandmarksHelper {
protected human: Human | undefined;
protected faceDetectionTypes: string[];
protected detectionTypes: string[];
protected baseUrl: string;
private detectionInProgress = false;
private lastValidFaceBox: FaceBox | undefined;
@@ -52,7 +58,17 @@ export class HumanHelper implements IFaceLandmarksHelper {
},
description: { enabled: false }
},
hand: { enabled: false },
hand: {
enabled: false,
rotation: false,
maxDetected: 1,
detector: {
modelPath: 'handtrack.json'
},
skeleton: {
modelPath: 'handlandmark-lite.json'
}
},
gesture: { enabled: false },
body: { enabled: false },
segmentation: { enabled: false }
@@ -65,7 +81,7 @@ export class HumanHelper implements IFaceLandmarksHelper {
* @returns {void}
*/
constructor({ baseUrl, detectionTypes }: InitInput) {
this.faceDetectionTypes = detectionTypes;
this.detectionTypes = detectionTypes;
this.baseUrl = baseUrl;
this.init();
}
@@ -85,18 +101,24 @@ export class HumanHelper implements IFaceLandmarksHelper {
setWasmPaths(this.baseUrl);
}

if (this.faceDetectionTypes.length > 0 && this.config.face) {
if ((this.detectionTypes.includes(DETECTION_TYPES.FACE_BOX)
|| this.detectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS))
&& this.config.face) {
this.config.face.enabled = true;
}

if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX) && this.config.face?.detector) {
if (this.detectionTypes.includes(DETECTION_TYPES.FACE_BOX) && this.config.face?.detector) {
this.config.face.detector.enabled = true;
}

if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS) && this.config.face?.emotion) {
if (this.detectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS) && this.config.face?.emotion) {
this.config.face.emotion.enabled = true;
}

if (this.detectionTypes.includes(DETECTION_TYPES.RAISED_HAND) && this.config.hand) {
this.config.hand.enabled = true;
}

const initialHuman = new Human(this.config);

try {
@@ -159,6 +181,34 @@ export class HumanHelper implements IFaceLandmarksHelper {
}
}

/**
* Check whether the hand is raised from the hand detection result.
*
* @param {Array<HandResult>} handDetections - The array with the hand detections.
* @returns {boolean}
*/
isRaisedHand(handDetections: Array<HandResult>): boolean {
// Only consider the fingers of the hand with the highest confidence score
const [ { landmarks: fingers = undefined, label: handLabel = undefined } = {} ] = handDetections;

if (handLabel !== 'hand') {
return false;
}

const validDirections = [ 'verticalUp', 'diagonalUpRight', 'diagonalUpLeft' ];
let counter = 0;

if (fingers) {
Object.values(fingers).forEach(value => {
if (value.curl === 'none' && validDirections.includes(value.direction)) {
counter += 1;
}
});
}

return counter > 3;
}
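
The heuristic counts a hand as raised when more than three of the five analysed fingers are fully extended (curl === 'none') and point upward or diagonally up. An illustrative input in the shape the heuristic consumes (the values are hypothetical; the per-finger curl/direction analysis comes from @vladmandic/human, and the cast skips the unrelated required fields of HandResult):

const handDetections = [ {
    label: 'hand',
    landmarks: {
        thumb: { curl: 'none', direction: 'diagonalUpLeft' },
        index: { curl: 'none', direction: 'verticalUp' },
        middle: { curl: 'none', direction: 'verticalUp' },
        ring: { curl: 'none', direction: 'verticalUp' },
        pinky: { curl: 'half', direction: 'verticalUp' } // curled, so not counted
    }
} ] as unknown as Array<HandResult>;

// Four of five fingers are extended and pointing up, so counter === 4 > 3
// and isRaisedHand(handDetections) returns true.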

/**
* Gets the face count from the detections, which is the number of detections.
*
@@ -178,21 +228,29 @@ export class HumanHelper implements IFaceLandmarksHelper {
*
* @param {ImageBitmap | ImageData} image - The image captured from the track,
* if OffscreenCanvas available it will be ImageBitmap, otherwise it will be ImageData.
* @returns {Promise<Array<FaceResult>>}
* @returns {Promise<{ faceDetections: Array<FaceResult>, handDetections: Array<HandResult> }>}
*/
async getDetections(image: ImageBitmap | ImageData): Promise<Array<FaceResult>> {
if (!this.human || !this.faceDetectionTypes.length) {
return [];
async getDetections(image: ImageBitmap | ImageData):
Promise<{ faceDetections: Array<FaceResult>; handDetections: Array<HandResult>; }> {
if (!this.human || !this.detectionTypes.length) {
return { faceDetections: [],
handDetections: [] };
}

this.human.tf.engine().startScope();

const imageTensor = this.human.tf.browser.fromPixels(image);
const { face: detections } = await this.human.detect(imageTensor, this.config);
const { face: faceDetections, hand: handDetections } = await this.human.detect(imageTensor, this.config);

this.human.tf.engine().endScope();

return detections.filter(detection => detection.score > FACE_DETECTION_SCORE_THRESHOLD);
const faceDetection = faceDetections.filter(detection => detection.score > FACE_DETECTION_SCORE_THRESHOLD);
const handDetection = handDetections.filter(detection => detection.score > HAND_DETECTION_SCORE_THRESHOLD);

return {
faceDetections: faceDetection,
handDetections: handDetection
};
}

/**
@@ -204,19 +262,20 @@ export class HumanHelper implements IFaceLandmarksHelper {
public async detect({ image, threshold }: DetectInput): Promise<DetectOutput> {
let faceExpression;
let faceBox;
let raisedHand;

this.detectionInProgress = true;

const detections = await this.getDetections(image);
const { faceDetections, handDetections } = await this.getDetections(image);

if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS)) {
faceExpression = this.getFaceExpression(detections);
if (this.detectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS)) {
faceExpression = this.getFaceExpression(faceDetections);
}

if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX)) {
if (this.detectionTypes.includes(DETECTION_TYPES.FACE_BOX)) {
// if more than one face is detected the face centering will be disabled.
if (this.getFaceCount(detections) > 1) {
this.faceDetectionTypes.splice(this.faceDetectionTypes.indexOf(DETECTION_TYPES.FACE_BOX), 1);
if (this.getFaceCount(faceDetections) > 1) {
this.detectionTypes.splice(this.detectionTypes.indexOf(DETECTION_TYPES.FACE_BOX), 1);

// face-box for re-centering
faceBox = {
Expand All @@ -225,17 +284,22 @@ export class HumanHelper implements IFaceLandmarksHelper {
width: 100
};
} else {
faceBox = this.getFaceBox(detections, threshold);
faceBox = this.getFaceBox(faceDetections, threshold);
}

}

if (this.detectionTypes.includes(DETECTION_TYPES.RAISED_HAND)) {
raisedHand = this.isRaisedHand(handDetections);
}

this.detectionInProgress = false;

return {
faceExpression,
faceBox,
faceCount: this.getFaceCount(detections)
faceCount: this.getFaceCount(faceDetections),
raisedHand
};
}

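End to end, the helper now returns face and hand results from a single Human.detect() pass and folds the raised-hand decision into DetectOutput. A hypothetical round trip through the public API of this file (the inline values are illustrative, and model loading is assumed to have finished):

import { DETECTION_TYPES } from './constants';
import { HumanHelper, IFaceLandmarksHelper } from './FaceLandmarksHelper';

const helper: IFaceLandmarksHelper = new HumanHelper({
    baseUrl: '/libs/', // hypothetical path to the deployed models
    detectionTypes: [ DETECTION_TYPES.FACE_EXPRESSIONS, DETECTION_TYPES.RAISED_HAND ]
});

async function classifyFrame(image: ImageBitmap | ImageData) {
    const { faceExpression, faceCount, raisedHand } = await helper.detect({ image, threshold: 10 });

    return { faceExpression, faceCount, raisedHand };
}
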
17 changes: 14 additions & 3 deletions react/features/face-landmarks/constants.ts
@@ -26,6 +26,11 @@ export const FACE_EXPRESSIONS_NAMING_MAPPING = {
*/
export const WEBHOOK_SEND_TIME_INTERVAL = 15000;

/**
* Time in ms used for checking the raised hand duration.
*/
export const RAISED_HAND_DURATION = 2000;

/**
* Type of message sent from main thread to worker that contains init information:
* such as models directory and window screen size.
@@ -49,23 +54,29 @@ export const SEND_IMAGE_INTERVAL_MS = 1000;

/**
* Type of message sent from main thread to worker that contain image data and
* will trigger a response message from the worker containing the detected face(s) info.
* will trigger a response message from the worker containing the detected info.
*/
export const DETECT_FACE = 'DETECT_FACE';
export const DETECT = 'DETECT';

/**
* Available detection types.
*/
export const DETECTION_TYPES = {
FACE_BOX: 'face-box',
FACE_EXPRESSIONS: 'face-expressions'
FACE_EXPRESSIONS: 'face-expressions',
RAISED_HAND: 'raised-hand'
};

/**
* Threshold for detection score of face.
*/
export const FACE_DETECTION_SCORE_THRESHOLD = 0.75;

/**
* Threshold for detection score of hand.
*/
export const HAND_DETECTION_SCORE_THRESHOLD = 0.8;

/**
* Threshold for stopping detection after a certain number of consecutive errors have occurred.
*/
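
Taken together with the detector's debounce: assuming frames are posted at SEND_IMAGE_INTERVAL_MS (1000 ms), positive detections at t = 0 ms arm the timer, t = 1000 ms is still under RAISED_HAND_DURATION, and t = 2000 ms reaches it, so a hand must be recognised in roughly three consecutive frames before raiseHand(true) is dispatched; a single frame without a raised hand resets the count.
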
5 changes: 3 additions & 2 deletions react/features/face-landmarks/faceLandmarksWorker.ts
@@ -1,15 +1,16 @@
import { HumanHelper, IFaceLandmarksHelper } from './FaceLandmarksHelper';
import { DETECT_FACE, INIT_WORKER } from './constants';
import { DETECT, INIT_WORKER } from './constants';

let helper: IFaceLandmarksHelper;

onmessage = async function({ data }: MessageEvent<any>) {
switch (data.type) {
case DETECT_FACE: {
case DETECT: {
if (!helper || helper.getDetectionInProgress()) {
return;
}

// detections include both face detections and hand detections
const detections = await helper.detect(data);

if (detections) {