From 942b9bb1a2f9051502b6ea8d426b2fa5b5734cf4 Mon Sep 17 00:00:00 2001
From: Eero Häkkinen
Date: Fri, 4 Feb 2022 13:17:37 +0200
Subject: [PATCH] Face detection, background blur and eye gaze correction
 example

---
 index.html | 103 +++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 103 insertions(+)

diff --git a/index.html b/index.html
index c8de8d7..69d1f91 100644
--- a/index.html
+++ b/index.html
@@ -840,6 +840,109 @@

Examples

     }
   });
   await readable.pipeThrough(transformer).pipeTo(generator.writable);
+};
+
+
+// main.js:
+// Open camera.
+const stream = await navigator.mediaDevices.getUserMedia({video: true});
+const [videoTrack] = stream.getVideoTracks();
+
+// Use a video worker and show the result to the user.
+const videoElement = document.querySelector('video');
+const videoWorker = new Worker('video-worker.js');
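+// Transfer the camera track to the worker for processing.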
+videoWorker.postMessage({track: videoTrack}, [videoTrack]);
+const {data} = await new Promise(r => videoWorker.onmessage = r);
+videoElement.srcObject = new MediaStream([data.videoTrack]);
+
+// video-worker.js:
+self.onmessage = async ({data: {track}}) => {
+  // Apply constraints.
+  let customBackgroundBlur = true;
+  let customEyeGazeCorrection = true;
+  let customFaceDetection = false;
+  let faceDetectionMode;
+  const capabilities = track.getCapabilities();
+  if (capabilities.backgroundBlur && capabilities.backgroundBlur.max > 0) {
+    // The platform supports background blurring.
+    // Let's use platform background blurring and skip the custom one.
+    await track.applyConstraints({
+      advanced: [{backgroundBlur: capabilities.backgroundBlur.max}]
+    });
+    customBackgroundBlur = false;
+  } else if ((capabilities.faceDetectionMode || []).includes('contour')) {
+    // The platform supports face contour detection but not background
+    // blurring. Let's use platform face contour detection to aid custom
+    // background blurring.
+    faceDetectionMode ||= 'contour';
+    await track.applyConstraints({
+      advanced: [{faceDetectionMode}]
+    });
+  } else {
+    // The platform does not support background blurring nor face contour
+    // detection. Let's use custom face contour detection to aid custom
+    // background blurring.
+    customFaceDetection = true;
+  }
+  if ((capabilities.eyeGazeCorrection || []).includes(true)) {
+    // The platform supports eye gaze correction.
+    // Let's use platform eye gaze correction and skip the custom one.
+    await track.applyConstraints({
+      advanced: [{eyeGazeCorrection: true}]
+    });
+    customEyeGazeCorrection = false;
+  } else if ((capabilities.faceDetectionLandmarks || []).includes(true)) {
+    // The platform supports face landmark detection but not eye gaze
+    // correction. Let's use platform face landmark detection to aid custom eye
+    // gaze correction.
+    faceDetectionMode ||= 'presence';
+    await track.applyConstraints({
+      advanced: [{
+        faceDetectionLandmarks: true,
+        faceDetectionMode
+      }]
+    });
+  } else {
+    // The platform does not support eye gaze correction nor face landmark
+    // detection. Let's use custom face landmark detection to aid custom eye
+    // gaze correction.
+    customFaceDetection = true;
+  }
+
+  // Load custom libraries which may utilize TensorFlow and/or WASM.
+  const requiredScripts = [].concat(
+    customBackgroundBlur    ? 'background.js' : [],
+    customEyeGazeCorrection ? 'eye-gaze.js'   : [],
+    customFaceDetection     ? 'face.js'       : []
+  );
+  importScripts(...requiredScripts);
+
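+  // Create the processing pipeline: a MediaStreamTrackProcessor exposes the
+  // camera frames and a VideoTrackGenerator produces the processed track,
+  // which is transferred back to main.js for display.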
+  const generator = new VideoTrackGenerator();
+  self.postMessage({videoTrack: generator.track}, [generator.track]);
+  const {readable} = new MediaStreamTrackProcessor({track});
+  const transformer = new TransformStream({
+    async transform(frame, controller) {
+      // Detect faces or retrieve detected faces.
+      const detectedFaces =
+        customFaceDetection
+          ? await detectFaces(frame)
+          : frame.detectedFaces;
+      // Blur the background if needed.
+      if (customBackgroundBlur) {
+        const newFrame = await blurBackground(frame, detectedFaces);
+        frame.close();
+        frame = newFrame;
+      }
+      // Correct the eye gaze if needed.
+      if (customEyeGazeCorrection && (detectedFaces || []).length > 0) {
+        const newFrame = await correctEyeGaze(frame, detectedFaces);
+        frame.close();
+        frame = newFrame;
+      }
+      controller.enqueue(frame);
+    }
+  });
+  await readable.pipeThrough(transformer).pipeTo(generator.writable);
 };
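
For illustration, a minimal sketch of what the custom libraries background.js,
eye-gaze.js and face.js loaded above might provide. The helper names
detectFaces(), blurBackground() and correctEyeGaze() come from the example;
the bodies below are assumptions, not part of this patch.

// face.js: hypothetical sketch of a custom detector. A real implementation
// might run a TensorFlow.js or WASM model on the frame.
async function detectFaces(frame) {
  const bitmap = await createImageBitmap(frame);
  // ... run a detection model on the bitmap here ...
  bitmap.close();
  return [];  // e.g. [{contour: [...], landmarks: [...]}]
}

// background.js: hypothetical sketch which blurs the whole frame; a real
// implementation would keep the regions in detectedFaces sharp.
async function blurBackground(frame, detectedFaces) {
  const canvas = new OffscreenCanvas(frame.displayWidth, frame.displayHeight);
  const ctx = canvas.getContext('2d');
  ctx.filter = 'blur(10px)';
  ctx.drawImage(frame, 0, 0);
  return new VideoFrame(canvas, {timestamp: frame.timestamp});
}

// eye-gaze.js: hypothetical sketch which simply copies the frame; a real
// implementation would warp the eye regions from detectedFaces.
async function correctEyeGaze(frame, detectedFaces) {
  const canvas = new OffscreenCanvas(frame.displayWidth, frame.displayHeight);
  canvas.getContext('2d').drawImage(frame, 0, 0);
  return new VideoFrame(canvas, {timestamp: frame.timestamp});
}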