✨(frontend) add face tracking
Implement a face landmark processor to be able to track faces live.
This commit is contained in:
committed by
aleb_the_flash
parent
f80603b4bb
commit
cd9b80b966
@@ -0,0 +1,251 @@
|
||||
import { ProcessorOptions, Track } from 'livekit-client'
|
||||
import posthog from 'posthog-js'
|
||||
import {
|
||||
FilesetResolver,
|
||||
FaceLandmarker,
|
||||
FaceLandmarkerResult,
|
||||
} from '@mediapipe/tasks-vision'
|
||||
import {
|
||||
CLEAR_TIMEOUT,
|
||||
SET_TIMEOUT,
|
||||
TIMEOUT_TICK,
|
||||
timerWorkerScript,
|
||||
} from './TimerWorker'
|
||||
import {
|
||||
BackgroundProcessorInterface,
|
||||
BackgroundOptions,
|
||||
ProcessorType,
|
||||
} from '.'
|
||||
|
||||
const PROCESSING_WIDTH = 256 * 3
|
||||
const PROCESSING_HEIGHT = 144 * 3
|
||||
|
||||
const FACE_LANDMARKS_CANVAS_ID = 'face-landmarks-local'
|
||||
|
||||
export class FaceLandmarksProcessor implements BackgroundProcessorInterface {
|
||||
options: BackgroundOptions
|
||||
name: string
|
||||
processedTrack?: MediaStreamTrack | undefined
|
||||
|
||||
source?: MediaStreamTrack
|
||||
sourceSettings?: MediaTrackSettings
|
||||
videoElement?: HTMLVideoElement
|
||||
videoElementLoaded?: boolean
|
||||
|
||||
// Canvas containing the video processing result
|
||||
outputCanvas?: HTMLCanvasElement
|
||||
outputCanvasCtx?: CanvasRenderingContext2D
|
||||
|
||||
faceLandmarker?: FaceLandmarker
|
||||
faceLandmarkerResult?: FaceLandmarkerResult
|
||||
|
||||
// The resized image of the video source
|
||||
sourceImageData?: ImageData
|
||||
|
||||
timerWorker?: Worker
|
||||
|
||||
type: ProcessorType
|
||||
|
||||
constructor(opts: BackgroundOptions) {
|
||||
this.name = 'face_landmarks'
|
||||
this.options = opts
|
||||
this.type = ProcessorType.FACE_LANDMARKS
|
||||
}
|
||||
|
||||
static get isSupported() {
|
||||
return true // Face landmarks should work in all modern browsers
|
||||
}
|
||||
|
||||
async init(opts: ProcessorOptions<Track.Kind>) {
|
||||
if (!opts.element) {
|
||||
throw new Error('Element is required for processing')
|
||||
}
|
||||
|
||||
this.source = opts.track as MediaStreamTrack
|
||||
this.sourceSettings = this.source!.getSettings()
|
||||
this.videoElement = opts.element as HTMLVideoElement
|
||||
|
||||
this._createMainCanvas()
|
||||
|
||||
const stream = this.outputCanvas!.captureStream()
|
||||
const tracks = stream.getVideoTracks()
|
||||
if (tracks.length == 0) {
|
||||
throw new Error('No tracks found for processing')
|
||||
}
|
||||
this.processedTrack = tracks[0]
|
||||
|
||||
await this.initFaceLandmarker()
|
||||
this._initWorker()
|
||||
|
||||
posthog.capture('face-landmarks-init')
|
||||
}
|
||||
|
||||
_initWorker() {
|
||||
this.timerWorker = new Worker(timerWorkerScript, {
|
||||
name: 'FaceLandmarks',
|
||||
})
|
||||
this.timerWorker.onmessage = (data) => this.onTimerMessage(data)
|
||||
if (this.videoElementLoaded) {
|
||||
this.timerWorker!.postMessage({
|
||||
id: SET_TIMEOUT,
|
||||
timeMs: 1000 / 30,
|
||||
})
|
||||
} else {
|
||||
this.videoElement!.onloadeddata = () => {
|
||||
this.videoElementLoaded = true
|
||||
this.timerWorker!.postMessage({
|
||||
id: SET_TIMEOUT,
|
||||
timeMs: 1000 / 30,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
onTimerMessage(response: { data: { id: number } }) {
|
||||
if (response.data.id === TIMEOUT_TICK) {
|
||||
this.process()
|
||||
}
|
||||
}
|
||||
|
||||
async initFaceLandmarker() {
|
||||
const vision = await FilesetResolver.forVisionTasks(
|
||||
'https://cdn.jsdelivr.net/npm/@mediapipe/tasks-vision/wasm'
|
||||
)
|
||||
this.faceLandmarker = await FaceLandmarker.createFromOptions(vision, {
|
||||
baseOptions: {
|
||||
modelAssetPath:
|
||||
'https://storage.googleapis.com/mediapipe-models/face_landmarker/face_landmarker/float16/latest/face_landmarker.task',
|
||||
delegate: 'GPU',
|
||||
},
|
||||
runningMode: 'VIDEO',
|
||||
outputFaceBlendshapes: true,
|
||||
outputFacialTransformationMatrixes: true,
|
||||
})
|
||||
}
|
||||
|
||||
async sizeSource() {
|
||||
this.outputCanvasCtx?.drawImage(
|
||||
this.videoElement!,
|
||||
0,
|
||||
0,
|
||||
this.videoElement!.videoWidth,
|
||||
this.videoElement!.videoHeight,
|
||||
0,
|
||||
0,
|
||||
PROCESSING_WIDTH,
|
||||
PROCESSING_HEIGHT
|
||||
)
|
||||
|
||||
this.sourceImageData = this.outputCanvasCtx?.getImageData(
|
||||
0,
|
||||
0,
|
||||
PROCESSING_WIDTH,
|
||||
PROCESSING_HEIGHT
|
||||
)
|
||||
}
|
||||
|
||||
async detectFaces() {
|
||||
const startTimeMs = performance.now()
|
||||
this.faceLandmarkerResult = this.faceLandmarker!.detectForVideo(
|
||||
this.sourceImageData!,
|
||||
startTimeMs
|
||||
)
|
||||
}
|
||||
|
||||
async drawFaceLandmarks() {
|
||||
// Draw the original video frame at the canvas size
|
||||
this.outputCanvasCtx!.drawImage(
|
||||
this.videoElement!,
|
||||
0,
|
||||
0,
|
||||
this.videoElement!.videoWidth,
|
||||
this.videoElement!.videoHeight,
|
||||
0,
|
||||
0,
|
||||
PROCESSING_WIDTH,
|
||||
PROCESSING_HEIGHT
|
||||
)
|
||||
|
||||
if (!this.faceLandmarkerResult?.faceLandmarks) {
|
||||
return
|
||||
}
|
||||
|
||||
// Draw face landmarks
|
||||
this.outputCanvasCtx!.strokeStyle = '#00FF00'
|
||||
this.outputCanvasCtx!.lineWidth = 2
|
||||
|
||||
for (const face of this.faceLandmarkerResult.faceLandmarks) {
|
||||
for (const landmark of face) {
|
||||
// Use the same dimensions as the canvas/video display size
|
||||
const x = landmark.x * PROCESSING_WIDTH
|
||||
const y = landmark.y * PROCESSING_HEIGHT
|
||||
|
||||
this.outputCanvasCtx!.beginPath()
|
||||
this.outputCanvasCtx!.arc(x, y, 2, 0, 2 * Math.PI)
|
||||
this.outputCanvasCtx!.stroke()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async process() {
|
||||
await this.sizeSource()
|
||||
await this.detectFaces()
|
||||
await this.drawFaceLandmarks()
|
||||
|
||||
this.timerWorker!.postMessage({
|
||||
id: SET_TIMEOUT,
|
||||
timeMs: 1000 / 30,
|
||||
})
|
||||
}
|
||||
|
||||
_createMainCanvas() {
|
||||
this.outputCanvas = document.querySelector(
|
||||
`#${FACE_LANDMARKS_CANVAS_ID}`
|
||||
) as HTMLCanvasElement
|
||||
if (!this.outputCanvas) {
|
||||
this.outputCanvas = this._createCanvas(
|
||||
FACE_LANDMARKS_CANVAS_ID,
|
||||
PROCESSING_WIDTH,
|
||||
PROCESSING_HEIGHT
|
||||
)
|
||||
}
|
||||
this.outputCanvasCtx = this.outputCanvas.getContext('2d')!
|
||||
}
|
||||
|
||||
_createCanvas(id: string, width: number, height: number) {
|
||||
const element = document.createElement('canvas')
|
||||
element.setAttribute('id', id)
|
||||
element.setAttribute('width', '' + width)
|
||||
element.setAttribute('height', '' + height)
|
||||
return element
|
||||
}
|
||||
|
||||
update(opts: BackgroundOptions): void {
|
||||
this.options = opts
|
||||
}
|
||||
|
||||
async restart(opts: ProcessorOptions<Track.Kind>) {
|
||||
await this.destroy()
|
||||
return this.init(opts)
|
||||
}
|
||||
|
||||
async destroy() {
|
||||
this.timerWorker?.postMessage({
|
||||
id: CLEAR_TIMEOUT,
|
||||
})
|
||||
|
||||
this.timerWorker?.terminate()
|
||||
this.faceLandmarker?.close()
|
||||
}
|
||||
|
||||
clone() {
|
||||
return new FaceLandmarksProcessor(this.options)
|
||||
}
|
||||
|
||||
serialize() {
|
||||
return {
|
||||
type: this.type,
|
||||
options: this.options,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3,10 +3,12 @@ import { Track, TrackProcessor } from 'livekit-client'
|
||||
import { BackgroundBlurTrackProcessorJsWrapper } from './BackgroundBlurTrackProcessorJsWrapper'
|
||||
import { BackgroundCustomProcessor } from './BackgroundCustomProcessor'
|
||||
import { BackgroundVirtualTrackProcessorJsWrapper } from './BackgroundVirtualTrackProcessorJsWrapper'
|
||||
import { FaceLandmarksProcessor } from './FaceLandmarksProcessor'
|
||||
|
||||
export type BackgroundOptions = {
|
||||
blurRadius?: number
|
||||
imagePath?: string
|
||||
showFaceLandmarks?: boolean
|
||||
}
|
||||
|
||||
export interface ProcessorSerialized {
|
||||
@@ -25,11 +27,12 @@ export interface BackgroundProcessorInterface
|
||||
export enum ProcessorType {
|
||||
BLUR = 'blur',
|
||||
VIRTUAL = 'virtual',
|
||||
FACE_LANDMARKS = 'faceLandmarks'
|
||||
}
|
||||
|
||||
export class BackgroundProcessorFactory {
|
||||
static isSupported() {
|
||||
return ProcessorWrapper.isSupported || BackgroundCustomProcessor.isSupported
|
||||
return ProcessorWrapper.isSupported || BackgroundCustomProcessor.isSupported || FaceLandmarksProcessor.isSupported
|
||||
}
|
||||
|
||||
static getProcessor(
|
||||
@@ -50,6 +53,10 @@ export class BackgroundProcessorFactory {
|
||||
if (BackgroundCustomProcessor.isSupported) {
|
||||
return new BackgroundCustomProcessor(opts)
|
||||
}
|
||||
} else if (type === ProcessorType.FACE_LANDMARKS) {
|
||||
if (FaceLandmarksProcessor.isSupported) {
|
||||
return new FaceLandmarksProcessor(opts)
|
||||
}
|
||||
}
|
||||
return undefined
|
||||
}
|
||||
|
||||
@@ -15,7 +15,7 @@ import { BlurOnStrong } from '@/components/icons/BlurOnStrong'
|
||||
import { useTrackToggle } from '@livekit/components-react'
|
||||
import { Loader } from '@/primitives/Loader'
|
||||
import { useSyncAfterDelay } from '@/hooks/useSyncAfterDelay'
|
||||
import { RiProhibited2Line } from '@remixicon/react'
|
||||
import { RiProhibited2Line, RiUserVoiceLine } from '@remixicon/react'
|
||||
|
||||
enum BlurRadius {
|
||||
NONE = 0,
|
||||
@@ -302,6 +302,49 @@ export const EffectsConfiguration = ({
|
||||
</ToggleButton>
|
||||
</div>
|
||||
</div>
|
||||
<div
|
||||
className={css({
|
||||
marginTop: '1.5rem',
|
||||
})}
|
||||
>
|
||||
<H
|
||||
lvl={3}
|
||||
style={{
|
||||
marginBottom: '1rem',
|
||||
}}
|
||||
variant="bodyXsBold"
|
||||
>
|
||||
{t('faceLandmarks.title')}
|
||||
</H>
|
||||
<div
|
||||
className={css({
|
||||
display: 'flex',
|
||||
gap: '1.25rem',
|
||||
})}
|
||||
>
|
||||
<ToggleButton
|
||||
variant="bigSquare"
|
||||
aria-label={tooltipLabel(ProcessorType.FACE_LANDMARKS, {
|
||||
blurRadius: 0,
|
||||
})}
|
||||
tooltip={tooltipLabel(ProcessorType.FACE_LANDMARKS, {
|
||||
blurRadius: 0,
|
||||
})}
|
||||
isDisabled={processorPendingReveal}
|
||||
onChange={async () =>
|
||||
await toggleEffect(ProcessorType.FACE_LANDMARKS, {
|
||||
blurRadius: 0,
|
||||
})
|
||||
}
|
||||
isSelected={isSelected(ProcessorType.FACE_LANDMARKS, {
|
||||
blurRadius: 0,
|
||||
})}
|
||||
data-attr="toggle-face-landmarks"
|
||||
>
|
||||
<RiUserVoiceLine />
|
||||
</ToggleButton>
|
||||
</div>
|
||||
</div>
|
||||
<div
|
||||
className={css({
|
||||
marginTop: '1.5rem',
|
||||
|
||||
@@ -135,22 +135,27 @@
|
||||
}
|
||||
},
|
||||
"effects": {
|
||||
"activateCamera": "",
|
||||
"notAvailable": "",
|
||||
"heading": "",
|
||||
"activateCamera": "Ihre Kamera ist deaktiviert. Wählen Sie eine Option, um sie zu aktivieren.",
|
||||
"notAvailable": "Videoeffekte werden in Kürze in Ihrem Browser verfügbar sein. Wir arbeiten daran! In der Zwischenzeit können Sie Google Chrome für beste Leistung oder Firefox verwenden :(",
|
||||
"heading": "Unschärfe",
|
||||
"blur": {
|
||||
"title": "",
|
||||
"light": "",
|
||||
"normal": "",
|
||||
"apply": "",
|
||||
"clear": ""
|
||||
"title": "Hintergrundunschärfe",
|
||||
"light": "Leichte Unschärfe",
|
||||
"normal": "Unschärfe",
|
||||
"apply": "Unschärfe aktivieren",
|
||||
"clear": "Unschärfe deaktivieren"
|
||||
},
|
||||
"virtual": {
|
||||
"title": "",
|
||||
"apply": "",
|
||||
"clear": ""
|
||||
"title": "Virtueller Hintergrund",
|
||||
"apply": "Virtuellen Hintergrund aktivieren",
|
||||
"clear": "Virtuellen Hintergrund deaktivieren"
|
||||
},
|
||||
"experimental": ""
|
||||
"faceLandmarks": {
|
||||
"title": "Gesichtsmerkmale",
|
||||
"apply": "Gesichtsmerkmale aktivieren",
|
||||
"clear": "Gesichtsmerkmale deaktivieren"
|
||||
},
|
||||
"experimental": "Experimentelle Funktion. Eine v2 kommt für vollständige Browserunterstützung und verbesserte Qualität."
|
||||
},
|
||||
"sidePanel": {
|
||||
"heading": {
|
||||
|
||||
@@ -149,6 +149,11 @@
|
||||
"apply": "Enable virtual background",
|
||||
"clear": "Disable virtual background"
|
||||
},
|
||||
"faceLandmarks": {
|
||||
"title": "Face landmarks",
|
||||
"apply": "Enable face landmarks",
|
||||
"clear": "Disable face landmarks"
|
||||
},
|
||||
"experimental": "Experimental feature. A v2 is coming for full browser support and improved quality."
|
||||
},
|
||||
"sidePanel": {
|
||||
|
||||
@@ -134,22 +134,27 @@
|
||||
}
|
||||
},
|
||||
"effects": {
|
||||
"activateCamera": "Votre camera est désactivée. Choisissez une option pour l'activer.",
|
||||
"notAvailable": "Les effets vidéo seront bientôt disponible sur votre navigateur. Nous y travaillons ! En attendant, vous pouvez utiliser Google Chrome pour une meilleure performance ou Firefox :(",
|
||||
"activateCamera": "Votre caméra est désactivée. Choisissez une option pour l'activer.",
|
||||
"notAvailable": "Les effets vidéo seront bientôt disponibles sur votre navigateur. Nous y travaillons ! En attendant, vous pouvez utiliser Google Chrome pour de meilleures performances ou Firefox :(",
|
||||
"heading": "Flou",
|
||||
"blur": {
|
||||
"title": "Flou d'arrière-plan",
|
||||
"light": "Léger flou",
|
||||
"light": "Flou léger",
|
||||
"normal": "Flou",
|
||||
"apply": "Appliquer le flou",
|
||||
"apply": "Activer le flou",
|
||||
"clear": "Désactiver le flou"
|
||||
},
|
||||
"virtual": {
|
||||
"title": "Arrière-plan virtuel",
|
||||
"apply": "Appliquer l'arrière plan virtuel",
|
||||
"clear": "Désactiver l'arrière plan virtuel"
|
||||
"apply": "Activer l'arrière-plan virtuel",
|
||||
"clear": "Désactiver l'arrière-plan virtuel"
|
||||
},
|
||||
"experimental": "Fonctionnalité expérimentale. Une v2 arrive pour un support complet sur tous les navigateurs et une meilleur qualité."
|
||||
"faceLandmarks": {
|
||||
"title": "Points du visage",
|
||||
"apply": "Activer les points du visage",
|
||||
"clear": "Désactiver les points du visage"
|
||||
},
|
||||
"experimental": "Fonctionnalité expérimentale. Une v2 arrive pour un support complet des navigateurs et une meilleure qualité."
|
||||
},
|
||||
"sidePanel": {
|
||||
"heading": {
|
||||
|
||||
@@ -134,22 +134,31 @@
|
||||
}
|
||||
},
|
||||
"effects": {
|
||||
"activateCamera": "Uw camera is uitgeschakeld. Kies een optie om hem in te schakelen.",
|
||||
"notAvailable": "Video-effecten zijn binnenkort beschikbaar in uw browser. We werken hier aan! Ondertussen kunt u Chrome gebruiken voor de beste prestaties, of Firefox :(",
|
||||
"activateCamera": "Uw camera is uitgeschakeld. Kies een optie om deze in te schakelen.",
|
||||
"notAvailable": "Video-effecten zijn binnenkort beschikbaar in uw browser. We werken eraan! In de tussentijd kunt u Google Chrome gebruiken voor de beste prestaties of Firefox :(",
|
||||
"heading": "Vervaging",
|
||||
"blur": {
|
||||
"title": "Achtergrond vervagen",
|
||||
"light": "Licht vervagen",
|
||||
"normal": "Vervagen",
|
||||
"apply": "Schakel vervaging in",
|
||||
"clear": "Schakel vervaging uit"
|
||||
"title": "Achtergrondvervaging",
|
||||
"light": "Lichte vervaging",
|
||||
"normal": "Vervaging",
|
||||
"apply": "Vervaging inschakelen",
|
||||
"clear": "Vervaging uitschakelen"
|
||||
},
|
||||
"virtual": {
|
||||
"title": "Virtuele achtergrond",
|
||||
"apply": "Schakel virtuele achtergrond in",
|
||||
"clear": "Schakel virtuele achtergrond uit"
|
||||
"apply": "Virtuele achtergrond inschakelen",
|
||||
"clear": "Virtuele achtergrond uitschakelen"
|
||||
},
|
||||
"experimental": "Experimentele functionaliteit. Een v2 komt er aan met volledige browser-ondersteuning en verbeterde kwaliteit."
|
||||
"faceLandmarks": {
|
||||
"title": "Gezichtskenmerken",
|
||||
"apply": "Gezichtskenmerken inschakelen",
|
||||
"clear": "Gezichtskenmerken uitschakelen",
|
||||
"tooltip": {
|
||||
"apply": "Gezichtskenmerken inschakelen",
|
||||
"clear": "Gezichtskenmerken uitschakelen"
|
||||
}
|
||||
},
|
||||
"experimental": "Experimentele functie. Een v2 komt eraan voor volledige browserondersteuning en verbeterde kwaliteit."
|
||||
},
|
||||
"sidePanel": {
|
||||
"heading": {
|
||||
|
||||
Reference in New Issue
Block a user