Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions apps/web/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@
"@hugeicons/core-free-icons": "^3.1.1",
"@hugeicons/react": "^1.1.4",
"@huggingface/transformers": "^3.8.1",
"@mediapipe/face_mesh": "0.4.1657299874",
"@opencut/effects": "workspace:*",
"@opencut/env": "workspace:*",
"@opencut/ui": "workspace:*",
"@radix-ui/react-accordion": "^1.2.12",
Expand Down
50 changes: 0 additions & 50 deletions apps/web/src/lib/effects/definitions/blur.ts

This file was deleted.

15 changes: 2 additions & 13 deletions apps/web/src/lib/effects/definitions/index.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,2 @@
import { hasEffect, registerEffect } from "../registry";
import { blurEffectDefinition } from "./blur";

/** Built-in effect definitions bundled with the app. */
const defaultEffects = [blurEffectDefinition];

/**
 * Register every built-in effect, skipping any type that is
 * already present in the registry.
 */
export function registerDefaultEffects(): void {
  const unregistered = defaultEffects.filter(
    (definition) => !hasEffect({ effectType: definition.type }),
  );
  for (const definition of unregistered) {
    registerEffect({ definition });
  }
}
/** Re-export from @opencut/effects package */
export { registerAllEffects as registerDefaultEffects } from "@opencut/effects";
11 changes: 8 additions & 3 deletions apps/web/src/lib/effects/index.ts
Original file line number Diff line number Diff line change
@@ -1,9 +1,14 @@
import { generateUUID } from "@/utils/id";
import { getEffect } from "./registry";
import type { Effect, EffectParamValues } from "@/types/effects";
import { getEffect } from "@opencut/effects";
import type { Effect, EffectParamValues } from "@opencut/effects";
import type { VisualElement } from "@/types/timeline";

export { getEffect, getAllEffects, hasEffect, registerEffect } from "./registry";
export {
getEffect,
getAllEffects,
hasEffect,
registerEffect,
} from "@opencut/effects";
export { registerDefaultEffects } from "./definitions";

export const EFFECT_TARGET_ELEMENT_TYPES: VisualElement["type"][] = [
Expand Down
39 changes: 8 additions & 31 deletions apps/web/src/lib/effects/registry.ts
Original file line number Diff line number Diff line change
@@ -1,31 +1,8 @@
import type { EffectDefinition } from "@/types/effects";

/** Module-level store mapping effect type ids to their definitions. */
const effectDefinitions = new Map<string, EffectDefinition>();

/** Add (or overwrite) a definition under its `type` key. */
export function registerEffect({
  definition,
}: {
  definition: EffectDefinition;
}): void {
  effectDefinitions.set(definition.type, definition);
}

/** True when a definition has been registered for `effectType`. */
export function hasEffect({ effectType }: { effectType: string }): boolean {
  return effectDefinitions.has(effectType);
}

/**
 * Look up a definition by type.
 * @throws Error when the type has never been registered.
 */
export function getEffect({
  effectType,
}: {
  effectType: string;
}): EffectDefinition {
  const found = effectDefinitions.get(effectType);
  if (found === undefined) {
    throw new Error(`Unknown effect type: ${effectType}`);
  }
  return found;
}

/** Snapshot of every registered definition, in insertion order. */
export function getAllEffects(): EffectDefinition[] {
  return [...effectDefinitions.values()];
}
/** Re-export registry functions from @opencut/effects package */
export {
registerEffect,
hasEffect,
getEffect,
getAllEffects,
clearEffects,
} from "@opencut/effects";
244 changes: 244 additions & 0 deletions apps/web/src/services/face-mesh/face-mesh-provider.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,244 @@
import type { EffectContext } from "@opencut/effects";

/**
* Face mesh detection provider using MediaPipe Face Mesh.
* Lazy-loads the WASM module only when first needed.
* Runs detection per frame and caches results.
*/

import type { FaceMesh as FaceMeshType, Results } from "@mediapipe/face_mesh";

// Singleton MediaPipe instance; null until loadFaceMesh() succeeds.
let faceMeshInstance: FaceMeshType | null = null;
// Guards against concurrent loads; loadFaceMesh() returns null while true.
let isLoading = false;
/** Shared in-flight promise for pending detections — avoids race conditions */
let pendingDetection: Promise<Results> | null = null;
// Resolver/rejecter for the pending detection. MediaPipe reports results via
// a single onResults callback, so these live at module level, not per call.
let pendingResolve: ((results: Results) => void) | null = null;
let pendingReject: ((error: Error) => void) | null = null;
// Timer that rejects the pending detection after DETECTION_TIMEOUT_MS.
let pendingTimeoutId: ReturnType<typeof setTimeout> | null = null;

/** Detection timeout in milliseconds */
const DETECTION_TIMEOUT_MS = 5000;

/** MediaPipe Face Mesh version — must match package.json dependency */
const MEDIAPIPE_VERSION = "0.4.1657299874";

/** Types that MediaPipe FaceMesh accepts (OffscreenCanvas is converted) */
type MediaPipeImageSource =
  | HTMLImageElement
  | HTMLCanvasElement
  | HTMLVideoElement;

/** Cancel the detection-timeout timer, if one is armed. */
function clearPendingTimeout(): void {
  if (pendingTimeoutId === null) {
    return;
  }
  clearTimeout(pendingTimeoutId);
  pendingTimeoutId = null;
}

/** Narrow a CanvasImageSource to the element types MediaPipe can consume. */
function isMediaPipeImageSource(
  source: CanvasImageSource,
): source is MediaPipeImageSource {
  if (source instanceof HTMLImageElement) return true;
  if (source instanceof HTMLCanvasElement) return true;
  return source instanceof HTMLVideoElement;
}

/**
 * Copy an OffscreenCanvas into a regular HTMLCanvasElement, since MediaPipe
 * cannot consume OffscreenCanvas directly.
 * NOTE(review): if the 2d context is unavailable the copy is skipped and a
 * blank canvas is returned — downstream detection will simply find no face.
 */
function toHTMLCanvas(source: OffscreenCanvas): HTMLCanvasElement {
  const target = document.createElement("canvas");
  target.width = source.width;
  target.height = source.height;
  const context = target.getContext("2d");
  context?.drawImage(source, 0, 0);
  return target;
}

/**
 * Coerce an arbitrary CanvasImageSource into something MediaPipe accepts,
 * or null when the source type (ImageBitmap, SVGImageElement, VideoFrame)
 * has no supported conversion.
 */
function prepareSourceForMediaPipe(
  source: CanvasImageSource,
): MediaPipeImageSource | null {
  if (source instanceof OffscreenCanvas) {
    return toHTMLCanvas(source);
  }
  return isMediaPipeImageSource(source) ? source : null;
}

/** Lazy-load MediaPipe Face Mesh WASM module */
async function loadFaceMesh(): Promise<FaceMeshType | null> {
if (faceMeshInstance) return faceMeshInstance;
if (isLoading) return null;

isLoading = true;
try {
const { FaceMesh } = await import("@mediapipe/face_mesh");
const fm = new FaceMesh({
locateFile: (file: string) =>
`https://cdn.jsdelivr.net/npm/@mediapipe/face_mesh@${MEDIAPIPE_VERSION}/${file}`,
});
Comment thread
coderabbitai[bot] marked this conversation as resolved.
fm.setOptions({
maxNumFaces: 1,
refineLandmarks: true,
minDetectionConfidence: 0.5,
minTrackingConfidence: 0.5,
});
fm.onResults((results: Results) => {
if (pendingResolve) {
clearPendingTimeout();
pendingResolve(results);
pendingResolve = null;
pendingReject = null;
}
});
Comment thread
coderabbitai[bot] marked this conversation as resolved.
faceMeshInstance = fm;
return fm;
} catch (err) {
return null;
Comment thread
doananh234 marked this conversation as resolved.
} finally {
isLoading = false;
}
}

/**
 * MediaPipe Face Mesh landmark indices (468-point topology) for the key
 * facial regions the effects pipeline consumes.
 */
const LANDMARK_INDICES = {
  leftCheek: 234,
  rightCheek: 454,
  jawBottom: 152,
  jawLeft: 132,
  jawRight: 361,
  leftEyeCenter: 159,
  rightEyeCenter: 386,
  mouthCenter: 13,
} as const;

/** Highest landmark index dereferenced below; guards against short arrays. */
const MAX_LANDMARK_INDEX = Math.max(...Object.values(LANDMARK_INDICES));

/**
 * Convert MediaPipe face landmarks into the EffectContext shape.
 * Coordinates stay in MediaPipe's normalized space as produced by the model.
 * Returns `faceDetected: false` when the landmark array is too short to
 * contain every index we need (malformed model output), instead of throwing.
 */
function landmarksToContext(
  landmarks: Array<{ x: number; y: number; z: number }>,
): EffectContext {
  if (landmarks.length <= MAX_LANDMARK_INDEX) {
    return { faceDetected: false };
  }

  const lc = landmarks[LANDMARK_INDICES.leftCheek];
  const rc = landmarks[LANDMARK_INDICES.rightCheek];
  const jaw = landmarks[LANDMARK_INDICES.jawBottom];
  const jawL = landmarks[LANDMARK_INDICES.jawLeft];
  const jawR = landmarks[LANDMARK_INDICES.jawRight];

  // Cheek radius is estimated as a fixed fraction of the face width.
  const faceWidth = Math.abs(rc.x - lc.x);
  const cheekRadius = faceWidth * 0.15;

  return {
    faceDetected: true,
    cheekLeft: [lc.x, lc.y],
    cheekRight: [rc.x, rc.y],
    cheekRadius,
    jawPoints: [jaw.x, jaw.y, jawL.x, jawL.y, jawR.x, jawR.y],
  };
}

/**
 * Detect a face in the given image source and return an EffectContext.
 *
 * MediaPipe delivers results through a single shared callback, so only one
 * detection can be in flight at a time; concurrent callers piggyback on the
 * pending detection (and therefore receive the result for that earlier
 * frame's image). On load failure, unsupported source type, timeout, or
 * detection error this resolves to `{ faceDetected: false }` — it never
 * rejects.
 */
export async function detectFace(
  source: CanvasImageSource,
): Promise<EffectContext> {
  const fm = await loadFaceMesh();
  if (!fm) {
    return { faceDetected: false };
  }

  // Convert source to a MediaPipe-compatible format.
  const mediaPipeSource = prepareSourceForMediaPipe(source);
  if (!mediaPipeSource) {
    // ImageBitmap / SVGImageElement / VideoFrame are not supported.
    return { faceDetected: false };
  }

  // Shared conversion of raw results to a context, tolerating empty output.
  const toContext = (results: Results): EffectContext => {
    const landmarks = results?.multiFaceLandmarks?.[0];
    if (!landmarks) {
      return { faceDetected: false };
    }
    return landmarksToContext(landmarks);
  };

  // Piggyback on an in-flight detection. Guard the await: if the shared
  // detection times out or fails, concurrent callers must get
  // { faceDetected: false } rather than an unhandled rejection.
  if (pendingDetection) {
    try {
      return toContext(await pendingDetection);
    } catch {
      return { faceDetected: false };
    }
  }

  // Create a new detection promise with a timeout.
  pendingDetection = new Promise<Results>((resolve, reject) => {
    pendingResolve = resolve;
    pendingReject = reject;

    // Fail the detection if MediaPipe never invokes onResults.
    pendingTimeoutId = setTimeout(() => {
      if (pendingReject) {
        pendingReject(new Error("Face detection timeout"));
        pendingResolve = null;
        pendingReject = null;
        pendingDetection = null;
        pendingTimeoutId = null;
      }
    }, DETECTION_TIMEOUT_MS);

    // `send` can throw synchronously (e.g. on invalid input).
    try {
      fm.send({ image: mediaPipeSource });
    } catch (error) {
      clearPendingTimeout();
      reject(error instanceof Error ? error : new Error(String(error)));
    }
  });

  let results: Results;
  try {
    results = await pendingDetection;
  } catch {
    // Timeout or send failure — reset shared state and degrade gracefully.
    pendingDetection = null;
    pendingResolve = null;
    pendingReject = null;
    return { faceDetected: false };
  }

  clearPendingTimeout();
  pendingDetection = null;
  pendingResolve = null;
  pendingReject = null;

  return toContext(results);
}

/** Whether the MediaPipe module has finished loading (for conditional UI). */
export function isFaceMeshReady(): boolean {
  return faceMeshInstance != null;
}

/**
 * Release MediaPipe resources, failing any detection still in flight so
 * awaiting callers are not left hanging forever.
 */
export function disposeFaceMesh(): void {
  clearPendingTimeout();

  // Settle the pending detection (if any) before tearing the instance down.
  const reject = pendingReject;
  if (reject) {
    pendingReject = null;
    pendingResolve = null;
    pendingDetection = null;
    reject(new Error("Face mesh disposed"));
  }

  const instance = faceMeshInstance;
  if (instance) {
    faceMeshInstance = null;
    instance.close();
  }
}
1 change: 1 addition & 0 deletions apps/web/src/services/face-mesh/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
export { detectFace, isFaceMeshReady, disposeFaceMesh } from "./face-mesh-provider";
1 change: 1 addition & 0 deletions apps/web/src/services/renderer/nodes/effect-layer-node.ts
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ export class EffectLayerNode extends BaseNode<EffectLayerNodeParams> {
effectParams: this.params.effectParams,
width: renderer.width,
height: renderer.height,
time,
}),
}));
const effectResult = webglEffectRenderer.applyEffect({
Expand Down
2 changes: 1 addition & 1 deletion apps/web/src/services/renderer/nodes/image-node.ts
Original file line number Diff line number Diff line change
Expand Up @@ -79,7 +79,7 @@ export class ImageNode extends VisualNode<ImageNodeParams> {

const { source, width, height } = await this.cachedSource;

this.renderVisual({
await this.renderVisual({
renderer,
source,
sourceWidth: width || renderer.width,
Expand Down
Loading