mirror of https://github.com/documenso/documenso.git
fun: sign with nose
@@ -33,7 +33,7 @@ const config = {
     },
     swcPlugins: [['@lingui/swc-plugin', {}]],
   },
-  reactStrictMode: true,
+  reactStrictMode: false,
   transpilePackages: [
     '@documenso/assets',
     '@documenso/ee',
@@ -25,9 +25,13 @@
     "@hookform/resolvers": "^3.1.0",
     "@lingui/macro": "^4.11.3",
     "@lingui/react": "^4.11.3",
+    "@mediapipe/face_mesh": "^0.4.1633559619",
     "@simplewebauthn/browser": "^9.0.1",
     "@simplewebauthn/server": "^9.0.3",
     "@tanstack/react-query": "^4.29.5",
+    "@tensorflow-models/face-landmarks-detection": "^1.0.6",
+    "@tensorflow/tfjs": "^4.22.0",
+    "@tensorflow/tfjs-backend-webgl": "^4.22.0",
     "cookie-es": "^1.0.0",
     "formidable": "^2.1.1",
     "framer-motion": "^10.12.8",
@@ -52,6 +56,7 @@
     "react-hotkeys-hook": "^4.4.1",
     "react-icons": "^4.11.0",
     "react-rnd": "^10.4.1",
+    "react-webcam": "^7.2.0",
     "recharts": "^2.7.2",
     "remeda": "^2.12.1",
     "sharp": "0.32.6",
@@ -25,6 +25,7 @@ import { SignaturePad } from '@documenso/ui/primitives/signature-pad';
 import { useToast } from '@documenso/ui/primitives/use-toast';
 
 import { SigningDisclosure } from '~/components/general/signing-disclosure';
+import { NoseCanvasDrawer } from '~/components/nose-canvas-drawer';
 
 import { useRequiredDocumentAuthContext } from './document-auth-provider';
 import { useRequiredSigningContext } from './provider';
@@ -70,6 +71,8 @@ export const SignatureField = ({
 
   const isLoading = isSignFieldWithTokenLoading || isRemoveSignedFieldWithTokenLoading || isPending;
 
+  const [isDrawing, setIsDrawing] = useState(false);
+
   const [showSignatureModal, setShowSignatureModal] = useState(false);
   const [localSignature, setLocalSignature] = useState<string | null>(null);
 
@@ -225,12 +228,16 @@ export const SignatureField = ({
               <Trans>Signature</Trans>
             </Label>
 
             <SignaturePad
               id="signature"
               className="border-border mt-2 h-44 w-full rounded-md border"
               onChange={(value) => setLocalSignature(value)}
               allowTypedSignature={typedSignatureEnabled}
             />
+            <div className="mt-4">
+              <NoseCanvasDrawer
+                className="h-[320px]"
+                onStart={() => setIsDrawing(true)}
+                onStop={() => setIsDrawing(false)}
+                onCapture={(dataUrl) => {
+                  setLocalSignature(dataUrl);
+                }}
+              />
+            </div>
           </div>
 
           <SigningDisclosure />
@@ -250,7 +257,7 @@ export const SignatureField = ({
           <Button
             type="button"
             className="flex-1"
-            disabled={!localSignature}
+            disabled={!localSignature || isDrawing}
             onClick={() => onDialogSignClick()}
           >
             <Trans>Sign</Trans>
apps/web/src/app/demo/nose-drawer/layout.tsx (new file, +10)
@@ -0,0 +1,10 @@
+import type { Metadata } from 'next';
+
+export const metadata: Metadata = {
+  title: 'Nose Drawing Demo',
+  description: 'Draw with your nose using face detection technology',
+};
+
+export default function NoseDrawerLayout({ children }: { children: React.ReactNode }) {
+  return children;
+}
apps/web/src/app/demo/nose-drawer/page.tsx (new file, +60)
@@ -0,0 +1,60 @@
+'use client';
+
+import { useState } from 'react';
+
+import { NoseCanvasDrawer } from '~/components/nose-canvas-drawer';
+
+export default function NoseDrawerDemo() {
+  const [capturedImage, setCapturedImage] = useState<string | null>(null);
+
+  const handleCapture = (dataUrl: string) => {
+    setCapturedImage(dataUrl);
+  };
+
+  return (
+    <main className="container mx-auto p-4">
+      <div className="mx-auto max-w-4xl">
+        <h1 className="mb-6 text-3xl font-bold">Nose Drawing Demo</h1>
+
+        <div className="space-y-8">
+          {/* Instructions */}
+          <div className="bg-muted rounded-lg p-4">
+            <h2 className="mb-2 font-semibold">How to use:</h2>
+            <ol className="list-inside list-decimal space-y-2">
+              <li>Click "Play" to start your camera</li>
+              <li>Move your nose to draw on the canvas</li>
+              <li>Click "Export as PNG" to save your drawing</li>
+              <li>Use "Clear" to start over</li>
+            </ol>
+          </div>
+
+          {/* Canvas drawer */}
+          <div className="bg-background rounded-lg border p-4">
+            <NoseCanvasDrawer onCapture={handleCapture} />
+          </div>
+
+          {/* Preview captured image */}
+          {capturedImage && (
+            <div className="rounded-lg border p-4">
+              <h2 className="mb-4 font-semibold">Captured Drawing</h2>
+              <img
+                src={capturedImage}
+                alt="Captured nose drawing"
+                className="max-w-full rounded-lg"
+              />
+              <div className="mt-4">
+                <a
+                  href={capturedImage}
+                  download="nose-drawing.png"
+                  className="text-primary hover:underline"
+                >
+                  Download Image
+                </a>
+              </div>
+            </div>
+          )}
+        </div>
+      </div>
+    </main>
+  );
+}
apps/web/src/components/nose-canvas-drawer.tsx (new file, +267)
@@ -0,0 +1,267 @@
+'use client';
+
+import { useEffect, useRef, useState } from 'react';
+
+import * as faceLandmarksDetection from '@tensorflow-models/face-landmarks-detection';
+import * as tf from '@tensorflow/tfjs';
+import '@tensorflow/tfjs-backend-webgl';
+import { Play, Square, X } from 'lucide-react';
+import type { StrokeOptions } from 'perfect-freehand';
+import { getStroke } from 'perfect-freehand';
+import Webcam from 'react-webcam';
+
+import { cn } from '@documenso/ui/lib/utils';
+import { Button } from '@documenso/ui/primitives/button';
+import { getSvgPathFromStroke } from '@documenso/ui/primitives/signature-pad/helper';
+
+export type NoseCanvasDrawerProps = {
+  className?: string;
+  onStart?: () => void;
+  onStop?: () => void;
+  onCapture?: (dataUrl: string) => void;
+};
+
+export const NoseCanvasDrawer = ({
+  className,
+  onStart,
+  onStop,
+  onCapture,
+}: NoseCanvasDrawerProps) => {
+  const $el = useRef<HTMLDivElement>(null);
+
+  const $webcam = useRef<Webcam>(null);
+  const $canvas = useRef<HTMLCanvasElement>(null);
+
+  const $detector = useRef<faceLandmarksDetection.FaceLandmarksDetector | null>(null);
+  const $animationFrameId = useRef<number | null>(null);
+
+  const $previousNosePosition = useRef<{ x: number; y: number } | null>(null);
+  const $lines = useRef<{ x: number; y: number }[]>([]);
+
+  const $scaleFactor = useRef(1);
+
+  const [isPlaying, setIsPlaying] = useState(false);
+  const [isLoading, setIsLoading] = useState(false);
+
+  const onTogglePlayingClick = () => {
+    setIsPlaying((playing) => {
+      if (playing && $animationFrameId.current) {
+        cancelAnimationFrame($animationFrameId.current);
+
+        if ($canvas.current) {
+          const ctx = $canvas.current.getContext('2d');
+
+          if (ctx) {
+            ctx.save();
+
+            onCapture?.($canvas.current.toDataURL('image/png'));
+          }
+
+          $lines.current = [];
+        }
+      }
+
+      return !playing;
+    });
+  };
+
+  const onClearClick = () => {
+    if (isPlaying) {
+      return;
+    }
+
+    if ($canvas.current) {
+      const ctx = $canvas.current.getContext('2d');
+
+      if (ctx) {
+        ctx.clearRect(0, 0, $canvas.current.width, $canvas.current.height);
+        ctx.save();
+
+        onCapture?.($canvas.current.toDataURL('image/png'));
+      }
+    }
+
+    $lines.current = [];
+  };
+
+  const loadModel = async () => {
+    await tf.ready();
+
+    return await faceLandmarksDetection.createDetector(
+      faceLandmarksDetection.SupportedModels.MediaPipeFaceMesh,
+      {
+        runtime: 'mediapipe',
+        solutionPath: 'https://cdn.jsdelivr.net/npm/@mediapipe/face_mesh',
+        refineLandmarks: true,
+        maxFaces: 1,
+      },
+    );
+  };
+
+  const detectAndDraw = async () => {
+    if (!$detector.current || !$canvas.current) {
+      return;
+    }
+
+    const canvas = $canvas.current;
+    const ctx = canvas.getContext('2d');
+
+    if (!ctx) {
+      return;
+    }
+
+    const video = $webcam.current?.video;
+
+    if (!video) {
+      return;
+    }
+
+    if (!isPlaying) {
+      return;
+    }
+
+    console.log('about to predict');
+
+    const predictions = await $detector.current.estimateFaces(video, {
+      flipHorizontal: true,
+      staticImageMode: false,
+    });
+
+    console.log({ predictions });
+
+    if (predictions.length > 0) {
+      const keypoints = predictions[0].keypoints;
+      const nose = keypoints[1]; // Nose tip keypoint
+
+      const currentPosition = {
+        x: nose.x * $scaleFactor.current,
+        y: nose.y * $scaleFactor.current,
+      };
+
+      if ($previousNosePosition.current) {
+        $lines.current.push(currentPosition);
+
+        ctx.restore();
+
+        ctx.imageSmoothingEnabled = true;
+        ctx.imageSmoothingQuality = 'high';
+        ctx.fillStyle = 'red';
+
+        const strokeOptions: StrokeOptions = {
+          size: 5,
+          thinning: 0.25,
+          streamline: 0.5,
+          smoothing: 0.5,
+          end: {
+            taper: 5,
+          },
+        };
+
+        const pathData = new Path2D(getSvgPathFromStroke(getStroke($lines.current, strokeOptions)));
+
+        ctx.fill(pathData);
+
+        ctx.save();
+      }
+
+      $previousNosePosition.current = currentPosition;
+    } else {
+      $previousNosePosition.current = null;
+    }
+
+    $animationFrameId.current = requestAnimationFrame(() => void detectAndDraw());
+  };
+
+  useEffect(() => {
+    setIsLoading(true);
+
+    void loadModel().then((model) => {
+      $detector.current = model;
+      setIsLoading(false);
+    });
+  }, []);
+
+  useEffect(() => {
+    if (isPlaying) {
+      void detectAndDraw();
+
+      onStart?.();
+    } else {
+      onStop?.();
+    }
+  }, [isPlaying]);
+
+  useEffect(() => {
+    if (!$webcam.current?.video) {
+      return;
+    }
+
+    const observer = new ResizeObserver((_entries) => {
+      if ($webcam.current?.video) {
+        const videoWidth = $webcam.current.video.videoWidth;
+        const videoHeight = $webcam.current.video.videoHeight;
+
+        const { width, height } = $webcam.current.video.getBoundingClientRect();
+
+        $scaleFactor.current = Math.min(width / videoWidth, height / videoHeight);
+
+        setIsPlaying(false);
+
+        if ($animationFrameId.current) {
+          cancelAnimationFrame($animationFrameId.current);
+        }
+
+        onClearClick();
+
+        if ($canvas.current) {
+          console.log('resizing canvas');
+          $canvas.current.width = width;
+          $canvas.current.height = height;
+
+          const ctx = $canvas.current.getContext('2d');
+
+          if (ctx) {
+            ctx.moveTo(0, 0);
+
+            ctx.save();
+            ctx.scale(-1, 1);
+            ctx.drawImage($webcam.current.video, 0, 0, width, height);
+            ctx.restore();
+          }
+        }
+      }
+    });
+
+    observer.observe($webcam.current.video);
+
+    return () => {
+      observer.disconnect();
+    };
+  }, []);
+
+  return (
+    <div ref={$el} className={cn('relative inline-block aspect-[4/3] h-full', className)}>
+      <Webcam ref={$webcam} videoConstraints={{ facingMode: 'user' }} className="scale-x-[-1]" />
+
+      <canvas ref={$canvas} className="absolute inset-0 z-10" />
+
+      <div className="absolute bottom-2 right-2 z-20 flex items-center gap-x-2">
+        <Button
+          disabled={isLoading}
+          onClick={onTogglePlayingClick}
+          className="text-primary-foreground/80 h-8 w-8 rounded-full p-0"
+        >
+          {isPlaying ? <Square className="h-4 w-4" /> : <Play className="-mr-0.5 h-4 w-4" />}
+        </Button>
+
+        <Button
+          disabled={isLoading || isPlaying}
+          onClick={onClearClick}
+          className="text-primary-foreground/80 h-8 w-8 rounded-full p-0"
+        >
+          <X className="h-4 w-4" />
+        </Button>
+      </div>
+    </div>
+  );
+};
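Side note (not part of the commit): the stroke rendering in detectAndDraw above works by feeding the accumulated nose positions to perfect-freehand's getStroke and filling the resulting outline on the canvas through a Path2D. A minimal standalone sketch of that step follows, assuming only the public perfect-freehand API; toSvgPath is a hypothetical stand-in for the getSvgPathFromStroke helper imported from @documenso/ui, and drawNoseTrail is an illustrative name, not a function in this commit.

import type { StrokeOptions } from 'perfect-freehand';
import { getStroke } from 'perfect-freehand';

// Hypothetical stand-in for getSvgPathFromStroke: converts the outline polygon
// returned by getStroke into an SVG path string that Path2D can consume.
const toSvgPath = (outline: number[][]): string => {
  if (outline.length === 0) {
    return '';
  }

  const [[startX, startY], ...rest] = outline;

  return rest.reduce((d, [x, y]) => `${d} L ${x} ${y}`, `M ${startX} ${startY}`) + ' Z';
};

// Fill a trail of tracked nose positions as one smoothed stroke on a 2D canvas context.
export const drawNoseTrail = (
  ctx: CanvasRenderingContext2D,
  points: { x: number; y: number }[],
  options: StrokeOptions = { size: 5, thinning: 0.25, streamline: 0.5, smoothing: 0.5 },
) => {
  const outline = getStroke(points, options); // outline polygon around the point trail

  ctx.fillStyle = 'red';
  ctx.fill(new Path2D(toSvgPath(outline)));
};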
apps/web/src/styles/globals.css (new file, +3)
@@ -0,0 +1,3 @@
+.mirror {
+  transform: scaleX(-1);
+}