Commit e123fec · working on improvements
Parent(s): 6525751

Files changed:
- README.md +9 -1
- app.py +34 -68
- client/src/app.tsx +52 -64
- client/src/components/{PoweredBy.tsx → About.tsx} +5 -5
- client/src/hooks/useFaceLandmarkDetection.tsx +100 -251
- client/src/hooks/useFacePokeAPI.ts +0 -4
- client/src/hooks/useMainStore.ts +395 -17
- client/src/layout.tsx +1 -1
- client/src/lib/convertImageToBase64.ts +3 -3
- client/src/lib/facePoke.ts +49 -185
- client/src/lib/mapRange.ts +4 -0
- client/src/types.ts +92 -0
- engine.py +101 -135
- liveportrait/utils/crop.py +1 -1
- loader.py +11 -2
- public/index.js +497 -474
- requirements.txt +18 -7
README.md
CHANGED

@@ -1,6 +1,6 @@
 ---
 title: FacePoke
-emoji: …
+emoji: …
 colorFrom: yellow
 colorTo: red
 sdk: docker

@@ -115,6 +115,14 @@ The project structure is organized as follows:
 - `src/`: TypeScript source files.
 - `public/`: Static assets and built files.
 
+### Increasing the framerate
+
+I am testing various things to increase the framerate.
+
+One project is to only transmit the modified head, instead of the whole image.
+
+Another one is to automatically adapt to the server and network speed.
+
 ## Contributing
 
 Contributions to FacePoke are welcome! Please read our [Contributing Guidelines](CONTRIBUTING.md) for details on how to submit pull requests, report issues, or request features.
app.py
CHANGED

@@ -9,6 +9,8 @@ import sys
 import asyncio
 from aiohttp import web, WSMsgType
 import json
+from json import JSONEncoder
+import numpy as np
 import uuid
 import logging
 import os

@@ -18,16 +20,18 @@ import base64
 import io
 
 from PIL import Image
+
+# by popular demand, let's add support for avif
 import pillow_avif
 
 # Configure logging
-logging.basicConfig(level=logging.…
+logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
 logger = logging.getLogger(__name__)
 
 # Set asyncio logger to DEBUG level
-logging.getLogger("asyncio").setLevel(logging.…
+#logging.getLogger("asyncio").setLevel(logging.INFO)
 
-logger.debug(f"Python version: {sys.version}")
+#logger.debug(f"Python version: {sys.version}")
 
 # SIGSEGV handler
 def SIGSEGV_signal_arises(signalNum, stack):

@@ -43,89 +47,51 @@ from engine import Engine, base64_data_uri_to_PIL_Image
 DATA_ROOT = os.environ.get('DATA_ROOT', '/tmp/data')
 MODELS_DIR = os.path.join(DATA_ROOT, "models")
 
-
+class NumpyEncoder(json.JSONEncoder):
+    def default(self, obj):
+        if isinstance(obj, np.integer):
+            return int(obj)
+        elif isinstance(obj, np.floating):
+            return float(obj)
+        elif isinstance(obj, np.ndarray):
+            return obj.tolist()
+        else:
+            return super(NumpyEncoder, self).default(obj)
 
 async def websocket_handler(request: web.Request) -> web.WebSocketResponse:
-    """
-    Handle WebSocket connections for the FacePoke application.
-
-    Args:
-        request (web.Request): The incoming request object.
-
-    Returns:
-        web.WebSocketResponse: The WebSocket response object.
-    """
     ws = web.WebSocketResponse()
     await ws.prepare(request)
+    engine = request.app['engine']
     try:
        #logger.info("New WebSocket connection established")
-
        while True:
            msg = await ws.receive()
 
-            if msg.type …
-
-                # let's not log user requests, they are heavy
-                #logger.debug(f"Received message: {data}")
-
-                if data['type'] == 'modify_image':
-                    uuid = data.get('uuid')
-                    if not uuid:
-                        logger.warning("Received message without UUID")
+            if msg.type in (WSMsgType.CLOSE, WSMsgType.ERROR):
+                #logger.warning(f"WebSocket connection closed: {msg.type}")
+                break
 
+            try:
+                if msg.type == WSMsgType.BINARY:
+                    res = await engine.load_image(msg.data)
+                    json_res = json.dumps(res, cls=NumpyEncoder)
+                    await ws.send_str(json_res)
 
+                elif msg.type == WSMsgType.TEXT:
+                    data = json.loads(msg.data)
+                    webp_bytes = engine.transform_image(data.get('hash'), data.get('params'))
+                    await ws.send_bytes(webp_bytes)
 
+            except Exception as e:
+                logger.error(f"Error in engine: {str(e)}")
+                logger.exception("Full traceback:")
+                await ws.send_json({"error": str(e)})
 
     except Exception as e:
         logger.error(f"Error in websocket_handler: {str(e)}")
         logger.exception("Full traceback:")
     return ws
 
-async def handle_modify_image(request: web.Request, ws: web.WebSocketResponse, msg: Dict[str, Any], uuid: str):
-    """
-    Handle the 'modify_image' request.
-
-    Args:
-        request (web.Request): The incoming request object.
-        ws (web.WebSocketResponse): The WebSocket response object.
-        msg (Dict[str, Any]): The message containing the image or image_hash and modification parameters.
-        uuid: A unique identifier for the request.
-    """
-    #logger.info("Received modify_image request")
-    try:
-        engine = request.app['engine']
-        image_hash = msg.get('image_hash')
-
-        if image_hash:
-            image_or_hash = image_hash
-        else:
-            image_data = msg['image']
-            image_or_hash = image_data
-
-        modified_image_base64 = await engine.modify_image(image_or_hash, msg['params'])
-
-        await ws.send_json({
-            "type": "modified_image",
-            "image": modified_image_base64,
-            "image_hash": engine.get_image_hash(image_or_hash),
-            "success": True,
-            "uuid": uuid  # Include the UUID in the response
-        })
-        #logger.info("Successfully sent modified image")
-    except Exception as e:
-        #logger.error(f"Error in modify_image: {str(e)}")
-        await ws.send_json({
-            "type": "modified_image",
-            "success": False,
-            "error": str(e),
-            "uuid": uuid  # Include the UUID even in error responses
-        })
-
 async def index(request: web.Request) -> web.Response:
     """Serve the index.html file"""
     content = open(os.path.join(os.path.dirname(__file__), "public", "index.html"), "r").read()
client/src/app.tsx
CHANGED

@@ -4,50 +4,41 @@ import { Download } from 'lucide-react';
 import { Alert, AlertDescription, AlertTitle } from '@/components/ui/alert';
 import { truncateFileName } from './lib/utils';
 import { useFaceLandmarkDetection } from './hooks/useFaceLandmarkDetection';
-import { …
+import { About } from './components/About';
 import { Spinner } from './components/Spinner';
 import { useFacePokeAPI } from './hooks/useFacePokeAPI';
 import { Layout } from './layout';
 import { useMainStore } from './hooks/useMainStore';
-import { convertImageToBase64 } from './lib/convertImageToBase64';
 
 export function App() {
   const error = useMainStore(s => s.error);
   const setError = useMainStore(s => s.setError);
   const imageFile = useMainStore(s => s.imageFile);
   const setImageFile = useMainStore(s => s.setImageFile);
-  const …
-  const …
+  const isGazingAtCursor = useMainStore(s => s.isGazingAtCursor);
+  const setIsGazingAtCursor = useMainStore(s => s.setIsGazingAtCursor);
+  const isFollowingCursor = useMainStore(s => s.isFollowingCursor);
+  const setIsFollowingCursor = useMainStore(s => s.setIsFollowingCursor);
+
   const previewImage = useMainStore(s => s.previewImage);
-  const …
-  const …
-  const setOriginalImageHash = useMainStore(s => s.setOriginalImageHash);
+  const status = useMainStore(s => s.status);
+  const blendShapes = useMainStore(s => s.blendShapes);
 
   const {
-    status,
-    setStatus,
     isDebugMode,
     setIsDebugMode,
     interruptMessage,
   } = useFacePokeAPI()
 
-  // State for face detection
   const {
-    canvasRef,
     canvasRefCallback,
-    mediaPipeRef,
-    faceLandmarks,
     isMediaPipeReady,
-    blendShapes,
-
-    setFaceLandmarks,
-    setBlendShapes,
-
     handleMouseDown,
     handleMouseUp,
     handleMouseMove,
+    handleTouchStart,
+    handleTouchMove,
+    handleTouchEnd,
     currentOpacity
   } = useFaceLandmarkDetection()
 
@@ -55,37 +46,10 @@ export function App() {
   const videoRef = useRef<HTMLDivElement>(null);
 
   // Handle file change
-  const handleFileChange = useCallback(…
+  const handleFileChange = useCallback((event: React.ChangeEvent<HTMLInputElement>) => {
     const files = event.target.files;
-    …
-      setStatus(`File selected: ${truncateFileName(files[0].name, 16)}`);
-
-      try {
-        const image = await convertImageToBase64(files[0]);
-        setPreviewImage(image);
-        setOriginalImage(image);
-        setOriginalImageHash('');
-      } catch (err) {
-        console.log(`failed to convert the image: `, err);
-        setImageFile(null);
-        setStatus('');
-        setPreviewImage('');
-        setOriginalImage('');
-        setOriginalImageHash('');
-        setFaceLandmarks([]);
-        setBlendShapes([]);
-      }
-    } else {
-      setImageFile(null);
-      setStatus('');
-      setPreviewImage('');
-      setOriginalImage('');
-      setOriginalImageHash('');
-      setFaceLandmarks([]);
-      setBlendShapes([]);
-    }
-  }, [isMediaPipeReady, setImageFile, setPreviewImage, setOriginalImage, setOriginalImageHash, setFaceLandmarks, setBlendShapes, setStatus]);
+    setImageFile(files?.[0] || undefined)
+  }, [setImageFile]);
 
   const handleDownload = useCallback(() => {
     if (previewImage) {

@@ -139,7 +103,7 @@ export function App() {
           <div className="mb-5 relative">
             <div className="flex flex-row items-center justify-between w-full">
               <div className="flex items-center space-x-2">
-                <div className="…
+                <div className="flex items-center justify-center">
                 <input
                   id="imageInput"
                   type="file"

@@ -155,7 +119,7 @@ export function App() {
                   } focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-slate-500 shadow-xl`}
                 >
                   <Spinner />
-                  {imageFile ? truncateFileName(imageFile.name, 32) : (isMediaPipeReady ? 'Choose a portrait photo…
+                  {imageFile ? truncateFileName(imageFile.name, 32) : (isMediaPipeReady ? 'Choose a portrait photo' : 'Initializing...')}
                 </label>
               </div>
               {previewImage && (

@@ -168,15 +132,38 @@ export function App() {
               </button>
             )}
           </div>
-          {previewImage && <…
+          {previewImage && <div className="flex items-center space-x-2">
+            {/* experimental features, not active yet */}
+            {/*
+            <label className="mt-4 flex items-center">
+              <input
+                type="checkbox"
+                checked={isGazingAtCursor}
+                onChange={(e) => setIsGazingAtCursor(!isGazingAtCursor)}
+                className="mr-2"
+              />
+              Autotrack eyes
+            </label>
+            <label className="mt-4 flex items-center">
+              <input
+                type="checkbox"
+                checked={isFollowingCursor}
+                onChange={(e) => setIsFollowingCursor(!isFollowingCursor)}
+                className="mr-2"
+              />
+              Autotrack head
+            </label>
+            */}
+            <label className="mt-4 flex items-center">
+              <input
+                type="checkbox"
+                checked={isDebugMode}
+                onChange={(e) => setIsDebugMode(e.target.checked)}
+                className="mr-2"
+              />
+              Show face markers
+            </label>
+          </div>}
         </div>
         {previewImage && (
           <div className="mt-5 relative shadow-2xl rounded-xl overflow-hidden">

@@ -188,11 +175,12 @@ export function App() {
             <canvas
               ref={canvasRefCallback}
               className="absolute top-0 left-0 w-full h-full select-none"
-              onMouseEnter={handleMouseEnter}
-              onMouseLeave={handleMouseLeave}
               onMouseDown={handleMouseDown}
               onMouseUp={handleMouseUp}
               onMouseMove={handleMouseMove}
+              onTouchStart={handleTouchStart}
+              onTouchMove={handleTouchMove}
+              onTouchEnd={handleTouchEnd}
               style={{
                 position: 'absolute',
                 top: 0,

@@ -207,7 +195,7 @@ export function App() {
         )}
         {canDisplayBlendShapes && displayBlendShapes}
       </div>
-      <…
+      <About />
     </Layout>
   );
 }
client/src/components/{PoweredBy.tsx → About.tsx}
RENAMED

@@ -1,11 +1,11 @@
-export function …
+export function About() {
   return (
     <div className="flex flex-row items-center justify-center font-sans mt-4 w-full">
-      …
+      <span className="text-neutral-900 text-sm"
         style={{ textShadow: "rgb(255 255 255 / 80%) 0px 0px 2px" }}>
-        …
-      </span…
-      <span className="mr-1">
+        Click and drag on the image.
+      </span>
+      <span className="ml-2 mr-1">
         <img src="/hf-logo.svg" alt="Hugging Face" className="w-5 h-5" />
       </span>
       <span className="text-neutral-900 text-sm font-semibold"
client/src/hooks/useFaceLandmarkDetection.tsx
CHANGED

@@ -6,59 +6,36 @@ import { useMainStore } from './useMainStore';
 import useThrottledCallback from 'beautiful-react-hooks/useThrottledCallback';
 
 import { landmarkGroups, FACEMESH_LIPS, FACEMESH_LEFT_EYE, FACEMESH_LEFT_EYEBROW, FACEMESH_RIGHT_EYE, FACEMESH_RIGHT_EYEBROW, FACEMESH_FACE_OVAL } from './landmarks';
-
-// New types for improved type safety
-export type LandmarkGroup = 'lips' | 'leftEye' | 'leftEyebrow' | 'rightEye' | 'rightEyebrow' | 'faceOval' | 'background';
-export type LandmarkCenter = { x: number; y: number; z: number };
-export type ClosestLandmark = { group: LandmarkGroup; distance: number; vector: { x: number; y: number; z: number } };
-
-export type MediaPipeResources = {
-  faceLandmarker: vision.FaceLandmarker | null;
-  drawingUtils: vision.DrawingUtils | null;
-};
+import type { ActionMode, ClosestLandmark, LandmarkCenter, LandmarkGroup, MediaPipeResources } from '@/types';
 
 export function useFaceLandmarkDetection() {
-  const error = useMainStore(s => s.error);
   const setError = useMainStore(s => s.setError);
-  const imageFile = useMainStore(s => s.imageFile);
-  const setImageFile = useMainStore(s => s.setImageFile);
-  const originalImage = useMainStore(s => s.originalImage);
-  const originalImageHash = useMainStore(s => s.originalImageHash);
-  const setOriginalImageHash = useMainStore(s => s.setOriginalImageHash);
   const previewImage = useMainStore(s => s.previewImage);
-  const …
-  const …
+  const handleServerResponse = useMainStore(s => s.handleServerResponse);
+  const faceLandmarks = useMainStore(s => s.faceLandmarks);
 
-  ;(window as any).debugJuju = useMainStore;
   ////////////////////////////////////////////////////////////////////////
-  // …
-  // …
-  const …
+  // if we only send the face/square then we can use 138ms
+  // unfortunately it doesn't work well yet
+  // const throttleInMs = 138ms
+  const throttleInMs = 180
   ////////////////////////////////////////////////////////////////////////
 
   // State for face detection
-  const [faceLandmarks, setFaceLandmarks] = useState<vision.NormalizedLandmark[][]>([]);
   const [isMediaPipeReady, setIsMediaPipeReady] = useState(false);
   const [isDrawingUtilsReady, setIsDrawingUtilsReady] = useState(false);
-  const [blendShapes, setBlendShapes] = useState<vision.Classifications[]>([]);
 
   // State for mouse interaction
   const [dragStart, setDragStart] = useState<{ x: number; y: number } | null>(null);
-  const [dragEnd, setDragEnd] = useState<{ x: number; y: number } | null>(null);
 
   const [isDragging, setIsDragging] = useState(false);
-  const [isWaitingForResponse, setIsWaitingForResponse] = useState(false);
   const dragStartRef = useRef<{ x: number; y: number } | null>(null);
-  const currentMousePosRef = useRef<{ x: number; y: number } | null>(null);
-  const lastModifiedImageHashRef = useRef<string | null>(null);
 
   const [currentLandmark, setCurrentLandmark] = useState<ClosestLandmark | null>(null);
   const [previousLandmark, setPreviousLandmark] = useState<ClosestLandmark | null>(null);
   const [currentOpacity, setCurrentOpacity] = useState(0);
   const [previousOpacity, setPreviousOpacity] = useState(0);
 
-  const [isHovering, setIsHovering] = useState(false);
-
   // Refs
   const canvasRef = useRef<HTMLCanvasElement>(null);
   const mediaPipeRef = useRef<MediaPipeResources>({

@@ -222,6 +199,9 @@ export function useFaceLandmarkDetection() {
 
   // Detect face landmarks
   const detectFaceLandmarks = useCallback(async (imageDataUrl: string) => {
+    const { setFaceLandmarks, setBlendShapes } = useMainStore.getState();
+
     // console.log('Attempting to detect face landmarks...');
     if (!isMediaPipeReady) {
       console.log('MediaPipe not ready. Skipping detection.');

@@ -247,6 +227,7 @@ export function useFaceLandmarkDetection() {
     setFaceLandmarks(faceLandmarkerResult.faceLandmarks);
     setBlendShapes(faceLandmarkerResult.faceBlendshapes || []);
 
+
     if (faceLandmarkerResult.faceLandmarks && faceLandmarkerResult.faceLandmarks[0]) {
       computeLandmarkCenters(faceLandmarkerResult.faceLandmarks[0]);
     }

@@ -352,279 +333,147 @@ export function useFaceLandmarkDetection() {
     detectFaceLandmarks(previewImage);
   }, [isMediaPipeReady, isDrawingUtilsReady, previewImage])
 
-
-  const modifyImage = useCallback(({ landmark, vector }: {
-    landmark: ClosestLandmark
-    vector: { x: number; y: number; z: number }
-  }) => {
-
-    const {
-      originalImage,
-      originalImageHash,
-      params: previousParams,
-      setParams,
-      setError
-    } = useMainStore.getState()
-
-    if (!originalImage) {
-      console.error('Image file or facePoke not available');
-      return;
-    }
-
-    const params = {
-      ...previousParams
-    }
-
-    const minX = -0.50;
-    const maxX = 0.50;
-    const minY = -0.50;
-    const maxY = 0.50;
-
-    // Function to map a value from one range to another
-    const mapRange = (value: number, inMin: number, inMax: number, outMin: number, outMax: number): number => {
-      return Math.min(outMax, Math.max(outMin, ((value - inMin) * (outMax - outMin)) / (inMax - inMin) + outMin));
-    };
-
-    console.log("modifyImage:", {
-      originalImage,
-      originalImageHash,
-      landmark,
-      vector,
-      minX,
-      maxX,
-      minY,
-      maxY,
-    })
-
-    // Map landmarks to ImageModificationParams
-    switch (landmark.group) {
-      case 'leftEye':
-      case 'rightEye':
-        // eyebrow (min: -20, max: 5, default: 0)
-        const eyesMin = -20
-        const eyesMax = 5
-        params.eyes = mapRange(-vector.y, minX, maxX, eyesMin, eyesMax);
-        break;
-      case 'leftEyebrow':
-      case 'rightEyebrow':
-        // moving the mouse vertically for the eyebrow
-        // should make them up/down
-        // eyebrow (min: -10, max: 15, default: 0)
-        const eyebrowMin = -10
-        const eyebrowMax = 15
-        params.eyebrow = mapRange(-vector.y, minY, maxY, eyebrowMin, eyebrowMax);
-        break;
-      case 'lips':
-        // aaa (min: -30, max: 120, default: 0)
-        //const aaaMin = -30
-        //const aaaMax = 120
-        //params.aaa = mapRange(vector.x, minY, maxY, aaaMin, aaaMax);
-
-        // eee (min: -20, max: 15, default: 0)
-        const eeeMin = -20
-        const eeeMax = 15
-        params.eee = mapRange(-vector.y, minY, maxY, eeeMin, eeeMax);
-
-        // woo (min: -20, max: 15, default: 0)
-        const wooMin = -20
-        const wooMax = 15
-        params.woo = mapRange(-vector.x, minX, maxX, wooMin, wooMax);
-        break;
-      case 'faceOval':
-        // displacing the face horizontally by moving the mouse on the X axis
-        // should perform a yaw rotation
-        // rotate_roll (min: -20, max: 20, default: 0)
-        const rollMin = -40
-        const rollMax = 40
-
-        // note: we invert the axis here
-        params.rotate_roll = mapRange(vector.x, minX, maxX, rollMin, rollMax);
-        break;
-      case 'background':
-        // displacing the face horizontally by moving the mouse on the X axis
-        // should perform a yaw rotation
-        // rotate_yaw (min: -20, max: 20, default: 0)
-        const yawMin = -40
-        const yawMax = 40
-
-        // note: we invert the axis here
-        params.rotate_yaw = mapRange(-vector.x, minX, maxX, yawMin, yawMax);
-
-        // displacing the face vertically by moving the mouse on the Y axis
-        // should perform a pitch rotation
-        // rotate_pitch (min: -20, max: 20, default: 0)
-        const pitchMin = -40
-        const pitchMax = 40
-        params.rotate_pitch = mapRange(vector.y, minY, maxY, pitchMin, pitchMax);
-        break;
-      default:
-        return
-    }
-
-    for (const [key, value] of Object.entries(params)) {
-      if (isNaN(value as any) || !isFinite(value as any)) {
-        console.log(`${key} is NaN, aborting`)
-        return
-      }
-    }
-    console.log(`PITCH=${params.rotate_pitch || 0}, YAW=${params.rotate_yaw || 0}, ROLL=${params.rotate_roll || 0}`);
-
-    setParams(params)
-    try {
-      // For the first request or when the image file changes, send the full image
-      if (!lastModifiedImageHashRef.current || lastModifiedImageHashRef.current !== originalImageHash) {
-        lastModifiedImageHashRef.current = originalImageHash;
-        facePoke.modifyImage(originalImage, null, params);
-      } else {
-        // For subsequent requests, send only the hash
-        facePoke.modifyImage(null, lastModifiedImageHashRef.current, params);
-      }
-    } catch (error) {
-      // console.error('Error modifying image:', error);
-      setError('Failed to modify image');
-    }
-  }, []);
-
-  // this is throttled by our average latency
   const modifyImageWithRateLimit = useThrottledCallback((params: {
     landmark: ClosestLandmark
     vector: { x: number; y: number; z: number }
+    mode: ActionMode
   }) => {
-    modifyImage(params);
-  }, […
-
-  const handleMouseEnter = useCallback(() => {
-    setIsHovering(true);
-  }, []);
-
-  …
-  }, []);
+    useMainStore.getState().modifyImage(params);
+  }, [], throttleInMs);
 
-  const handleMouseDown = useCallback((event: React.MouseEvent<HTMLCanvasElement>) => {
+  useEffect(() => {
+    facePoke.setOnServerResponse(handleServerResponse);
+  }, [handleServerResponse]);
+
+  const handleStart = useCallback((x: number, y: number, mode: ActionMode) => {
     if (!canvasRef.current) return;
 
     const rect = canvasRef.current.getBoundingClientRect();
-    const …
-    const …
+    const normalizedX = (x - rect.left) / rect.width;
+    const normalizedY = (y - rect.top) / rect.height;
 
-    const landmark = findClosestLandmark(…
-    console.log(`…
+    const landmark = findClosestLandmark(normalizedX, normalizedY);
+    // console.log(`Interaction start on ${landmark.group}`);
     setActiveLandmark(landmark);
-    setDragStart({ x, y });
-    dragStartRef.current = { x, y };
+    setDragStart({ x: normalizedX, y: normalizedY });
+    dragStartRef.current = { x: normalizedX, y: normalizedY };
   }, [findClosestLandmark, setActiveLandmark, setDragStart]);
 
-  const …
+  const handleMove = useCallback((x: number, y: number, mode: ActionMode) => {
     if (!canvasRef.current) return;
 
     const rect = canvasRef.current.getBoundingClientRect();
-    const …
-    const …
+    const normalizedX = (x - rect.left) / rect.width;
+    const normalizedY = (y - rect.top) / rect.height;
+
+    const landmark = findClosestLandmark(
+      normalizedX,
+      normalizedY,
+      dragStart && dragStartRef.current ? currentLandmark?.group : undefined
+    );
+
+    const landmarkData = landmarkCenters[landmark?.group]
+    const vector = landmarkData ? {
+      x: normalizedX - landmarkData.x,
+      y: normalizedY - landmarkData.y,
+      z: 0
+    } : {
+      x: 0.5,
+      y: 0.5,
+      z: 0
+    }
 
-    // only send an API request to modify the image if we are actively dragging
     if (dragStart && dragStartRef.current) {
-
-      const landmark = findClosestLandmark(x, y, currentLandmark?.group);
-
-      console.log(`Dragging mouse (was over ${currentLandmark?.group || 'nothing'}, now over ${landmark.group})`);
-
-      // Compute the vector from the landmark center to the current mouse position
+      setIsDragging(true);
       modifyImageWithRateLimit({
         landmark: currentLandmark || landmark,
-        vector…
-          y: y - landmarkCenters[landmark.group].y,
-          z: 0 // Z is 0 as mouse interaction is 2D
-        }
+        vector,
+        mode
       });
-      setIsDragging(true);
     } else {
-      const landmark = findClosestLandmark(x, y);
-
-      //console.log(`Moving mouse over ${landmark.group}`);
-      // console.log(`Simple mouse move over ${landmark.group}`);
-
-      // we need to be careful here, we don't want to change the active
-      // landmark dynamically if we are busy dragging
-
       if (!currentLandmark || (currentLandmark?.group !== landmark?.group)) {
-        // console.log("setting activeLandmark to ", landmark);
         setActiveLandmark(landmark);
       }
+      modifyImageWithRateLimit({
+        landmark,
+        vector,
+        mode: 'HOVERING'
+      });
     }
-  }, [currentLandmark, dragStart, …
+  }, [currentLandmark, dragStart, setActiveLandmark, setIsDragging, modifyImageWithRateLimit, landmarkCenters]);
 
-  const …
+  const handleEnd = useCallback((x: number, y: number, mode: ActionMode) => {
     if (!canvasRef.current) return;
 
     const rect = canvasRef.current.getBoundingClientRect();
-    const …
-    const …
+    const normalizedX = (x - rect.left) / rect.width;
+    const normalizedY = (y - rect.top) / rect.height;
 
-    // only send an API request to modify the image if we are actively dragging
     if (dragStart && dragStartRef.current) {
-
-      const landmark = findClosestLandmark(x, y, currentLandmark?.group);
-
-      console.log(`Mouse up (was over ${currentLandmark?.group || 'nothing'}, now over ${landmark.group})`);
-
-      // Compute the vector from the landmark center to the current mouse position
+      const landmark = findClosestLandmark(normalizedX, normalizedY, currentLandmark?.group);
+
       modifyImageWithRateLimit({
         landmark: currentLandmark || landmark,
         vector: {
-          x: …
-          y: …
+          x: normalizedX - landmarkCenters[landmark.group].x,
+          y: normalizedY - landmarkCenters[landmark.group].y,
           z: 0
-        }
+        },
+        mode
       });
     }
 
     setIsDragging(false);
     dragStartRef.current = null;
     setActiveLandmark(undefined);
-  }, [currentLandmark, isDragging, modifyImageWithRateLimit, findClosestLandmark, setActiveLandmark, landmarkCenters, …
+  }, [currentLandmark, isDragging, modifyImageWithRateLimit, findClosestLandmark, setActiveLandmark, landmarkCenters, setIsDragging]);
 
-  …
-  }, […
+  const handleMouseDown = useCallback((event: React.MouseEvent<HTMLCanvasElement>) => {
+    const mode: ActionMode = event.button === 0 ? 'PRIMARY' : 'SECONDARY';
+    handleStart(event.clientX, event.clientY, mode);
+  }, [handleStart]);
+
+  const handleMouseMove = useCallback((event: React.MouseEvent<HTMLCanvasElement>) => {
+    const mode: ActionMode = event.buttons === 1 ? 'PRIMARY' : 'SECONDARY';
+    handleMove(event.clientX, event.clientY, mode);
+  }, [handleMove]);
+
+  const handleMouseUp = useCallback((event: React.MouseEvent<HTMLCanvasElement>) => {
+    const mode: ActionMode = event.buttons === 1 ? 'PRIMARY' : 'SECONDARY';
+    handleEnd(event.clientX, event.clientY, mode);
+  }, [handleEnd]);
+
+  const handleTouchStart = useCallback((event: React.TouchEvent<HTMLCanvasElement>) => {
+    const mode: ActionMode = event.touches.length === 1 ? 'PRIMARY' : 'SECONDARY';
+    const touch = event.touches[0];
+    handleStart(touch.clientX, touch.clientY, mode);
+  }, [handleStart]);
+
+  const handleTouchMove = useCallback((event: React.TouchEvent<HTMLCanvasElement>) => {
+    const mode: ActionMode = event.touches.length === 1 ? 'PRIMARY' : 'SECONDARY';
+    const touch = event.touches[0];
+    handleMove(touch.clientX, touch.clientY, mode);
+  }, [handleMove]);
+
+  const handleTouchEnd = useCallback((event: React.TouchEvent<HTMLCanvasElement>) => {
+    const mode: ActionMode = event.changedTouches.length === 1 ? 'PRIMARY' : 'SECONDARY';
+    const touch = event.changedTouches[0];
+    handleEnd(touch.clientX, touch.clientY, mode);
+  }, [handleEnd]);
 
   return {
     canvasRef,
     canvasRefCallback,
     mediaPipeRef,
-    faceLandmarks,
     isMediaPipeReady,
     isDrawingUtilsReady,
-    blendShapes,
-
-    //dragStart,
-    //setDragStart,
-    //dragEnd,
-    //setDragEnd,
-    setFaceLandmarks,
-    setBlendShapes,
 
     handleMouseDown,
     handleMouseUp,
     handleMouseMove,
+    handleTouchStart,
+    handleTouchMove,
+    handleTouchEnd,
 
     currentLandmark,
     currentOpacity,
client/src/hooks/useFacePokeAPI.ts
CHANGED

@@ -1,12 +1,10 @@
 import { useEffect, useState } from "react";
 
 import { facePoke } from "../lib/facePoke";
-import { useMainStore } from "./useMainStore";
 
 export function useFacePokeAPI() {
 
   // State for FacePoke
-  const [status, setStatus] = useState('');
   const [isDebugMode, setIsDebugMode] = useState(false);
   const [interruptMessage, setInterruptMessage] = useState<string | null>(null);
 

@@ -33,8 +31,6 @@ export function useFacePokeAPI() {
 
   return {
     facePoke,
-    status,
-    setStatus,
     isDebugMode,
     setIsDebugMode,
     interruptMessage,
client/src/hooks/useMainStore.ts
CHANGED

@@ -1,32 +1,45 @@
 import { create } from 'zustand'
-
-import …
-
-  …
-  activeLandmark?: ClosestLandmark
-  params: Partial<ImageModificationParams>
+
+import * as vision from '@mediapipe/tasks-vision'
+
+import { truncateFileName } from '@/lib/utils'
+import { convertImageToBase64 } from '@/lib/convertImageToBase64'
+import { type Metadata, type ImageModificationParams, type OnServerResponseParams, type ActionMode, type ImageStateValues, type ClosestLandmark } from '@/types'
+import { mapRange } from '@/lib/mapRange'
+import { facePoke } from '@/lib/facePoke'
+
+
+export type ImageState = ImageStateValues & {
+  setStatus: (status?: string) => void
   setError: (error?: string) => void
+  setFaceLandmarks: (faceLandmarks: vision.NormalizedLandmark[][]) => void
+  setBlendShapes: (blendShapes: vision.Classifications[]) => void
+  setImageFile: (file?: File) => Promise<void>
+  setIsFollowingCursor: (isFollowingCursor: boolean) => void
+  setIsGazingAtCursor: (isGazingAtCursor: boolean) => void
   setOriginalImage: (url: string) => void
   setOriginalImageHash: (hash: string) => void
   setPreviewImage: (url: string) => void
   resetImage: () => void
   setAverageLatency: (averageLatency: number) => void
   setActiveLandmark: (activeLandmark?: ClosestLandmark) => void
+  setMetadata: (metadata?: Metadata) => void
   setParams: (params: Partial<ImageModificationParams>) => void
+  handleServerResponse: (params: OnServerResponseParams) => Promise<void>
+  applyModifiedHeadToCanvas: (headImageBlob: Blob) => Promise<string>
+  modifyImage: ({ landmark, vector, mode }: {
+    landmark: ClosestLandmark
+    vector: { x: number; y: number; z: number }
+    mode: ActionMode
+  }) => Promise<void>
 }
 
-export const …
+export const getDefaultState = (): ImageStateValues => ({
+  status: '',
   error: '',
   imageFile: null,
+  isFollowingCursor: false,
+  isGazingAtCursor: false,
   originalImage: '',
   originalImageHash: '',
   previewImage: '',

@@ -34,9 +47,56 @@ export const useMainStore = create<ImageState>((set, get) => ({
   averageLatency: 190, // this should be the average for most people
   maxLatency: 4000, // max time between requests
   activeLandmark: undefined,
+  metadata: {
+    center: [0, 0],
+    size: 0,
+    bbox: [[0,0],[0,0],[0,0],[0,0]],
+    angle: 0,
+  },
   params: {},
+  faceLandmarks: [],
+  blendShapes: [],
+})
+
+export const useMainStore = create<ImageState>((set, get) => ({
+  ...getDefaultState(),
+  setStatus: (status: string = '') => set({ status }),
   setError: (error: string = '') => set({ error }),
-  …
+  setFaceLandmarks: (faceLandmarks: vision.NormalizedLandmark[][]) => {
+    set({ faceLandmarks })
+  },
+  setBlendShapes: (blendShapes: vision.Classifications[]) => {
+    set({ blendShapes })
+  },
+  setImageFile: async (file?: File) => {
+    if (!file) {
+      set({
+        ...getDefaultState(),
+        status: 'No file selected',
+      })
+      return;
+    }
+
+    try {
+      const image = await convertImageToBase64(file);
+      set({
+        ...getDefaultState(),
+        imageFile: file,
+        status: `File selected: ${truncateFileName(file.name, 16)}`,
+        previewImage: image,
+        originalImage: image,
+      })
+      facePoke.loadImage(image);
+    } catch (err) {
+      console.log(`failed to load the image: `, err);
+      set({
+        ...getDefaultState(),
+        status: 'Failed to load the image',
+      })
+    }
+  },
+  setIsFollowingCursor: (isFollowingCursor: boolean) => set({ isFollowingCursor }),
+  setIsGazingAtCursor: (isGazingAtCursor: boolean) => set({ isGazingAtCursor }),
   setOriginalImage: (url) => set({ originalImage: url }),
   setOriginalImageHash: (originalImageHash) => set({ originalImageHash }),
   setPreviewImage: (url) => set({ previewImage: url }),

@@ -48,6 +108,11 @@ export const useMainStore = create<ImageState>((set, get) => ({
   },
   setAverageLatency: (averageLatency: number) => set({ averageLatency }),
   setActiveLandmark: (activeLandmark?: ClosestLandmark) => set({ activeLandmark }),
+  setMetadata: (metadata?: Metadata) => set(metadata ? {
+    metadata
+  } : {
+    metadata: getDefaultState().metadata,
+  }),
   setParams: (params: Partial<ImageModificationParams>) => {
     const {params: previousParams } = get()
     set({ params: {

@@ -55,4 +120,317 @@ export const useMainStore = create<ImageState>((set, get) => ({
     ...params
   }})
   },
+  handleServerResponse: async (params: OnServerResponseParams) => {
+    const { originalImage, setMetadata, setPreviewImage, setOriginalImageHash, applyModifiedHeadToCanvas, modifyImage } = useMainStore.getState();
+    if (typeof params.error === "string") {
+      console.error(`handleServerResponse: failed to perform the request, resetting the app (${params.error})`)
+      setPreviewImage(originalImage)
+      setOriginalImageHash('')
+    } else if (typeof params.image !== "undefined") {
+
+      // this is where we decide to paste back the image as a whole,
+      // or apply some shenanigans to only paste back the head.
+      // the part about the head is not done yet, so we do it all for now.
+
+      // --- old way: use it whole ---
+      const image = await convertImageToBase64(params.image);
+
+      // --- future way: try to only apply the head ---
+      // const image = await applyModifiedHeadToCanvas(params.image);
+
+      setPreviewImage(image);
+    } else if (typeof params.loaded !== "undefined") {
+      //console.log(`handleServerResponse: received a json`, params)
+      setOriginalImageHash(params.loaded.h)
+      setMetadata({
+        center: params.loaded.c, // center - 2x1
+        size: params.loaded.s, // size - scalar
+        bbox: params.loaded.b, // bbox - 4x2
+        angle: params.loaded.a, // angle - rad, counterclockwise
+      })
+
+      // right after we received the hash, we perform a first blank request
+      await modifyImage({
+        landmark: {
+          group: 'background',
+          distance: 0,
+          vector: { x: 0.5, y: 0.5, z: 0 }
+        },
+        vector: { x: 0, y: 0, z: 0 },
+        mode: 'PRIMARY'
+      })
+    } else {
+      console.log(`handleServerResponse: received an unknown json`, params)
+    }
+  },
+
+  applyModifiedHeadToCanvas: async (headImageBlob: Blob): Promise<string> => {
+    return new Promise(async (resolve, reject) => {
+      const originalImg = new Image();
+
+      const { originalImage, metadata } = useMainStore.getState();
+      originalImg.onload = async () => {
+        const canvas = document.createElement('canvas');
+        const ctx = canvas.getContext('2d');
+
+        if (!ctx) {
+          reject(new Error('Failed to get 2D context'));
+          return;
+        }
+
+        // Get device pixel ratio
+        const pixelRatio = window.devicePixelRatio || 1;
+
+        canvas.width = originalImg.width;
+        canvas.height = originalImg.height;
+
+        // Draw original image
+        ctx.drawImage(originalImg, 0, 0);
+
+        const headImageBitmap = await createImageBitmap(headImageBlob, {
+          resizeQuality: 'high'
+        });
+
+        // Create a temporary canvas for the head image with gradient
+        const tempCanvas = document.createElement('canvas');
+        const tempCtx = tempCanvas.getContext('2d');
+
+        if (!tempCtx) {
+          reject(new Error('Failed to get 2D context for temporary canvas'));
+          return;
+        }
+
+        tempCanvas.width = headImageBitmap.width;
+        tempCanvas.height = headImageBitmap.height;
+
+        // Draw the head image on the temporary canvas
+        tempCtx.drawImage(headImageBitmap, 0, 0);
+
+        // Create gradient mask
+        const gradientSize = 20; // Size of the gradient in pixels
+        const gradient = tempCtx.createRadialGradient(
+          tempCanvas.width / 2, tempCanvas.height / 2, Math.min(tempCanvas.width, tempCanvas.height) / 2 - gradientSize,
+          tempCanvas.width / 2, tempCanvas.height / 2, Math.min(tempCanvas.width, tempCanvas.height) / 2
+        );
+
+        gradient.addColorStop(0, 'rgba(0, 0, 0, 1)');
+        gradient.addColorStop(1, 'rgba(0, 0, 0, 0)');
+
+        // Apply gradient mask
+        tempCtx.globalCompositeOperation = 'destination-in';
+        tempCtx.fillStyle = gradient;
+        tempCtx.fillRect(0, 0, tempCanvas.width, tempCanvas.height);
+
+        console.log("metadata:", metadata);
+        ctx.save();
+        ctx.rotate(metadata.angle);
+
+        // TODO: this is where we need to grab the coordinates from the metadata and start drawing our image
+        // now the issue is that there is an angle, so we need to see how this impacts the transformation
+        ctx.restore();
+        /*
+        TODO finish the implementation
+
+        console.log("metadata:", metadata);
+        ctx.save();
+        ctx.rotate(metadata.angle);
[diff truncated]
+
|
| 238 |
+
// TODO we need the coordinate inside the final image
|
| 239 |
+
ctx.drawImage(
|
| 240 |
+
tempCanvas,
|
| 241 |
+
topLeftCornerX,
|
| 242 |
+
topLeftCornerY,
|
| 243 |
+
width,
|
| 244 |
+
height,
|
| 245 |
+
);
|
| 246 |
+
|
| 247 |
+
ctx.restore();
|
| 248 |
+
*/
|
| 249 |
+
|
| 250 |
+
|
| 251 |
+
// Our head image already contains compression artifacts,
|
| 252 |
+
// so let's avoid double-encoding in here and use PNG
|
| 253 |
+
resolve(canvas.toDataURL('image/png'));
|
| 254 |
+
};
|
| 255 |
+
|
| 256 |
+
originalImg.src = originalImage;
|
| 257 |
+
})
|
| 258 |
+
},
|
| 259 |
+
modifyImage: async ({ landmark, vector, mode }: {
|
| 260 |
+
landmark: ClosestLandmark
|
| 261 |
+
vector: { x: number; y: number; z: number }
|
| 262 |
+
mode: ActionMode
|
| 263 |
+
}): Promise<void> => {
|
| 264 |
+
|
| 265 |
+
const {
|
| 266 |
+
originalImage,
|
| 267 |
+
originalImageHash,
|
| 268 |
+
params: previousParams,
|
| 269 |
+
setParams,
|
| 270 |
+
setError,
|
| 271 |
+
isFollowingCursor,
|
| 272 |
+
isGazingAtCursor
|
| 273 |
+
} = get()
|
| 274 |
+
|
| 275 |
+
|
| 276 |
+
if (!originalImage) {
|
| 277 |
+
console.error('Image file or facePoke not available');
|
| 278 |
+
return;
|
| 279 |
+
}
|
| 280 |
+
|
| 281 |
+
const params = {
|
| 282 |
+
...previousParams
|
| 283 |
+
}
|
| 284 |
+
|
| 285 |
+
const generalControl = {
|
| 286 |
+
minX: -0.30,
|
| 287 |
+
maxX: 0.30,
|
| 288 |
+
minY: -0.30,
|
| 289 |
+
maxY: 0.30
|
| 290 |
+
}
|
| 291 |
+
|
| 292 |
+
const pupilControl = {
|
| 293 |
+
minX: -0.50,
|
| 294 |
+
maxX: 0.50,
|
| 295 |
+
minY: -0.50,
|
| 296 |
+
maxY: 0.50
|
| 297 |
+
}
|
| 298 |
+
|
| 299 |
+
const eyeControl = {
|
| 300 |
+
minX: -0.50,
|
| 301 |
+
maxX: 0.50,
|
| 302 |
+
minY: -0.50,
|
| 303 |
+
maxY: 0.50
|
| 304 |
+
}
|
| 305 |
+
|
| 306 |
+
// for the min and max value, we can look here:
|
| 307 |
+
// https://github.com/fofr/cog-expression-editor/blob/main/predict.py
|
| 308 |
+
|
| 309 |
+
// regardless of the landmark, those rule will apply
|
| 310 |
+
if (isFollowingCursor) {
|
| 311 |
+
// displacing the face horizontally by moving the mouse on the X axis
|
| 312 |
+
// should perform a yaw rotation
|
| 313 |
+
// rotate_yaw (min: -20, max: 20, default: 0)
|
| 314 |
+
const yawMin = -40
|
| 315 |
+
const yawMax = 40
|
| 316 |
+
|
| 317 |
+
// note: we invert the axis here
|
| 318 |
+
params.rotate_yaw = mapRange(-vector.x, generalControl.minX, generalControl.maxX, yawMin, yawMax);
|
| 319 |
+
|
| 320 |
+
// displacing the face vertically by moving the mouse on the Y axis
|
| 321 |
+
// should perform a pitch rotation
|
| 322 |
+
// rotate_pitch (min: -20, max: 20, default: 0)
|
| 323 |
+
const pitchMin = -40
|
| 324 |
+
const pitchMax = 40
|
| 325 |
+
params.rotate_pitch = mapRange(vector.y, generalControl.minY, generalControl.maxY, pitchMin, pitchMax);
|
| 326 |
+
}
|
| 327 |
+
|
| 328 |
+
if (isGazingAtCursor) {
|
| 329 |
+
const pupilsXMin = -15
|
| 330 |
+
const pupilsXMax = 15
|
| 331 |
+
params.pupil_x = mapRange(vector.x, pupilControl.minX, pupilControl.maxX, pupilsXMin, pupilsXMax);
|
| 332 |
+
const pupilsYMin = -2 // -15
|
| 333 |
+
const pupilsYMax = 8 // 15
|
| 334 |
+
params.pupil_y = mapRange(-vector.y, pupilControl.minY, pupilControl.maxY, pupilsYMin, pupilsYMax);
|
| 335 |
+
}
|
| 336 |
+
|
| 337 |
+
// if the user clicked on the primary or secondary button,
|
| 338 |
+
// then it triggers some more things
|
| 339 |
+
if (mode !== 'HOVERING') {
|
| 340 |
+
switch (landmark.group) {
|
| 341 |
+
case 'leftEye':
|
| 342 |
+
case 'rightEye':
|
| 343 |
+
// if (mode === 'PRIMARY') {
|
| 344 |
+
const pupilsXMin = -15
|
| 345 |
+
const pupilsXMax = 15
|
| 346 |
+
params.pupil_x = mapRange(vector.x, pupilControl.minX, pupilControl.maxX, pupilsXMin, pupilsXMax);
|
| 347 |
+
|
| 348 |
+
const eyesMin = -20
|
| 349 |
+
const eyesMax = 5
|
| 350 |
+
params.eyes = mapRange(-vector.y, eyeControl.minX, eyeControl.maxX, eyesMin, eyesMax);
|
| 351 |
+
//}
|
| 352 |
+
|
| 353 |
+
break;
|
| 354 |
+
case 'leftEyebrow':
|
| 355 |
+
case 'rightEyebrow':
|
| 356 |
+
// moving the mouse vertically for the eyebrow
|
| 357 |
+
// should make them up/down
|
| 358 |
+
// eyebrow (min: -10, max: 15, default: 0)
|
| 359 |
+
const eyebrowMin = -10
|
| 360 |
+
const eyebrowMax = 15
|
| 361 |
+
params.eyebrow = mapRange(-vector.y, eyeControl.minY, eyeControl.maxY, eyebrowMin, eyebrowMax);
|
| 362 |
+
|
| 363 |
+
break;
|
| 364 |
+
case 'lips':
|
| 365 |
+
// aaa (min: -30, max: 120, default: 0)
|
| 366 |
+
const aaaMin = -30
|
| 367 |
+
const aaaMax = 120
|
| 368 |
+
params.aaa = mapRange(-vector.y, eyeControl.minY, eyeControl.maxY, aaaMin, aaaMax);
|
| 369 |
+
|
| 370 |
+
// eee (min: -20, max: 15, default: 0)
|
| 371 |
+
const eeeMin = -20
|
| 372 |
+
const eeeMax = 15
|
| 373 |
+
params.eee = mapRange(vector.x, eyeControl.minX, eyeControl.maxX, eeeMin, eeeMax);
|
| 374 |
+
|
| 375 |
+
// woo (min: -20, max: 15, default: 0)
|
| 376 |
+
//const wooMin = -20
|
| 377 |
+
//const wooMax = 15
|
| 378 |
+
//params.woo = mapRange(-vector.x, eyeControl.minX, eyeControl.maxX, wooMin, wooMax);
|
| 379 |
+
|
| 380 |
+
break;
|
| 381 |
+
case 'faceOval':
|
| 382 |
+
// displacing the face horizontally by moving the mouse on the X axis
|
| 383 |
+
// should perform a yaw rotation
|
| 384 |
+
// rotate_roll (min: -20, max: 20, default: 0)
|
| 385 |
+
const rollMin = -40
|
| 386 |
+
const rollMax = 40
|
| 387 |
+
|
| 388 |
+
// note: we invert the axis here
|
| 389 |
+
params.rotate_roll = mapRange(vector.x, eyeControl.minX, eyeControl.maxX, rollMin, rollMax);
|
| 390 |
+
break;
|
| 391 |
+
|
| 392 |
+
case 'background':
|
| 393 |
+
// displacing the face horizontally by moving the mouse on the X axis
|
| 394 |
+
// should perform a yaw rotation
|
| 395 |
+
// rotate_yaw (min: -20, max: 20, default: 0)
|
| 396 |
+
const yawMin = -40
|
| 397 |
+
const yawMax = 40
|
| 398 |
+
|
| 399 |
+
// note: we invert the axis here
|
| 400 |
+
params.rotate_yaw = mapRange(-vector.x, generalControl.minX, generalControl.maxX, yawMin, yawMax);
|
| 401 |
+
|
| 402 |
+
// displacing the face vertically by moving the mouse on the Y axis
|
| 403 |
+
// should perform a pitch rotation
|
| 404 |
+
// rotate_pitch (min: -20, max: 20, default: 0)
|
| 405 |
+
const pitchMin = -40
|
| 406 |
+
const pitchMax = 40
|
| 407 |
+
params.rotate_pitch = mapRange(vector.y, eyeControl.minY, eyeControl.maxY, pitchMin, pitchMax);
|
| 408 |
+
break;
|
| 409 |
+
default:
|
| 410 |
+
return
|
| 411 |
+
}
|
| 412 |
+
}
|
| 413 |
+
|
| 414 |
+
for (const [key, value] of Object.entries(params)) {
|
| 415 |
+
if (isNaN(value as any) || !isFinite(value as any)) {
|
| 416 |
+
console.log(`${key} is NaN, aborting`)
|
| 417 |
+
return
|
| 418 |
+
}
|
| 419 |
+
}
|
| 420 |
+
|
| 421 |
+
//console.log(`PITCH=${params.rotate_pitch || 0}, YAW=${params.rotate_yaw || 0}, ROLL=${params.rotate_roll || 0}`);
|
| 422 |
+
|
| 423 |
+
setParams(params)
|
| 424 |
+
|
| 425 |
+
try {
|
| 426 |
+
|
| 427 |
+
if (originalImageHash) {
|
| 428 |
+
facePoke.transformImage(originalImageHash, params);
|
| 429 |
+
}
|
| 430 |
+
|
| 431 |
+
} catch (error) {
|
| 432 |
+
// console.error('Error modifying image:', error);
|
| 433 |
+
setError('Failed to modify image');
|
| 434 |
+
}
|
| 435 |
+
}
|
| 436 |
}))
|
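For context, here is a minimal sketch of how a caller could drive `modifyImage`. The real hit-testing lives in the face-landmark hook, so the landmark construction and the [0, 1] normalization below are assumptions for illustration, not the commit's actual event code:

```ts
import { useMainStore } from '@/hooks/useMainStore';

// Hypothetical pointer handler: normalize the cursor into the canvas,
// then let the store decide which params to update and send.
function onPointerMove(e: PointerEvent, canvas: HTMLCanvasElement) {
  const rect = canvas.getBoundingClientRect();
  const x = (e.clientX - rect.left) / rect.width;  // 0..1
  const y = (e.clientY - rect.top) / rect.height;  // 0..1

  useMainStore.getState().modifyImage({
    // 'background' is the catch-all group; the real code picks the
    // closest MediaPipe landmark group instead
    landmark: { group: 'background', distance: 0, vector: { x, y, z: 0 } },
    // the drag delta that mapRange later converts into rotation degrees
    vector: { x: x - 0.5, y: y - 0.5, z: 0 },
    mode: e.buttons === 1 ? 'PRIMARY' : 'HOVERING',
  });
}
```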
client/src/layout.tsx
CHANGED

@@ -5,7 +5,7 @@ export function Layout({ children }: { children: ReactNode }) {
     <div className="fixed min-h-screen w-full flex items-center justify-center bg-gradient-to-br from-gray-300 to-stone-300"
       style={{ boxShadow: "inset 0 0 10vh 0 rgb(0 0 0 / 30%)" }}>
       <div className="min-h-screen w-full py-8 flex flex-col justify-center">
-        <div className="…
+        <div className="flex flex-col items-center justify-center p-4 sm:max-w-5xl sm:mx-auto">
           {children}
         </div>
       </div>
client/src/lib/convertImageToBase64.ts
CHANGED

@@ -1,4 +1,4 @@
-export async function convertImageToBase64(…
+export async function convertImageToBase64(imageFileOrBlob: File | Blob): Promise<string> {
   return new Promise((resolve, reject) => {
     const reader = new FileReader();

@@ -11,9 +11,9 @@ export async function convertImageToBase64(imageFile: File): Promise<string> {
     };

     reader.onerror = () => {
-      reject(new Error('Error reading file'));
+      reject(new Error('Error reading file or blob'));
     };

-    reader.readAsDataURL(…
+    reader.readAsDataURL(imageFileOrBlob);
  });
}
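A quick usage sketch of the widened signature (function and variable names here are illustrative): the same helper now serves both the file picker and the binary WebSocket frames:

```ts
import { convertImageToBase64 } from '@/lib/convertImageToBase64';

// Works for a user-selected File...
async function previewFile(file: File, img: HTMLImageElement) {
  img.src = await convertImageToBase64(file);
}

// ...and for the WebP Blob coming back from the server.
async function previewBlob(blob: Blob, img: HTMLImageElement) {
  img.src = await convertImageToBase64(blob); // yields a data URL usable as <img src>
}
```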
client/src/lib/facePoke.ts
CHANGED

@@ -1,153 +1,35 @@
-import { …
-import { CircularBuffer } from './circularBuffer';
-import { useMainStore } from '@/hooks/useMainStore';
+import { WebSocketState, type ImageModificationParams, type OnServerResponse } from "@/types";

-/**
- * Represents a tracked request with its UUID and timestamp.
- */
-export interface TrackedRequest {
-  uuid: string;
-  timestamp: number;
-}
-
-/**
- * Represents the parameters for image modification.
- */
-export interface ImageModificationParams {
-  eyes: number;
-  eyebrow: number;
-  wink: number;
-  pupil_x: number;
-  pupil_y: number;
-  aaa: number;
-  eee: number;
-  woo: number;
-  smile: number;
-  rotate_pitch: number;
-  rotate_yaw: number;
-  rotate_roll: number;
-}
-
-/**
- * Represents a message to modify an image.
- */
-export interface ModifyImageMessage {
-  type: 'modify_image';
-  image?: string;
-  image_hash?: string;
-  params: Partial<ImageModificationParams>;
-}
-
-/**
- * Callback type for handling modified images.
- */
-type OnModifiedImage = (image: string, image_hash: string) => void;
-
-/**
- * Enum representing the different states of a WebSocket connection.
- */
-enum WebSocketState {
-  CONNECTING = 0,
-  OPEN = 1,
-  CLOSING = 2,
-  CLOSED = 3
-}

 /**
  * FacePoke class manages the WebSocket connection
  */
 export class FacePoke {
   private ws: WebSocket | null = null;
-  private readonly connectionId: string = uuidv4();
   private isUnloading: boolean = false;
-  private …
+  private onServerResponse: OnServerResponse = async () => {};
   private reconnectAttempts: number = 0;
   private readonly maxReconnectAttempts: number = 5;
   private readonly reconnectDelay: number = 5000;
   private readonly eventListeners: Map<string, Set<Function>> = new Map();

-  private requestTracker: Map<string, TrackedRequest> = new Map();
-  private responseTimeBuffer: CircularBuffer<number>;
-  private readonly MAX_TRACKED_TIMES = 5; // Number of recent response times to track
-
   /**
    * Creates an instance of FacePoke.
    * Initializes the WebSocket connection.
    */
   constructor() {
-    console.log(`[FacePoke] Initializing FacePoke instance…
+    console.log(`[FacePoke] Initializing FacePoke instance`);
     this.initializeWebSocket();
     this.setupUnloadHandler();
-
-    this.responseTimeBuffer = new CircularBuffer<number>(this.MAX_TRACKED_TIMES);
-    console.log(`[FacePoke] Initialized response time tracker with capacity: ${this.MAX_TRACKED_TIMES}`);
-  }
-
-  /**
-   * Generates a unique UUID for a request and starts tracking it.
-   * @returns The generated UUID for the request.
-   */
-  private trackRequest(): string {
-    const uuid = uuidv4();
-    this.requestTracker.set(uuid, { uuid, timestamp: Date.now() });
-    // console.log(`[FacePoke] Started tracking request with UUID: ${uuid}`);
-    return uuid;
-  }
-
-  /**
-   * Completes tracking for a request and updates response time statistics.
-   * @param uuid - The UUID of the completed request.
-   */
-  private completeRequest(uuid: string): void {
-    const request = this.requestTracker.get(uuid);
-    if (request) {
-      const responseTime = Date.now() - request.timestamp;
-      this.responseTimeBuffer.push(responseTime);
-      this.requestTracker.delete(uuid);
-      this.updateThrottleTime();
-      console.log(`[FacePoke] Completed request ${uuid}. Response time: ${responseTime}ms`);
-    } else {
-      console.warn(`[FacePoke] Attempted to complete unknown request: ${uuid}`);
-    }
-  }
-
-  /**
-   * Calculates the average response time from recent requests.
-   * @returns The average response time in milliseconds.
-   */
-  private calculateAverageResponseTime(): number {
-    const times = this.responseTimeBuffer.getAll();
-
-    const averageLatency = useMainStore.getState().averageLatency;
-
-    if (times.length === 0) return averageLatency;
-    const sum = times.reduce((acc, time) => acc + time, 0);
-    return sum / times.length;
-  }
-
-  /**
-   * Updates the throttle time based on recent response times.
-   */
-  private updateThrottleTime(): void {
-    const { minLatency, maxLatency, averageLatency, setAverageLatency } = useMainStore.getState();
-    const avgResponseTime = this.calculateAverageResponseTime();
-    const newLatency = Math.min(minLatency, Math.max(minLatency, avgResponseTime));
-
-    if (newLatency !== averageLatency) {
-      setAverageLatency(newLatency)
-      console.log(`[FacePoke] Updated throttle time (latency is ${newLatency}ms)`);
-    }
   }

   /**
    * Sets the callback function for handling server responses.
    * @param handler - The function to be called when a modified image is received.
    */
-  public …
-    this.…
-    console.log(`[FacePoke] …
+  public setOnServerResponse(handler: OnServerResponse): void {
+    this.onServerResponse = handler;
+    console.log(`[FacePoke] onServerResponse handler set`);
   }

   /**

@@ -165,53 +47,36 @@ export class FacePoke {
    * Implements exponential backoff for reconnection attempts.
    */
   private async initializeWebSocket(): Promise<void> {
-    console.log(`[FacePoke] …
+    console.log(`[FacePoke] Initializing WebSocket connection`);

     const connect = () => {
       this.ws = new WebSocket(`wss://${window.location.host}/ws`);

       this.ws.onopen = this.handleWebSocketOpen.bind(this);
-      this.ws.onmessage = this.handleWebSocketMessage.bind(this);
       this.ws.onclose = this.handleWebSocketClose.bind(this);
       this.ws.onerror = this.handleWebSocketError.bind(this);
+      this.ws.onmessage = (this.handleWebSocketMessage.bind(this) as any)
     };

-    // const debouncedConnect = debounce(connect, this.reconnectDelay, { leading: true, trailing: false });
-
     connect(); // Initial connection attempt
   }

+  private handleWebSocketMessage(msg: MessageEvent) {
+    if (typeof msg.data === "string") {
+      this.onServerResponse({ loaded: JSON.parse(msg.data) as any });
+    } else if (typeof msg.data !== "undefined") {
+      this.onServerResponse({ image: msg.data as unknown as Blob });
+    }
+  }
+
   /**
    * Handles the WebSocket open event.
    */
   private handleWebSocketOpen(): void {
-    console.log(`[FacePoke] …
+    console.log(`[FacePoke] WebSocket connection opened`);
     this.reconnectAttempts = 0; // Reset reconnect attempts on successful connection
     this.emitEvent('websocketOpen');
   }

-  // Update handleWebSocketMessage to complete request tracking
-  private handleWebSocketMessage(event: MessageEvent): void {
-    try {
-      const data = JSON.parse(event.data);
-      // console.log(`[FacePoke][${this.connectionId}] Received JSON data:`, data);
-
-      if (data.uuid) {
-        this.completeRequest(data.uuid);
-      }
-
-      if (data.type === 'modified_image') {
-        if (data?.image) {
-          this.onModifiedImage(data.image, data.image_hash);
-        }
-      }
-
-      this.emitEvent('message', data);
-    } catch (error) {
-      console.error(`[FacePoke][${this.connectionId}] Error parsing WebSocket message:`, error);
-    }
-  }
-
   /**
    * Handles WebSocket close events.
    * Implements reconnection logic with exponential backoff.

@@ -219,9 +84,9 @@ export class FacePoke {
    */
   private handleWebSocketClose(event: CloseEvent): void {
     if (event.wasClean) {
-      console.log(`[FacePoke] …
+      console.log(`[FacePoke] WebSocket connection closed cleanly, code=${event.code}, reason=${event.reason}`);
     } else {
-      console.warn(`[FacePoke] …
+      console.warn(`[FacePoke] WebSocket connection abruptly closed`);
     }

     this.emitEvent('websocketClose', event);

@@ -230,10 +95,10 @@ export class FacePoke {
     if (!this.isUnloading && this.reconnectAttempts < this.maxReconnectAttempts) {
       this.reconnectAttempts++;
       const delay = Math.min(1000 * (2 ** this.reconnectAttempts), 30000); // Exponential backoff, max 30 seconds
-      console.log(`[FacePoke] …
+      console.log(`[FacePoke] Attempting to reconnect in ${delay}ms (Attempt ${this.reconnectAttempts}/${this.maxReconnectAttempts})...`);
       setTimeout(() => this.initializeWebSocket(), delay);
     } else if (this.reconnectAttempts >= this.maxReconnectAttempts) {
-      console.error(`[FacePoke] …
+      console.error(`[FacePoke] Max reconnect attempts reached. Please refresh the page.`);
       this.emitEvent('maxReconnectAttemptsReached');
     }
   }

@@ -243,7 +108,7 @@ export class FacePoke {
    * @param error - The error event.
    */
   private handleWebSocketError(error: Event): void {
-    console.error(`[FacePoke] …
+    console.error(`[FacePoke] WebSocket error:`, error);
     this.emitEvent('websocketError', error);
   }

@@ -261,33 +126,32 @@ export class FacePoke {
     this.emitEvent('cleanup');
   }

-  /**
-   * …
-   * @param imageHash - The hash of the image to modify.
-   * @param params - The parameters for image modification.
-   */
-  public modifyImage(image: string | null, imageHash: string | null, params: Partial<ImageModificationParams>): void {
-    try {
-      const message: ModifyImageMessage = {
-        type: 'modify_image',
-        params: params
-      };
-      …
-      } else {
-        throw new Error('Either image or imageHash must be provided');
-      }
-      …
-  }
+  public async loadImage(image: string): Promise<void> {
+    // Extract the base64 part if it's a data URL
+    const base64Data = image.split(',')[1] || image;
+
+    const buffer = new Uint8Array(atob(base64Data).split('').map(char => char.charCodeAt(0)));
+    const blob = new Blob([buffer], { type: 'application/octet-binary' });
+    this.sendBlobMessage(await blob.arrayBuffer());
+  }
+
+  public transformImage(hash: string, params: Partial<ImageModificationParams>): void {
+    this.sendJsonMessage({ hash, params });
+  }
+
+  private sendBlobMessage(buffer: ArrayBuffer): void {
+    if (!this.ws || this.ws.readyState !== WebSocketState.OPEN) {
+      const error = new Error('WebSocket connection is not open');
+      console.error('[FacePoke] Error sending JSON message:', error);
+      this.emitEvent('sendJsonMessageError', error);
+      throw error;
+    }
+    try {
+      this.ws.send(buffer);
+    } catch (err) {
+      console.error(`failed to send the WebSocket message: ${err}`)
+    }
+  }

   /**
    * Sends a JSON message through the WebSocket connection.

@@ -301,11 +165,11 @@ export class FacePoke {
       this.emitEvent('sendJsonMessageError', error);
       throw error;
     }
-    …
+    try {
+      this.ws.send(JSON.stringify(message));
+    } catch (err) {
+      console.error(`failed to send the WebSocket message: ${err}`)
+    }
   }

   /**
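To summarize the new wire protocol: text frames carry the `loaded` JSON (hash plus crop metadata), binary frames carry the modified image bytes. A minimal wiring sketch, assuming the store's `handleServerResponse` is the single consumer (the actual wiring happens elsewhere in the client):

```ts
import { FacePoke } from '@/lib/facePoke';
import { useMainStore } from '@/hooks/useMainStore';

const facePoke = new FacePoke();

// One handler for both frame kinds; FacePoke already dispatches
// strings to `loaded` and binary data to `image`.
facePoke.setOnServerResponse(useMainStore.getState().handleServerResponse);

// 1. upload the raw image bytes (one binary frame):
//   await facePoke.loadImage(imageAsDataUrl);
// 2. then transform by hash only (small JSON frames):
//   facePoke.transformImage(hash, { rotate_yaw: 10, rotate_pitch: -5 });
```

Sending only the hash after the initial upload keeps each interactive request tiny, which is in line with the commit's goal of adapting to server and network speed.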
client/src/lib/mapRange.ts
ADDED

@@ -0,0 +1,4 @@
+// Function to map a value from one range to another
+export const mapRange = (value: number, inMin: number, inMax: number, outMin: number, outMax: number): number => {
+  return Math.min(outMax, Math.max(outMin, ((value - inMin) * (outMax - outMin)) / (inMax - inMin) + outMin));
+};
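A couple of worked values for the helper above, matching how `modifyImage` uses it (a ±0.30 cursor range mapped onto ±40 degrees of yaw):

```ts
import { mapRange } from '@/lib/mapRange';

// 0.15 sits 75% of the way through [-0.30, 0.30],
// so it maps to 75% of [-40, 40]:
mapRange(0.15, -0.30, 0.30, -40, 40); // => 20

// out-of-range inputs are clamped to the output bounds:
mapRange(2.0, -0.30, 0.30, -40, 40);  // => 40
mapRange(-2.0, -0.30, 0.30, -40, 40); // => -40
```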
client/src/types.ts
ADDED

@@ -0,0 +1,92 @@
+import * as vision from '@mediapipe/tasks-vision';
+
+/**
+ * Represents the parameters for image modification.
+ */
+export interface ImageModificationParams {
+  eyes: number;
+  eyebrow: number;
+  wink: number;
+  pupil_x: number;
+  pupil_y: number;
+  aaa: number;
+  eee: number;
+  woo: number;
+  smile: number;
+  rotate_pitch: number;
+  rotate_yaw: number;
+  rotate_roll: number;
+}
+
+export interface Metadata {
+  center: number[] // center - 2x1
+  size: number // size - scalar
+  bbox: number[][] // bbox - 4x2
+  angle: number // angle - rad, counterclockwise
+}
+
+/**
+ * Represents a message to modify an image.
+ */
+export interface ModifyImageMessage {
+  image?: string;
+  hash?: string;
+  params: Partial<ImageModificationParams>;
+}
+
+export type OnServerResponseParams = {
+  image?: Blob
+  error?: string
+  loaded?: {
+    h: string
+  } & {
+    c: number[] // center - 2x1
+    s: number // size - scalar
+    b: number[][] // bbox - 4x2
+    a: number // angle - rad, counterclockwise
+  }
+}
+
+/**
+ * Callback type for handling server responses.
+ */
+export type OnServerResponse = (params: OnServerResponseParams) => Promise<void>;
+
+/**
+ * Enum representing the different states of a WebSocket connection.
+ */
+export enum WebSocketState {
+  CONNECTING = 0,
+  OPEN = 1,
+  CLOSING = 2,
+  CLOSED = 3
+}
+
+export type ActionMode = 'HOVERING' | 'PRIMARY' | 'SECONDARY'
+export type LandmarkGroup = 'lips' | 'leftEye' | 'leftEyebrow' | 'rightEye' | 'rightEyebrow' | 'faceOval' | 'background';
+export type LandmarkCenter = { x: number; y: number; z: number };
+export type ClosestLandmark = { group: LandmarkGroup; distance: number; vector: { x: number; y: number; z: number } };
+
+export type MediaPipeResources = {
+  faceLandmarker: vision.FaceLandmarker | null;
+  drawingUtils: vision.DrawingUtils | null;
+};
+
+export interface ImageStateValues {
+  status: string
+  error: string
+  imageFile: File | null
+  isFollowingCursor: boolean
+  isGazingAtCursor: boolean
+  originalImage: string
+  previewImage: string
+  originalImageHash: string
+  minLatency: number
+  averageLatency: number
+  maxLatency: number
+  activeLandmark?: ClosestLandmark
+  metadata: Metadata
+  params: Partial<ImageModificationParams>
+  faceLandmarks: vision.NormalizedLandmark[][]
+  blendShapes: vision.Classifications[]
+}
CHANGED
|
@@ -3,6 +3,7 @@ import hashlib
|
|
| 3 |
import os
|
| 4 |
import io
|
| 5 |
import asyncio
|
|
|
|
| 6 |
import base64
|
| 7 |
from queue import Queue
|
| 8 |
from typing import Dict, Any, List, Optional, Union
|
|
@@ -15,7 +16,7 @@ from PIL import Image
|
|
| 15 |
from liveportrait.config.argument_config import ArgumentConfig
|
| 16 |
from liveportrait.utils.camera import get_rotation_matrix
|
| 17 |
from liveportrait.utils.io import resize_to_limit
|
| 18 |
-
from liveportrait.utils.crop import prepare_paste_back, paste_back
|
| 19 |
|
| 20 |
# Configure logging
|
| 21 |
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
|
@@ -56,12 +57,11 @@ class Engine:
|
|
| 56 |
|
| 57 |
self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
|
| 58 |
|
| 59 |
-
self.image_cache = {} # Stores the original images
|
| 60 |
self.processed_cache = {} # Stores the processed image data
|
| 61 |
|
| 62 |
logger.info("β
FacePoke Engine initialized successfully.")
|
| 63 |
|
| 64 |
-
def get_image_hash(self, image: Union[Image.Image, str, bytes]) -> str:
|
| 65 |
"""
|
| 66 |
Compute or retrieve the hash for an image.
|
| 67 |
|
|
@@ -86,35 +86,23 @@ class Engine:
|
|
| 86 |
else:
|
| 87 |
raise ValueError("Unsupported image type")
|
| 88 |
|
| 89 |
-
@
|
| 90 |
-
def
|
| 91 |
-
|
| 92 |
-
Process the input image and cache the results.
|
| 93 |
-
|
| 94 |
-
Args:
|
| 95 |
-
image_hash (str): Hash of the input image.
|
| 96 |
-
|
| 97 |
-
Returns:
|
| 98 |
-
Dict[str, Any]: Processed image data.
|
| 99 |
-
"""
|
| 100 |
-
# let's hide the logs as there are thousands of message slike this
|
| 101 |
-
#logger.info(f"Processing image with hash: {image_hash}")
|
| 102 |
|
| 103 |
-
|
| 104 |
-
raise ValueError(f"Image with hash {image_hash} not found in cache")
|
| 105 |
|
| 106 |
-
image = self.image_cache[image_hash]
|
| 107 |
img_rgb = np.array(image)
|
| 108 |
|
| 109 |
inference_cfg = self.live_portrait.live_portrait_wrapper.cfg
|
| 110 |
-
img_rgb = resize_to_limit
|
| 111 |
-
crop_info = self.live_portrait.cropper.crop_single_image
|
| 112 |
img_crop_256x256 = crop_info['img_crop_256x256']
|
| 113 |
|
| 114 |
-
I_s = self.live_portrait.live_portrait_wrapper.prepare_source
|
| 115 |
-
x_s_info = self.live_portrait.live_portrait_wrapper.get_kp_info
|
| 116 |
-
f_s = self.live_portrait.live_portrait_wrapper.extract_feature_3d
|
| 117 |
-
x_s = self.live_portrait.live_portrait_wrapper.transform_keypoint
|
| 118 |
|
| 119 |
processed_data = {
|
| 120 |
'img_rgb': img_rgb,
|
|
@@ -127,50 +115,78 @@ class Engine:
|
|
| 127 |
|
| 128 |
self.processed_cache[image_hash] = processed_data
|
| 129 |
|
| 130 |
-
|
|
|
|
| 131 |
|
| 132 |
-
|
| 133 |
-
|
| 134 |
-
Modify the input image based on the provided parameters, using caching for efficiency
|
| 135 |
-
and outputting the result as a WebP image.
|
| 136 |
|
| 137 |
-
|
| 138 |
-
|
| 139 |
-
|
| 140 |
-
|
|
|
|
|
|
|
|
|
|
| 141 |
|
| 142 |
-
|
| 143 |
-
|
|
|
|
|
|
|
| 144 |
|
| 145 |
-
|
| 146 |
-
ValueError: If there's an error modifying the image or WebP is not supported.
|
| 147 |
-
"""
|
| 148 |
-
# let's disable those logs completely as there are thousands of message slike this
|
| 149 |
-
#logger.info("Starting image modification")
|
| 150 |
-
#logger.debug(f"Modification parameters: {params}")
|
| 151 |
|
| 152 |
try:
|
| 153 |
-
image_hash = self.get_image_hash(image_or_hash)
|
| 154 |
-
|
| 155 |
-
# If we don't have the image in cache yet, add it
|
| 156 |
-
if image_hash not in self.image_cache:
|
| 157 |
-
if isinstance(image_or_hash, (Image.Image, bytes)):
|
| 158 |
-
self.image_cache[image_hash] = image_or_hash
|
| 159 |
-
elif isinstance(image_or_hash, str) and len(image_or_hash) != 32:
|
| 160 |
-
# It's a base64 string, not a hash
|
| 161 |
-
self.image_cache[image_hash] = base64_data_uri_to_PIL_Image(image_or_hash)
|
| 162 |
-
else:
|
| 163 |
-
raise ValueError("Image not found in cache and no valid image provided")
|
| 164 |
-
|
| 165 |
-
# Process the image (this will use the cache if available)
|
| 166 |
-
if image_hash not in self.processed_cache:
|
| 167 |
-
processed_data = await asyncio.to_thread(self._process_image, image_hash)
|
| 168 |
-
else:
|
| 169 |
-
processed_data = self.processed_cache[image_hash]
|
| 170 |
-
|
| 171 |
# Apply modifications based on params
|
| 172 |
x_d_new = processed_data['x_s_info']['kp'].clone()
|
| 173 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 174 |
|
| 175 |
# Apply rotation
|
| 176 |
R_new = get_rotation_matrix(
|
|
@@ -181,90 +197,40 @@ class Engine:
|
|
| 181 |
x_d_new = processed_data['x_s_info']['scale'] * (x_d_new @ R_new) + processed_data['x_s_info']['t']
|
| 182 |
|
| 183 |
# Apply stitching
|
| 184 |
-
x_d_new =
|
| 185 |
|
| 186 |
# Generate the output
|
| 187 |
-
out =
|
| 188 |
-
I_p = self.live_portrait.live_portrait_wrapper.parse_output(out['out'])
|
|
|
|
|
|
|
| 189 |
|
| 190 |
-
|
| 191 |
-
|
| 192 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 193 |
processed_data['inference_cfg'].mask_crop, processed_data['crop_info']['M_c2o'],
|
| 194 |
dsize=(processed_data['img_rgb'].shape[1], processed_data['img_rgb'].shape[0])
|
| 195 |
)
|
| 196 |
-
I_p_to_ori_blend =
|
| 197 |
-
|
| 198 |
-
I_p, processed_data['crop_info']['M_c2o'], processed_data['img_rgb'], mask_ori
|
| 199 |
)
|
| 200 |
-
|
| 201 |
-
# Convert the result to a PIL Image
|
| 202 |
result_image = Image.fromarray(I_p_to_ori_blend)
|
| 203 |
|
| 204 |
-
#
|
| 205 |
-
|
| 206 |
-
|
| 207 |
-
modified_image_base64 = base64.b64encode(buffered.getvalue()).decode('utf-8')
|
| 208 |
|
| 209 |
-
#
|
| 210 |
-
|
|
|
|
|
|
|
| 211 |
|
| 212 |
except Exception as e:
|
| 213 |
-
#logger.error(f"Error in modify_image: {str(e)}")
|
| 214 |
-
#logger.exception("Full traceback:")
|
| 215 |
raise ValueError(f"Failed to modify image: {str(e)}")
|
| 216 |
-
|
| 217 |
-
async def _apply_facial_modifications(self, x_d_new: torch.Tensor, params: Dict[str, float]) -> None:
|
| 218 |
-
"""
|
| 219 |
-
Apply facial modifications to the keypoints based on the provided parameters.
|
| 220 |
-
|
| 221 |
-
Args:
|
| 222 |
-
x_d_new (torch.Tensor): Tensor of facial keypoints to be modified.
|
| 223 |
-
params (Dict[str, float]): Parameters for face transformation.
|
| 224 |
-
"""
|
| 225 |
-
modifications = [
|
| 226 |
-
('smile', [
|
| 227 |
-
(0, 20, 1, -0.01), (0, 14, 1, -0.02), (0, 17, 1, 0.0065), (0, 17, 2, 0.003),
|
| 228 |
-
(0, 13, 1, -0.00275), (0, 16, 1, -0.00275), (0, 3, 1, -0.0035), (0, 7, 1, -0.0035)
|
| 229 |
-
]),
|
| 230 |
-
('aaa', [
|
| 231 |
-
(0, 19, 1, 0.001), (0, 19, 2, 0.0001), (0, 17, 1, -0.0001)
|
| 232 |
-
]),
|
| 233 |
-
('eee', [
|
| 234 |
-
(0, 20, 2, -0.001), (0, 20, 1, -0.001), (0, 14, 1, -0.001)
|
| 235 |
-
]),
|
| 236 |
-
('woo', [
|
| 237 |
-
(0, 14, 1, 0.001), (0, 3, 1, -0.0005), (0, 7, 1, -0.0005), (0, 17, 2, -0.0005)
|
| 238 |
-
]),
|
| 239 |
-
('wink', [
|
| 240 |
-
(0, 11, 1, 0.001), (0, 13, 1, -0.0003), (0, 17, 0, 0.0003),
|
| 241 |
-
(0, 17, 1, 0.0003), (0, 3, 1, -0.0003)
|
| 242 |
-
]),
|
| 243 |
-
('pupil_x', [
|
| 244 |
-
(0, 11, 0, 0.0007 if params.get('pupil_x', 0) > 0 else 0.001),
|
| 245 |
-
(0, 15, 0, 0.001 if params.get('pupil_x', 0) > 0 else 0.0007)
|
| 246 |
-
]),
|
| 247 |
-
('pupil_y', [
|
| 248 |
-
(0, 11, 1, -0.001), (0, 15, 1, -0.001)
|
| 249 |
-
]),
|
| 250 |
-
('eyes', [
|
| 251 |
-
(0, 11, 1, -0.001), (0, 13, 1, 0.0003), (0, 15, 1, -0.001), (0, 16, 1, 0.0003),
|
| 252 |
-
(0, 1, 1, -0.00025), (0, 2, 1, 0.00025)
|
| 253 |
-
]),
|
| 254 |
-
('eyebrow', [
|
| 255 |
-
(0, 1, 1, 0.001 if params.get('eyebrow', 0) > 0 else 0.0003),
|
| 256 |
-
(0, 2, 1, -0.001 if params.get('eyebrow', 0) > 0 else -0.0003),
|
| 257 |
-
(0, 1, 0, -0.001 if params.get('eyebrow', 0) <= 0 else 0),
|
| 258 |
-
(0, 2, 0, 0.001 if params.get('eyebrow', 0) <= 0 else 0)
|
| 259 |
-
])
|
| 260 |
-
]
|
| 261 |
-
|
| 262 |
-
for param_name, adjustments in modifications:
|
| 263 |
-
param_value = params.get(param_name, 0)
|
| 264 |
-
for i, j, k, factor in adjustments:
|
| 265 |
-
x_d_new[i, j, k] += param_value * factor
|
| 266 |
-
|
| 267 |
-
# Special case for pupil_y affecting eyes
|
| 268 |
-
x_d_new[0, 11, 1] -= params.get('pupil_y', 0) * 0.001
|
| 269 |
-
x_d_new[0, 15, 1] -= params.get('pupil_y', 0) * 0.001
|
| 270 |
-
params['eyes'] = params.get('eyes', 0) - params.get('pupil_y', 0) / 2.
|
|
|
|
| 3 |
import os
|
| 4 |
import io
|
| 5 |
import asyncio
|
| 6 |
+
from async_lru import alru_cache
|
| 7 |
import base64
|
| 8 |
from queue import Queue
|
| 9 |
from typing import Dict, Any, List, Optional, Union
|
|
|
|
| 16 |
from liveportrait.config.argument_config import ArgumentConfig
|
| 17 |
from liveportrait.utils.camera import get_rotation_matrix
|
| 18 |
from liveportrait.utils.io import resize_to_limit
|
| 19 |
+
from liveportrait.utils.crop import prepare_paste_back, paste_back, parse_bbox_from_landmark
|
| 20 |
|
| 21 |
# Configure logging
|
| 22 |
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
|
|
|
| 57 |
|
| 58 |
self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
|
| 59 |
|
|
|
|
| 60 |
self.processed_cache = {} # Stores the processed image data
|
| 61 |
|
| 62 |
logger.info("β
FacePoke Engine initialized successfully.")
|
| 63 |
|
| 64 |
+
async def get_image_hash(self, image: Union[Image.Image, str, bytes]) -> str:
|
| 65 |
"""
|
| 66 |
Compute or retrieve the hash for an image.
|
| 67 |
|
|
|
|
| 86 |
else:
|
| 87 |
raise ValueError("Unsupported image type")
|
| 88 |
|
| 89 |
+
@alru_cache(maxsize=512)
|
| 90 |
+
async def load_image(self, data):
|
| 91 |
+
image = Image.open(io.BytesIO(data))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 92 |
|
| 93 |
+
image_hash = await self.get_image_hash(image)
|
|
|
|
| 94 |
|
|
|
|
| 95 |
img_rgb = np.array(image)
|
| 96 |
|
| 97 |
inference_cfg = self.live_portrait.live_portrait_wrapper.cfg
|
| 98 |
+
img_rgb = await asyncio.to_thread(resize_to_limit, img_rgb, inference_cfg.ref_max_shape, inference_cfg.ref_shape_n)
|
| 99 |
+
crop_info = await asyncio.to_thread(self.live_portrait.cropper.crop_single_image, img_rgb)
|
| 100 |
img_crop_256x256 = crop_info['img_crop_256x256']
|
| 101 |
|
| 102 |
+
I_s = await asyncio.to_thread(self.live_portrait.live_portrait_wrapper.prepare_source, img_crop_256x256)
|
| 103 |
+
x_s_info = await asyncio.to_thread(self.live_portrait.live_portrait_wrapper.get_kp_info, I_s)
|
| 104 |
+
f_s = await asyncio.to_thread(self.live_portrait.live_portrait_wrapper.extract_feature_3d, I_s)
|
| 105 |
+
x_s = await asyncio.to_thread(self.live_portrait.live_portrait_wrapper.transform_keypoint, x_s_info)
|
| 106 |
|
| 107 |
processed_data = {
|
| 108 |
'img_rgb': img_rgb,
|
|
|
|
| 115 |
|
| 116 |
self.processed_cache[image_hash] = processed_data
|
| 117 |
|
| 118 |
+
# Calculate the bounding box
|
| 119 |
+
bbox_info = parse_bbox_from_landmark(processed_data['crop_info']['lmk_crop'], scale=1.0)
|
| 120 |
|
| 121 |
+
return {
|
| 122 |
+
'h': image_hash,
|
|
|
|
|
|
|
| 123 |
|
| 124 |
+
# those aren't easy to serialize
|
| 125 |
+
'c': bbox_info['center'], # 2x1
|
| 126 |
+
's': bbox_info['size'], # scalar
|
| 127 |
+
'b': bbox_info['bbox'], # 4x2
|
| 128 |
+
'a': bbox_info['angle'], # rad, counterclockwise
|
| 129 |
+
# 'bbox_rot': bbox_info['bbox_rot'].toList(), # 4x2
|
| 130 |
+
}
|
| 131 |
|
| 132 |
+
def transform_image(self, image_hash: str, params: Dict[str, float]) -> bytes:
|
| 133 |
+
# If we don't have the image in cache yet, add it
|
| 134 |
+
if image_hash not in self.processed_cache:
|
| 135 |
+
raise ValueError("cache miss")
|
| 136 |
|
| 137 |
+
processed_data = self.processed_cache[image_hash]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 138 |
|
| 139 |
try:
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 140 |
# Apply modifications based on params
|
| 141 |
x_d_new = processed_data['x_s_info']['kp'].clone()
|
| 142 |
+
|
| 143 |
+
modifications = [
|
| 144 |
+
('smile', [
|
| 145 |
+
(0, 20, 1, -0.01), (0, 14, 1, -0.02), (0, 17, 1, 0.0065), (0, 17, 2, 0.003),
|
| 146 |
+
(0, 13, 1, -0.00275), (0, 16, 1, -0.00275), (0, 3, 1, -0.0035), (0, 7, 1, -0.0035)
|
| 147 |
+
]),
|
| 148 |
+
('aaa', [
|
| 149 |
+
(0, 19, 1, 0.001), (0, 19, 2, 0.0001), (0, 17, 1, -0.0001)
|
| 150 |
+
]),
|
| 151 |
+
('eee', [
|
| 152 |
+
(0, 20, 2, -0.001), (0, 20, 1, -0.001), (0, 14, 1, -0.001)
|
| 153 |
+
]),
|
| 154 |
+
('woo', [
|
| 155 |
+
(0, 14, 1, 0.001), (0, 3, 1, -0.0005), (0, 7, 1, -0.0005), (0, 17, 2, -0.0005)
|
| 156 |
+
]),
|
| 157 |
+
('wink', [
|
| 158 |
+
(0, 11, 1, 0.001), (0, 13, 1, -0.0003), (0, 17, 0, 0.0003),
|
| 159 |
+
(0, 17, 1, 0.0003), (0, 3, 1, -0.0003)
|
| 160 |
+
]),
|
| 161 |
+
('pupil_x', [
|
| 162 |
+
(0, 11, 0, 0.0007 if params.get('pupil_x', 0) > 0 else 0.001),
|
| 163 |
+
(0, 15, 0, 0.001 if params.get('pupil_x', 0) > 0 else 0.0007)
|
| 164 |
+
]),
|
| 165 |
+
('pupil_y', [
|
| 166 |
+
(0, 11, 1, -0.001), (0, 15, 1, -0.001)
|
| 167 |
+
]),
|
| 168 |
+
('eyes', [
|
| 169 |
+
(0, 11, 1, -0.001), (0, 13, 1, 0.0003), (0, 15, 1, -0.001), (0, 16, 1, 0.0003),
|
| 170 |
+
(0, 1, 1, -0.00025), (0, 2, 1, 0.00025)
|
| 171 |
+
]),
|
| 172 |
+
('eyebrow', [
|
| 173 |
+
(0, 1, 1, 0.001 if params.get('eyebrow', 0) > 0 else 0.0003),
|
| 174 |
+
(0, 2, 1, -0.001 if params.get('eyebrow', 0) > 0 else -0.0003),
|
| 175 |
+
(0, 1, 0, -0.001 if params.get('eyebrow', 0) <= 0 else 0),
|
| 176 |
+
(0, 2, 0, 0.001 if params.get('eyebrow', 0) <= 0 else 0)
|
| 177 |
+
])
|
| 178 |
+
]
|
| 179 |
+
|
| 180 |
+
for param_name, adjustments in modifications:
|
| 181 |
+
param_value = params.get(param_name, 0)
|
| 182 |
+
for i, j, k, factor in adjustments:
|
| 183 |
+
x_d_new[i, j, k] += param_value * factor
|
| 184 |
+
|
| 185 |
+
# Special case for pupil_y affecting eyes
|
| 186 |
+
x_d_new[0, 11, 1] -= params.get('pupil_y', 0) * 0.001
|
| 187 |
+
x_d_new[0, 15, 1] -= params.get('pupil_y', 0) * 0.001
|
| 188 |
+
params['eyes'] = params.get('eyes', 0) - params.get('pupil_y', 0) / 2.
|
| 189 |
+
|
| 190 |
|
| 191 |
# Apply rotation
|
| 192 |
R_new = get_rotation_matrix(
|
|
|
|
| 197 |
x_d_new = processed_data['x_s_info']['scale'] * (x_d_new @ R_new) + processed_data['x_s_info']['t']
|
| 198 |
|
| 199 |
# Apply stitching
|
| 200 |
+
x_d_new = self.live_portrait.live_portrait_wrapper.stitching(processed_data['x_s'], x_d_new)
|
| 201 |
|
| 202 |
# Generate the output
|
| 203 |
+
out = self.live_portrait.live_portrait_wrapper.warp_decode(processed_data['f_s'], processed_data['x_s'], x_d_new)
|
| 204 |
+
I_p = self.live_portrait.live_portrait_wrapper.parse_output(out['out'])
|
| 205 |
+
|
| 206 |
+
buffered = io.BytesIO()
|
| 207 |
|
| 208 |
+
####################################################
|
| 209 |
+
# this part is about stitching the image back into the original.
|
| 210 |
+
#
|
| 211 |
+
# this is an expensive operation, not just because of the compute
|
| 212 |
+
# but because the payload will also be bigger (we send back the whole pic)
|
| 213 |
+
#
|
| 214 |
+
# I'm currently running some experiments to do it in the frontend
|
| 215 |
+
#
|
| 216 |
+
# --- old way: we do it in the server-side: ---
|
| 217 |
+
mask_ori = prepare_paste_back(
|
| 218 |
processed_data['inference_cfg'].mask_crop, processed_data['crop_info']['M_c2o'],
|
| 219 |
dsize=(processed_data['img_rgb'].shape[1], processed_data['img_rgb'].shape[0])
|
| 220 |
)
|
| 221 |
+
I_p_to_ori_blend = paste_back(
|
| 222 |
+
I_p[0], processed_data['crop_info']['M_c2o'], processed_data['img_rgb'], mask_ori
|
|
|
|
| 223 |
)
|
|
|
|
|
|
|
| 224 |
result_image = Image.fromarray(I_p_to_ori_blend)
|
| 225 |
|
| 226 |
+
# --- maybe future way: do it in the frontend: ---
|
| 227 |
+
#result_image = Image.fromarray(I_p[0])
|
| 228 |
+
####################################################
|
|
|
|
| 229 |
|
| 230 |
+
# write it into a webp
|
| 231 |
+
result_image.save(buffered, format="WebP", quality=82, lossless=False, method=6)
|
| 232 |
+
|
| 233 |
+
return buffered.getvalue()
|
| 234 |
|
| 235 |
except Exception as e:
|
|
|
|
|
|
|
| 236 |
raise ValueError(f"Failed to modify image: {str(e)}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
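On the client, each `transform_image` call comes back as a single binary WebSocket frame containing these WebP bytes. Condensed from `handleServerResponse` and `convertImageToBase64` (the function name below is illustrative):

```ts
import { useMainStore } from '@/hooks/useMainStore';
import { convertImageToBase64 } from '@/lib/convertImageToBase64';

// Turn one WebP frame into the next preview image.
async function onTransformedFrame(frame: Blob) {
  const dataUrl = await convertImageToBase64(frame);
  useMainStore.getState().setPreviewImage(dataUrl);
}
```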
liveportrait/utils/crop.py
CHANGED

@@ -409,4 +409,4 @@ def paste_back(image_to_processed, crop_M_c2o, rgb_ori, mask_ori):
     dsize = (rgb_ori.shape[1], rgb_ori.shape[0])
     result = _transform_img(image_to_processed, crop_M_c2o, dsize=dsize)
     result = np.clip(mask_ori * result + (1 - mask_ori) * rgb_ori, 0, 255).astype(np.uint8)
-    return result
+    return result
loader.py
CHANGED

@@ -5,8 +5,6 @@ import asyncio
 import aiohttp
 import requests
 from huggingface_hub import hf_hub_download
-import sentencepiece
-

 # Configure logging
 logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')

@@ -24,6 +22,7 @@ HF_REPO_ID = "jbilcke-hf/model-cocktail"
 MODEL_FILES = [
     "dwpose/dw-ll_ucoco_384.pth",
     "face-detector/s3fd-619a316812.pth",
+
     "liveportrait/spade_generator.pth",
     "liveportrait/warping_module.pth",
     "liveportrait/motion_extractor.pth",

@@ -31,6 +30,16 @@ MODEL_FILES = [
     "liveportrait/appearance_feature_extractor.pth",
     "liveportrait/landmark.onnx",

+    # For animal mode 🐶🐱
+    # however they say animal mode doesn't support stitching yet?
+    # https://github.com/KwaiVGI/LivePortrait/blob/main/assets/docs/changelog/2024-08-02.md#updates-on-animals-mode
+    #"liveportrait-animals/warping_module.pth",
+    #"liveportrait-animals/spade_generator.pth",
+    #"liveportrait-animals/motion_extractor.pth",
+    #"liveportrait-animals/appearance_feature_extractor.pth",
+    #"liveportrait-animals/stitching_retargeting_module.pth",
+    #"liveportrait-animals/xpose.pth",
+
     # this is a hack, instead we should probably try to
     # fix liveportrait/utils/dependencies/insightface/utils/storage.py
     "insightface/models/buffalo_l.zip",
public/index.js
CHANGED

@@ -29660,148 +29660,7 @@ Yh.prototype.detectForVideo = Yh.prototype.F, Yh.prototype.detect = Yh.prototype
 return Ka(Yh, t2, e2);
 }, Yh.POSE_CONNECTIONS = Eh;

-// node_modules/uuid/dist/esm-browser/stringify.js
-function unsafeStringify(arr, offset = 0) {
-return (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + "-" + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + "-" + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + "-" + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + "-" + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase();
-}
-var byteToHex = [];
-for (i2 = 0;i2 < 256; ++i2) {
-byteToHex.push((i2 + 256).toString(16).slice(1));
-}
-var i2;
-
-// node_modules/uuid/dist/esm-browser/rng.js
-var getRandomValues;
-var rnds8 = new Uint8Array(16);
-function rng() {
-if (!getRandomValues) {
-getRandomValues = typeof crypto !== "undefined" && crypto.getRandomValues && crypto.getRandomValues.bind(crypto);
-if (!getRandomValues) {
-throw new Error("crypto.getRandomValues() not supported. See https://github.com/uuidjs/uuid#getrandomvalues-not-supported");
-}
-}
-return getRandomValues(rnds8);
-}
-
-// node_modules/uuid/dist/esm-browser/native.js
-var randomUUID = typeof crypto !== "undefined" && crypto.randomUUID && crypto.randomUUID.bind(crypto);
-var native_default = {
-randomUUID
-};
-
-// node_modules/uuid/dist/esm-browser/v4.js
-var v4 = function(options, buf, offset) {
-if (native_default.randomUUID && !buf && !options) {
-return native_default.randomUUID();
-}
-options = options || {};
-var rnds = options.random || (options.rng || rng)();
-rnds[6] = rnds[6] & 15 | 64;
-rnds[8] = rnds[8] & 63 | 128;
-if (buf) {
-offset = offset || 0;
-for (var i2 = 0;i2 < 16; ++i2) {
-buf[offset + i2] = rnds[i2];
-}
-return buf;
-}
-return unsafeStringify(rnds);
-};
-var v4_default = v4;
-
-// src/lib/circularBuffer.ts
-class CircularBuffer {
-capacity;
-buffer;
-pointer;
-constructor(capacity) {
-this.capacity = capacity;
-this.buffer = new Array(capacity);
-this.pointer = 0;
-}
-push(item) {
-this.buffer[this.pointer] = item;
-this.pointer = (this.pointer + 1) % this.capacity;
-}
-getAll() {
-return this.buffer.filter((item) => item !== undefined);
-}
-}
-
-// node_modules/zustand/esm/vanilla.mjs
-var createStoreImpl = (createState) => {
-let state;
-const listeners = new Set;
-const setState = (partial, replace) => {
-const nextState = typeof partial === "function" ? partial(state) : partial;
-if (!Object.is(nextState, state)) {
-const previousState = state;
-state = (replace != null ? replace : typeof nextState !== "object" || nextState === null) ? nextState : Object.assign({}, state, nextState);
-listeners.forEach((listener) => listener(state, previousState));
-}
-};
-const getState = () => state;
-const getInitialState = () => initialState;
-const subscribe = (listener) => {
-listeners.add(listener);
-return () => listeners.delete(listener);
-};
-const api = { setState, getState, getInitialState, subscribe };
-const initialState = state = createState(setState, getState, api);
-return api;
-};
-var createStore = (createState) => createState ? createStoreImpl(createState) : createStoreImpl;
-
-// node_modules/zustand/esm/react.mjs
-var import_react3 = __toESM(require_react(), 1);
-var useStore = function(api, selector = identity) {
-const slice = import_react3.default.useSyncExternalStore(api.subscribe, () => selector(api.getState()), () => selector(api.getInitialState()));
-import_react3.default.useDebugValue(slice);
-return slice;
-};
-var identity = (arg) => arg;
-var createImpl = (createState) => {
-const api = createStore(createState);
-const useBoundStore = (selector) => useStore(api, selector);
-Object.assign(useBoundStore, api);
-return useBoundStore;
-};
-var create = (createState) => createState ? createImpl(createState) : createImpl;
-
-// src/hooks/useMainStore.ts
-var useMainStore = create((set, get) => ({
-error: "",
-imageFile: null,
-originalImage: "",
-originalImageHash: "",
-previewImage: "",
-minLatency: 20,
-averageLatency: 190,
-maxLatency: 4000,
-activeLandmark: undefined,
-params: {},
-setError: (error = "") => set({ error }),
-setImageFile: (file) => set({ imageFile: file }),
-setOriginalImage: (url) => set({ originalImage: url }),
-setOriginalImageHash: (originalImageHash) => set({ originalImageHash }),
-setPreviewImage: (url) => set({ previewImage: url }),
-resetImage: () => {
-const { originalImage } = get();
-if (originalImage) {
-set({ previewImage: originalImage });
-}
-},
-setAverageLatency: (averageLatency) => set({ averageLatency }),
-setActiveLandmark: (activeLandmark) => set({ activeLandmark }),
-setParams: (params) => {
-const { params: previousParams } = get();
-set({ params: {
-...previousParams,
-...params
-} });
-}
-}));
-
-// src/lib/facePoke.ts
+// src/types.ts
 var WebSocketState;
 (function(WebSocketState2) {
 WebSocketState2[WebSocketState2["CONNECTING"] = 0] = "CONNECTING";
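This first hunk removes the bundled uuid helpers, the CircularBuffer class, the old useMainStore, and with them the per-request latency tracking that FacePoke used to adapt its throttle time. For reference, the removed mechanism reduces to averaging recent response times in a fixed-capacity ring buffer; here is a minimal self-contained sketch of that idea (illustrative names, not the project's API):

```ts
// Minimal sketch of the removed latency-averaging idea; names are illustrative.
class RingBuffer {
  private buffer: number[] = [];
  private pointer = 0;

  constructor(private capacity: number) {}

  // Overwrite the oldest entry once the buffer is full.
  push(value: number): void {
    this.buffer[this.pointer] = value;
    this.pointer = (this.pointer + 1) % this.capacity;
  }

  // Average of whatever has been recorded so far, with a fallback when empty.
  average(fallback: number): number {
    if (this.buffer.length === 0) return fallback;
    return this.buffer.reduce((acc, t) => acc + t, 0) / this.buffer.length;
  }
}

const latencies = new RingBuffer(5);
latencies.push(120);
latencies.push(240);
console.log(latencies.average(190)); // 180
```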
@@ -29810,63 +29669,24 @@ var WebSocketState;
 WebSocketState2[WebSocketState2["CLOSED"] = 3] = "CLOSED";
 })(WebSocketState || (WebSocketState = {}));

+// src/lib/facePoke.ts
 class FacePoke {
 ws = null;
-connectionId = v4_default();
 isUnloading = false;
-onModifiedImage = async () => {
+onServerResponse = async () => {
 };
 reconnectAttempts = 0;
 maxReconnectAttempts = 5;
 reconnectDelay = 5000;
 eventListeners = new Map;
-requestTracker = new Map;
-responseTimeBuffer;
-MAX_TRACKED_TIMES = 5;
 constructor() {
-console.log(`[FacePoke] Initializing FacePoke instance
+console.log(`[FacePoke] Initializing FacePoke instance`);
 this.initializeWebSocket();
 this.setupUnloadHandler();
-this.responseTimeBuffer = new CircularBuffer(this.MAX_TRACKED_TIMES);
-console.log(`[FacePoke] Initialized response time tracker with capacity: ${this.MAX_TRACKED_TIMES}`);
-}
-trackRequest() {
-const uuid = v4_default();
-this.requestTracker.set(uuid, { uuid, timestamp: Date.now() });
-return uuid;
-}
-completeRequest(uuid) {
-const request = this.requestTracker.get(uuid);
-if (request) {
-const responseTime = Date.now() - request.timestamp;
-this.responseTimeBuffer.push(responseTime);
-this.requestTracker.delete(uuid);
-this.updateThrottleTime();
-console.log(`[FacePoke] Completed request ${uuid}. Response time: ${responseTime}ms`);
-} else {
-console.warn(`[FacePoke] Attempted to complete unknown request: ${uuid}`);
-}
-}
-calculateAverageResponseTime() {
-const times = this.responseTimeBuffer.getAll();
-const averageLatency = useMainStore.getState().averageLatency;
-if (times.length === 0)
-return averageLatency;
-const sum = times.reduce((acc, time) => acc + time, 0);
-return sum / times.length;
-}
-updateThrottleTime() {
-const { minLatency, maxLatency, averageLatency, setAverageLatency } = useMainStore.getState();
-const avgResponseTime = this.calculateAverageResponseTime();
-const newLatency = Math.min(minLatency, Math.max(minLatency, avgResponseTime));
-if (newLatency !== averageLatency) {
-setAverageLatency(newLatency);
-console.log(`[FacePoke] Updated throttle time (latency is ${newLatency}ms)`);
-}
 }
-setOnModifiedImage(handler) {
-this.onModifiedImage = handler;
-console.log(`[FacePoke] onModifiedImage handler set`);
+setOnServerResponse(handler) {
+this.onServerResponse = handler;
+console.log(`[FacePoke] onServerResponse handler set`);
 }
 async startWebSocket() {
 console.log(`[FacePoke] Starting WebSocket connection.`);
@@ -29875,56 +29695,47 @@ class FacePoke {
 }
 }
 async initializeWebSocket() {
-console.log(`[FacePoke][${this.connectionId}] Initializing WebSocket connection`);
+console.log(`[FacePoke] Initializing WebSocket connection`);
 const connect = () => {
 this.ws = new WebSocket(`wss://${window.location.host}/ws`);
 this.ws.onopen = this.handleWebSocketOpen.bind(this);
-this.ws.onmessage = this.handleWebSocketMessage.bind(this);
 this.ws.onclose = this.handleWebSocketClose.bind(this);
 this.ws.onerror = this.handleWebSocketError.bind(this);
+this.ws.onmessage = this.handleWebSocketMessage.bind(this);
 };
 connect();
 }
+handleWebSocketMessage(msg) {
+if (typeof msg.data === "string") {
+this.onServerResponse({ loaded: JSON.parse(msg.data) });
+} else if (typeof msg.data !== "undefined") {
+this.onServerResponse({ image: msg.data });
+}
+}
 handleWebSocketOpen() {
-console.log(`[FacePoke][${this.connectionId}] WebSocket connection opened`);
+console.log(`[FacePoke] WebSocket connection opened`);
 this.reconnectAttempts = 0;
 this.emitEvent("websocketOpen");
 }
-handleWebSocketMessage(event) {
-try {
-const data = JSON.parse(event.data);
-if (data.uuid) {
-this.completeRequest(data.uuid);
-}
-if (data.type === "modified_image") {
-if (data?.image) {
-this.onModifiedImage(data.image, data.image_hash);
-}
-}
-this.emitEvent("message", data);
-} catch (error) {
-console.error(`[FacePoke][${this.connectionId}] Error parsing WebSocket message:`, error);
-}
-}
 handleWebSocketClose(event) {
 if (event.wasClean) {
-console.log(`[FacePoke][${this.connectionId}] WebSocket connection closed cleanly, code=${event.code}, reason=${event.reason}`);
+console.log(`[FacePoke] WebSocket connection closed cleanly, code=${event.code}, reason=${event.reason}`);
 } else {
-console.warn(`[FacePoke][${this.connectionId}] WebSocket connection abruptly closed`);
+console.warn(`[FacePoke] WebSocket connection abruptly closed`);
 }
 this.emitEvent("websocketClose", event);
 if (!this.isUnloading && this.reconnectAttempts < this.maxReconnectAttempts) {
 this.reconnectAttempts++;
 const delay = Math.min(1000 * 2 ** this.reconnectAttempts, 30000);
-console.log(`[FacePoke][${this.connectionId}] Attempting to reconnect in ${delay}ms (Attempt ${this.reconnectAttempts}/${this.maxReconnectAttempts})...`);
+console.log(`[FacePoke] Attempting to reconnect in ${delay}ms (Attempt ${this.reconnectAttempts}/${this.maxReconnectAttempts})...`);
 setTimeout(() => this.initializeWebSocket(), delay);
 } else if (this.reconnectAttempts >= this.maxReconnectAttempts) {
-console.error(`[FacePoke][${this.connectionId}] Max reconnect attempts reached. Please refresh the page.`);
+console.error(`[FacePoke] Max reconnect attempts reached. Please refresh the page.`);
 this.emitEvent("maxReconnectAttemptsReached");
 }
 }
 handleWebSocketError(error) {
-console.error(`[FacePoke][${this.connectionId}] WebSocket error:`, error);
+console.error(`[FacePoke] WebSocket error:`, error);
 this.emitEvent("websocketError", error);
 }
 cleanup() {
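The reconnection policy itself is unchanged: each attempt doubles the delay and is capped at 30 seconds. A quick worked check of the schedule produced by `Math.min(1000 * 2 ** attempt, 30000)`:

```ts
// Reconnect delays for attempts 1..5, exactly as computed in handleWebSocketClose above.
for (let attempt = 1; attempt <= 5; attempt++) {
  console.log(attempt, Math.min(1000 * 2 ** attempt, 30000));
}
// 1 -> 2000ms, 2 -> 4000ms, 3 -> 8000ms, 4 -> 16000ms, 5 -> 30000ms (32000 capped)
```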
@@ -29937,22 +29748,26 @@ class FacePoke {
 console.log("[FacePoke] Cleanup completed");
 this.emitEvent("cleanup");
 }
-async modifyImage(image, imageHash, params) {
+async loadImage(image) {
+const base64Data = image.split(",")[1] || image;
+const buffer = new Uint8Array(atob(base64Data).split("").map((char) => char.charCodeAt(0)));
+const blob = new Blob([buffer], { type: "application/octet-binary" });
+this.sendBlobMessage(await blob.arrayBuffer());
+}
+transformImage(hash, params) {
+this.sendJsonMessage({ hash, params });
+}
+sendBlobMessage(buffer) {
+if (!this.ws || this.ws.readyState !== WebSocketState.OPEN) {
+const error = new Error("WebSocket connection is not open");
+console.error("[FacePoke] Error sending JSON message:", error);
+this.emitEvent("sendJsonMessageError", error);
+throw error;
+}
 try {
-const message = {
-type: "modify_image",
-params
-};
-if (image) {
-message.image = image;
-} else if (imageHash) {
-message.image_hash = imageHash;
-} else {
-throw new Error("Either image or imageHash must be provided");
-}
-this.sendJsonMessage(message);
+this.ws.send(buffer);
 } catch (err) {
-console.error(`
+console.error(`failed to send the WebSocket message: ${err}`);
 }
 }
 sendJsonMessage(message) {
@@ -29962,9 +29777,11 @@ class FacePoke {
 this.emitEvent("sendJsonMessageError", error);
 throw error;
 }
-
-
-
+try {
+this.ws.send(JSON.stringify(message));
+} catch (err) {
+console.error(`failed to send the WebSocket message: ${err}`);
+}
 }
 setupUnloadHandler() {
 window.addEventListener("beforeunload", () => {
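Taken together, the FacePoke hunks above replace the old single `modify_image` JSON message with a two-step protocol: the image travels once as a binary frame (`loadImage` via `sendBlobMessage`), the server answers with a hash, and every subsequent edit is a small `{ hash, params }` JSON message (`transformImage`). A hedged sketch of a client speaking that protocol directly; the field names follow this diff, while the image URL and error handling are purely illustrative:

```ts
// Minimal client for the hash-based protocol shown above (illustrative, not the project's API).
const ws = new WebSocket(`wss://${window.location.host}/ws`);
ws.binaryType = "blob";

ws.onopen = () => {
  // Step 1: upload the raw image bytes once.
  fetch("/some-portrait.webp")
    .then((res) => res.arrayBuffer())
    .then((buffer) => ws.send(buffer));
};

let imageHash = "";
ws.onmessage = (msg) => {
  if (typeof msg.data === "string") {
    // JSON reply: the server acknowledges the upload with a hash (`h`) plus crop metadata.
    imageHash = JSON.parse(msg.data).h;
    // Step 2: every edit is now a tiny JSON message referencing that hash.
    ws.send(JSON.stringify({ hash: imageHash, params: { rotate_yaw: 10 } }));
  } else {
    // Binary reply: a re-rendered image frame.
    console.log("received modified image frame", msg.data);
  }
};
```

Sending only `{ hash, params }` per drag event keeps the per-frame upstream payload tiny, which matches the README's goal of adapting to server and network speed.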
@@ -30000,6 +29817,337 @@ class FacePoke {
 }
 var facePoke = new FacePoke;

+// node_modules/zustand/esm/vanilla.mjs
+var createStoreImpl = (createState) => {
+let state;
+const listeners = new Set;
+const setState = (partial, replace) => {
+const nextState = typeof partial === "function" ? partial(state) : partial;
+if (!Object.is(nextState, state)) {
+const previousState = state;
+state = (replace != null ? replace : typeof nextState !== "object" || nextState === null) ? nextState : Object.assign({}, state, nextState);
+listeners.forEach((listener) => listener(state, previousState));
+}
+};
+const getState = () => state;
+const getInitialState = () => initialState;
+const subscribe = (listener) => {
+listeners.add(listener);
+return () => listeners.delete(listener);
+};
+const api = { setState, getState, getInitialState, subscribe };
+const initialState = state = createState(setState, getState, api);
+return api;
+};
+var createStore = (createState) => createState ? createStoreImpl(createState) : createStoreImpl;
+
+// node_modules/zustand/esm/react.mjs
+var import_react3 = __toESM(require_react(), 1);
+var useStore = function(api, selector = identity) {
+const slice = import_react3.default.useSyncExternalStore(api.subscribe, () => selector(api.getState()), () => selector(api.getInitialState()));
+import_react3.default.useDebugValue(slice);
+return slice;
+};
+var identity = (arg) => arg;
+var createImpl = (createState) => {
+const api = createStore(createState);
+const useBoundStore = (selector) => useStore(api, selector);
+Object.assign(useBoundStore, api);
+return useBoundStore;
+};
+var create = (createState) => createState ? createImpl(createState) : createImpl;
+
+// src/lib/convertImageToBase64.ts
+async function convertImageToBase64(imageFileOrBlob) {
+return new Promise((resolve, reject) => {
+const reader = new FileReader;
+reader.onload = () => {
+if (typeof reader.result === "string") {
+resolve(reader.result);
+} else {
+reject(new Error("Failed to convert image to base64"));
+}
+};
+reader.onerror = () => {
+reject(new Error("Error reading file or blob"));
+};
+reader.readAsDataURL(imageFileOrBlob);
+});
+}
+
+// src/lib/mapRange.ts
+var mapRange = (value, inMin, inMax, outMin, outMax) => {
+return Math.min(outMax, Math.max(outMin, (value - inMin) * (outMax - outMin) / (inMax - inMin) + outMin));
+};
+
+// src/hooks/useMainStore.ts
+var getDefaultState = () => ({
+status: "",
+error: "",
+imageFile: null,
+isFollowingCursor: false,
+isGazingAtCursor: false,
+originalImage: "",
+originalImageHash: "",
+previewImage: "",
+minLatency: 20,
+averageLatency: 190,
+maxLatency: 4000,
+activeLandmark: undefined,
+metadata: {
+center: [0, 0],
+size: 0,
+bbox: [[0, 0], [0, 0], [0, 0], [0, 0]],
+angle: 0
+},
+params: {},
+faceLandmarks: [],
+blendShapes: []
+});
+var useMainStore = create((set, get) => ({
+...getDefaultState(),
+setStatus: (status = "") => set({ status }),
+setError: (error = "") => set({ error }),
+setFaceLandmarks: (faceLandmarks) => {
+set({ faceLandmarks });
+},
+setBlendShapes: (blendShapes) => {
+set({ blendShapes });
+},
+setImageFile: async (file) => {
+if (!file) {
+set({
+...getDefaultState(),
+status: "No file selected"
+});
+return;
+}
+try {
+const image = await convertImageToBase64(file);
+set({
+...getDefaultState(),
+imageFile: file,
+status: `File selected: ${truncateFileName(file.name, 16)}`,
+previewImage: image,
+originalImage: image
+});
+facePoke.loadImage(image);
+} catch (err) {
+console.log(`failed to load the image: `, err);
+set({
+...getDefaultState(),
+status: "Failed to load the image"
+});
+}
+},
+setIsFollowingCursor: (isFollowingCursor) => set({ isFollowingCursor }),
+setIsGazingAtCursor: (isGazingAtCursor) => set({ isGazingAtCursor }),
+setOriginalImage: (url) => set({ originalImage: url }),
+setOriginalImageHash: (originalImageHash) => set({ originalImageHash }),
+setPreviewImage: (url) => set({ previewImage: url }),
+resetImage: () => {
+const { originalImage } = get();
+if (originalImage) {
+set({ previewImage: originalImage });
+}
+},
+setAverageLatency: (averageLatency) => set({ averageLatency }),
+setActiveLandmark: (activeLandmark) => set({ activeLandmark }),
+setMetadata: (metadata) => set(metadata ? {
+metadata
+} : {
+metadata: getDefaultState().metadata
+}),
+setParams: (params) => {
+const { params: previousParams } = get();
+set({ params: {
+...previousParams,
+...params
+} });
+},
+handleServerResponse: async (params) => {
+const { originalImage, setMetadata, setPreviewImage, setOriginalImageHash, applyModifiedHeadToCanvas, modifyImage } = useMainStore.getState();
+if (typeof params.error === "string") {
+console.error(`handleServerResponse: failed to perform the request, resetting the app (${params.error})`);
+setPreviewImage(originalImage);
+setOriginalImageHash("");
+} else if (typeof params.image !== "undefined") {
+const image = await convertImageToBase64(params.image);
+setPreviewImage(image);
+} else if (typeof params.loaded !== "undefined") {
+setOriginalImageHash(params.loaded.h);
+setMetadata({
+center: params.loaded.c,
+size: params.loaded.s,
+bbox: params.loaded.b,
+angle: params.loaded.a
+});
+await modifyImage({
+landmark: {
+group: "background",
+distance: 0,
+vector: { x: 0.5, y: 0.5, z: 0 }
+},
+vector: { x: 0, y: 0, z: 0 },
+mode: "PRIMARY"
+});
+} else {
+console.log(`handleServerResponse: received an unknown json`, params);
+}
+},
+applyModifiedHeadToCanvas: async (headImageBlob) => {
+return new Promise(async (resolve, reject) => {
+const originalImg = new Image;
+const { originalImage, metadata } = useMainStore.getState();
+originalImg.onload = async () => {
+const canvas = document.createElement("canvas");
+const ctx = canvas.getContext("2d");
+if (!ctx) {
+reject(new Error("Failed to get 2D context"));
+return;
+}
+const pixelRatio = window.devicePixelRatio || 1;
+canvas.width = originalImg.width;
+canvas.height = originalImg.height;
+ctx.drawImage(originalImg, 0, 0);
+const headImageBitmap = await createImageBitmap(headImageBlob, {
+resizeQuality: "high"
+});
+const tempCanvas = document.createElement("canvas");
+const tempCtx = tempCanvas.getContext("2d");
+if (!tempCtx) {
+reject(new Error("Failed to get 2D context for temporary canvas"));
+return;
+}
+tempCanvas.width = headImageBitmap.width;
+tempCanvas.height = headImageBitmap.height;
+tempCtx.drawImage(headImageBitmap, 0, 0);
+const gradientSize = 20;
+const gradient = tempCtx.createRadialGradient(tempCanvas.width / 2, tempCanvas.height / 2, Math.min(tempCanvas.width, tempCanvas.height) / 2 - gradientSize, tempCanvas.width / 2, tempCanvas.height / 2, Math.min(tempCanvas.width, tempCanvas.height) / 2);
+gradient.addColorStop(0, "rgba(0, 0, 0, 1)");
+gradient.addColorStop(1, "rgba(0, 0, 0, 0)");
+tempCtx.globalCompositeOperation = "destination-in";
+tempCtx.fillStyle = gradient;
+tempCtx.fillRect(0, 0, tempCanvas.width, tempCanvas.height);
+console.log("metadata:", metadata);
+ctx.save();
+ctx.rotate(metadata.angle);
+ctx.restore();
+resolve(canvas.toDataURL("image/png"));
+};
+originalImg.src = originalImage;
+});
+},
+modifyImage: async ({ landmark, vector, mode }) => {
+const {
+originalImage,
+originalImageHash,
+params: previousParams,
+setParams,
+setError,
+isFollowingCursor,
+isGazingAtCursor
+} = get();
+if (!originalImage) {
+console.error("Image file or facePoke not available");
+return;
+}
+const params = {
+...previousParams
+};
+const generalControl = {
+minX: -0.3,
+maxX: 0.3,
+minY: -0.3,
+maxY: 0.3
+};
+const pupilControl = {
+minX: -0.5,
+maxX: 0.5,
+minY: -0.5,
+maxY: 0.5
+};
+const eyeControl = {
+minX: -0.5,
+maxX: 0.5,
+minY: -0.5,
+maxY: 0.5
+};
+if (isFollowingCursor) {
+const yawMin = -40;
+const yawMax = 40;
+params.rotate_yaw = mapRange(-vector.x, generalControl.minX, generalControl.maxX, yawMin, yawMax);
+const pitchMin = -40;
+const pitchMax = 40;
+params.rotate_pitch = mapRange(vector.y, generalControl.minY, generalControl.maxY, pitchMin, pitchMax);
+}
+if (isGazingAtCursor) {
+const pupilsXMin = -15;
+const pupilsXMax = 15;
+params.pupil_x = mapRange(vector.x, pupilControl.minX, pupilControl.maxX, pupilsXMin, pupilsXMax);
+const pupilsYMin = -2;
+const pupilsYMax = 8;
+params.pupil_y = mapRange(-vector.y, pupilControl.minY, pupilControl.maxY, pupilsYMin, pupilsYMax);
+}
+if (mode !== "HOVERING") {
+switch (landmark.group) {
+case "leftEye":
+case "rightEye":
+const pupilsXMin = -15;
+const pupilsXMax = 15;
+params.pupil_x = mapRange(vector.x, pupilControl.minX, pupilControl.maxX, pupilsXMin, pupilsXMax);
+const eyesMin = -20;
+const eyesMax = 5;
+params.eyes = mapRange(-vector.y, eyeControl.minX, eyeControl.maxX, eyesMin, eyesMax);
+break;
+case "leftEyebrow":
+case "rightEyebrow":
+const eyebrowMin = -10;
+const eyebrowMax = 15;
+params.eyebrow = mapRange(-vector.y, eyeControl.minY, eyeControl.maxY, eyebrowMin, eyebrowMax);
+break;
+case "lips":
+const aaaMin = -30;
+const aaaMax = 120;
+params.aaa = mapRange(-vector.y, eyeControl.minY, eyeControl.maxY, aaaMin, aaaMax);
+const eeeMin = -20;
+const eeeMax = 15;
+params.eee = mapRange(vector.x, eyeControl.minX, eyeControl.maxX, eeeMin, eeeMax);
+break;
+case "faceOval":
+const rollMin = -40;
+const rollMax = 40;
+params.rotate_roll = mapRange(vector.x, eyeControl.minX, eyeControl.maxX, rollMin, rollMax);
+break;
+case "background":
+const yawMin = -40;
+const yawMax = 40;
+params.rotate_yaw = mapRange(-vector.x, generalControl.minX, generalControl.maxX, yawMin, yawMax);
+const pitchMin = -40;
+const pitchMax = 40;
+params.rotate_pitch = mapRange(vector.y, eyeControl.minY, eyeControl.maxY, pitchMin, pitchMax);
+break;
+default:
+return;
+}
+}
+for (const [key, value] of Object.entries(params)) {
+if (isNaN(value) || !isFinite(value)) {
+console.log(`${key} is NaN, aborting`);
+return;
+}
+}
+setParams(params);
+try {
+if (originalImageHash) {
+facePoke.transformImage(originalImageHash, params);
+}
+} catch (error) {
+setError("Failed to modify image");
+}
+}
+}));
+
 // node_modules/beautiful-react-hooks/esm/useThrottledCallback.js
 var import_react6 = __toESM(require_react(), 1);
 var import_lodash = __toESM(require_lodash(), 1);
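The new `mapRange` helper is the heart of the drag-to-parameter mapping: a linear remap from one interval to another, clamped to the output interval. With the store's `generalControl` range of [-0.3, 0.3] and the yaw range of [-40, 40], a horizontal drag vector converts like this:

```ts
// mapRange exactly as defined above: linear remap with output clamping.
const mapRange = (value: number, inMin: number, inMax: number, outMin: number, outMax: number): number =>
  Math.min(outMax, Math.max(outMin, (value - inMin) * (outMax - outMin) / (inMax - inMin) + outMin));

console.log(mapRange(0.15, -0.3, 0.3, -40, 40)); // 20  (halfway to the right edge => half of max yaw)
console.log(mapRange(0.6, -0.3, 0.3, -40, 40));  // 40  (drag outside the input range => clamped)
```

The clamp is what keeps extreme drags from producing out-of-range rotation or expression parameters, which is why the per-value NaN/finite check in `modifyImage` is the only other guard needed before sending.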
@@ -32818,34 +32966,20 @@ var FACEMESH_TESSELATION = Object.freeze(new Set([

 // src/hooks/useFaceLandmarkDetection.tsx
 function useFaceLandmarkDetection() {
-const error = useMainStore((s2) => s2.error);
 const setError = useMainStore((s2) => s2.setError);
-const imageFile = useMainStore((s2) => s2.imageFile);
-const setImageFile = useMainStore((s2) => s2.setImageFile);
-const originalImage = useMainStore((s2) => s2.originalImage);
-const originalImageHash = useMainStore((s2) => s2.originalImageHash);
-const setOriginalImageHash = useMainStore((s2) => s2.setOriginalImageHash);
 const previewImage = useMainStore((s2) => s2.previewImage);
-const
-const
-
-const averageLatency = 220;
-const [faceLandmarks, setFaceLandmarks] = import_react7.useState([]);
+const handleServerResponse = useMainStore((s2) => s2.handleServerResponse);
+const faceLandmarks = useMainStore((s2) => s2.faceLandmarks);
+const throttleInMs = 180;
 const [isMediaPipeReady, setIsMediaPipeReady] = import_react7.useState(false);
 const [isDrawingUtilsReady, setIsDrawingUtilsReady] = import_react7.useState(false);
-const [blendShapes, setBlendShapes] = import_react7.useState([]);
 const [dragStart, setDragStart] = import_react7.useState(null);
-const [dragEnd, setDragEnd] = import_react7.useState(null);
 const [isDragging, setIsDragging] = import_react7.useState(false);
-const [isWaitingForResponse, setIsWaitingForResponse] = import_react7.useState(false);
 const dragStartRef = import_react7.useRef(null);
-const currentMousePosRef = import_react7.useRef(null);
-const lastModifiedImageHashRef = import_react7.useRef(null);
 const [currentLandmark, setCurrentLandmark] = import_react7.useState(null);
 const [previousLandmark, setPreviousLandmark] = import_react7.useState(null);
 const [currentOpacity, setCurrentOpacity] = import_react7.useState(0);
 const [previousOpacity, setPreviousOpacity] = import_react7.useState(0);
-const [isHovering, setIsHovering] = import_react7.useState(false);
 const canvasRef = import_react7.useRef(null);
 const mediaPipeRef = import_react7.useRef({
 faceLandmarker: null,
@@ -32882,8 +33016,8 @@ function useFaceLandmarkDetection() {
 } else {
 faceLandmarker.close();
 }
-} catch (
-console.error("Error during MediaPipe initialization:",
+} catch (error) {
+console.error("Error during MediaPipe initialization:", error);
 setError("Failed to initialize face detection. Please try refreshing the page.");
 }
 };
@@ -32969,6 +33103,7 @@ function useFaceLandmarkDetection() {
 }
 }, [landmarkCenters]);
 const detectFaceLandmarks = import_react7.useCallback(async (imageDataUrl) => {
+const { setFaceLandmarks, setBlendShapes } = useMainStore.getState();
 if (!isMediaPipeReady) {
 console.log("MediaPipe not ready. Skipping detection.");
 return;
@@ -33068,200 +33203,137 @@ function useFaceLandmarkDetection() {
 }
 detectFaceLandmarks(previewImage);
 }, [isMediaPipeReady, isDrawingUtilsReady, previewImage]);
-const modifyImage = import_react7.useCallback(({ landmark, vector }) => {
-const {
-originalImage: originalImage2,
-originalImageHash: originalImageHash2,
-params: previousParams,
-setParams,
-setError: setError2
-} = useMainStore.getState();
-if (!originalImage2) {
-console.error("Image file or facePoke not available");
-return;
-}
-const params = {
-...previousParams
-};
-const minX = -0.5;
-const maxX = 0.5;
-const minY = -0.5;
-const maxY = 0.5;
-const mapRange = (value, inMin, inMax, outMin, outMax) => {
-return Math.min(outMax, Math.max(outMin, (value - inMin) * (outMax - outMin) / (inMax - inMin) + outMin));
-};
-console.log("modifyImage:", {
-originalImage: originalImage2,
-originalImageHash: originalImageHash2,
-landmark,
-vector,
-minX,
-maxX,
-minY,
-maxY
-});
-switch (landmark.group) {
-case "leftEye":
-case "rightEye":
-const eyesMin = -20;
-const eyesMax = 5;
-params.eyes = mapRange(-vector.y, minX, maxX, eyesMin, eyesMax);
-break;
-case "leftEyebrow":
-case "rightEyebrow":
-const eyebrowMin = -10;
-const eyebrowMax = 15;
-params.eyebrow = mapRange(-vector.y, minY, maxY, eyebrowMin, eyebrowMax);
-break;
-case "lips":
-const eeeMin = -20;
-const eeeMax = 15;
-params.eee = mapRange(-vector.y, minY, maxY, eeeMin, eeeMax);
-const wooMin = -20;
-const wooMax = 15;
-params.woo = mapRange(-vector.x, minX, maxX, wooMin, wooMax);
-break;
-case "faceOval":
-const rollMin = -40;
-const rollMax = 40;
-params.rotate_roll = mapRange(vector.x, minX, maxX, rollMin, rollMax);
-break;
-case "background":
-const yawMin = -40;
-const yawMax = 40;
-params.rotate_yaw = mapRange(-vector.x, minX, maxX, yawMin, yawMax);
-const pitchMin = -40;
-const pitchMax = 40;
-params.rotate_pitch = mapRange(vector.y, minY, maxY, pitchMin, pitchMax);
-break;
-default:
-return;
-}
-for (const [key, value] of Object.entries(params)) {
-if (isNaN(value) || !isFinite(value)) {
-console.log(`${key} is NaN, aborting`);
-return;
-}
-}
-console.log(`PITCH=${params.rotate_pitch || 0}, YAW=${params.rotate_yaw || 0}, ROLL=${params.rotate_roll || 0}`);
-setParams(params);
-try {
-if (!lastModifiedImageHashRef.current || lastModifiedImageHashRef.current !== originalImageHash2) {
-lastModifiedImageHashRef.current = originalImageHash2;
-facePoke.modifyImage(originalImage2, null, params);
-} else {
-facePoke.modifyImage(null, lastModifiedImageHashRef.current, params);
-}
-} catch (error2) {
-setError2("Failed to modify image");
-}
-}, []);
 const modifyImageWithRateLimit = useThrottledCallback_default((params) => {
-modifyImage(params);
-}, [
-const handleMouseEnter = import_react7.useCallback(() => {
-setIsHovering(true);
-}, []);
-const handleMouseLeave = import_react7.useCallback(() => {
-setIsHovering(false);
-}, []);
-const handleMouseDown = import_react7.useCallback((event) => {
+useMainStore.getState().modifyImage(params);
+}, [], throttleInMs);
+import_react7.useEffect(() => {
+facePoke.setOnServerResponse(handleServerResponse);
+}, [handleServerResponse]);
+const handleStart = import_react7.useCallback((x2, y2, mode) => {
 if (!canvasRef.current)
 return;
 const rect = canvasRef.current.getBoundingClientRect();
-const
-const
-const landmark = findClosestLandmark(
-console.log(`Mouse down on ${landmark.group}`);
+const normalizedX = (x2 - rect.left) / rect.width;
+const normalizedY = (y2 - rect.top) / rect.height;
+const landmark = findClosestLandmark(normalizedX, normalizedY);
 setActiveLandmark(landmark);
-setDragStart({ x:
-dragStartRef.current = { x:
+setDragStart({ x: normalizedX, y: normalizedY });
+dragStartRef.current = { x: normalizedX, y: normalizedY };
 }, [findClosestLandmark, setActiveLandmark, setDragStart]);
+const handleMove = import_react7.useCallback((x2, y2, mode) => {
 if (!canvasRef.current)
 return;
 const rect = canvasRef.current.getBoundingClientRect();
-const
-const
+const normalizedX = (x2 - rect.left) / rect.width;
+const normalizedY = (y2 - rect.top) / rect.height;
+const landmark = findClosestLandmark(normalizedX, normalizedY, dragStart && dragStartRef.current ? currentLandmark?.group : undefined);
+const landmarkData = landmarkCenters[landmark?.group];
+const vector = landmarkData ? {
+x: normalizedX - landmarkData.x,
+y: normalizedY - landmarkData.y,
+z: 0
+} : {
+x: 0.5,
+y: 0.5,
+z: 0
+};
 if (dragStart && dragStartRef.current) {
-console.log(`Dragging mouse (was over ${currentLandmark?.group || "nothing"}, now over ${landmark.group})`);
+setIsDragging(true);
 modifyImageWithRateLimit({
 landmark: currentLandmark || landmark,
-vector: {
-x: x2 - landmarkCenters[landmark.group].x,
-y: y2 - landmarkCenters[landmark.group].y,
-z: 0
-}
+vector,
+mode
 });
-setIsDragging(true);
 } else {
-const landmark = findClosestLandmark(x2, y2);
 if (!currentLandmark || currentLandmark?.group !== landmark?.group) {
 setActiveLandmark(landmark);
 }
+modifyImageWithRateLimit({
+landmark,
+vector,
+mode: "HOVERING"
+});
 }
-}, [currentLandmark, dragStart,
+}, [currentLandmark, dragStart, setActiveLandmark, setIsDragging, modifyImageWithRateLimit, landmarkCenters]);
+const handleEnd = import_react7.useCallback((x2, y2, mode) => {
 if (!canvasRef.current)
 return;
 const rect = canvasRef.current.getBoundingClientRect();
-const
-const
+const normalizedX = (x2 - rect.left) / rect.width;
+const normalizedY = (y2 - rect.top) / rect.height;
 if (dragStart && dragStartRef.current) {
-const landmark = findClosestLandmark(
-console.log(`Mouse up (was over ${currentLandmark?.group || "nothing"}, now over ${landmark.group})`);
+const landmark = findClosestLandmark(normalizedX, normalizedY, currentLandmark?.group);
 modifyImageWithRateLimit({
 landmark: currentLandmark || landmark,
 vector: {
-x:
-y:
+x: normalizedX - landmarkCenters[landmark.group].x,
+y: normalizedY - landmarkCenters[landmark.group].y,
 z: 0
-}
+},
+mode
 });
 }
 setIsDragging(false);
 dragStartRef.current = null;
 setActiveLandmark(undefined);
-}, [currentLandmark, isDragging, modifyImageWithRateLimit, findClosestLandmark, setActiveLandmark, landmarkCenters,
-import_react7.
+}, [currentLandmark, isDragging, modifyImageWithRateLimit, findClosestLandmark, setActiveLandmark, landmarkCenters, setIsDragging]);
+const handleMouseDown = import_react7.useCallback((event) => {
+const mode = event.button === 0 ? "PRIMARY" : "SECONDARY";
+handleStart(event.clientX, event.clientY, mode);
+}, [handleStart]);
+const handleMouseMove = import_react7.useCallback((event) => {
+const mode = event.buttons === 1 ? "PRIMARY" : "SECONDARY";
+handleMove(event.clientX, event.clientY, mode);
+}, [handleMove]);
+const handleMouseUp = import_react7.useCallback((event) => {
+const mode = event.buttons === 1 ? "PRIMARY" : "SECONDARY";
+handleEnd(event.clientX, event.clientY, mode);
+}, [handleEnd]);
+const handleTouchStart = import_react7.useCallback((event) => {
+const mode = event.touches.length === 1 ? "PRIMARY" : "SECONDARY";
+const touch = event.touches[0];
+handleStart(touch.clientX, touch.clientY, mode);
+}, [handleStart]);
+const handleTouchMove = import_react7.useCallback((event) => {
+const mode = event.touches.length === 1 ? "PRIMARY" : "SECONDARY";
+const touch = event.touches[0];
+handleMove(touch.clientX, touch.clientY, mode);
+}, [handleMove]);
+const handleTouchEnd = import_react7.useCallback((event) => {
+const mode = event.changedTouches.length === 1 ? "PRIMARY" : "SECONDARY";
+const touch = event.changedTouches[0];
+handleEnd(touch.clientX, touch.clientY, mode);
+}, [handleEnd]);
 return {
 canvasRef,
 canvasRefCallback,
 mediaPipeRef,
-faceLandmarks,
 isMediaPipeReady,
 isDrawingUtilsReady,
-blendShapes,
-setFaceLandmarks,
-setBlendShapes,
 handleMouseDown,
 handleMouseUp,
 handleMouseMove,
-handleMouseEnter,
-handleMouseLeave,
+handleTouchStart,
+handleTouchMove,
+handleTouchEnd,
 currentLandmark,
 currentOpacity
 };
 }

-// src/components/PoweredBy.tsx
+// src/components/About.tsx
 var jsx_dev_runtime2 = __toESM(require_jsx_dev_runtime(), 1);
-function PoweredBy() {
+function About() {
 return jsx_dev_runtime2.jsxDEV("div", {
 className: "flex flex-row items-center justify-center font-sans mt-4 w-full",
 children: [
 jsx_dev_runtime2.jsxDEV("span", {
-className: "
+className: "text-neutral-900 text-sm",
+style: { textShadow: "rgb(255 255 255 / 80%) 0px 0px 2px" },
+children: "Click and drag on the image."
+}, undefined, false, undefined, this),
 children: jsx_dev_runtime2.jsxDEV("img", {
 src: "/hf-logo.svg",
 alt: "Hugging Face",
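Note how this hunk funnels both mouse and touch events into a shared `handleStart`/`handleMove`/`handleEnd` trio that first normalizes client coordinates into canvas-relative [0, 1] space, so the landmark math stays independent of the rendered canvas size. The normalization step in isolation:

```ts
// Canvas-relative normalization used by the handlers above (sketch; any HTMLCanvasElement works).
function normalizePointer(canvas: HTMLCanvasElement, clientX: number, clientY: number) {
  const rect = canvas.getBoundingClientRect();
  return {
    x: (clientX - rect.left) / rect.width,  // 0 = left edge, 1 = right edge
    y: (clientY - rect.top) / rect.height,  // 0 = top edge, 1 = bottom edge
  };
}
```

A single set of Pointer Events listeners could collapse the six mouse/touch adapters into three, at the cost of support for older browsers; the explicit pairs used here keep behavior predictable on both desktop and mobile.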
@@ -33297,7 +33369,6 @@ function Spinner() {
 // src/hooks/useFacePokeAPI.ts
 var import_react8 = __toESM(require_react(), 1);
 function useFacePokeAPI() {
-const [status, setStatus] = import_react8.useState("");
 const [isDebugMode, setIsDebugMode] = import_react8.useState(false);
 const [interruptMessage, setInterruptMessage] = import_react8.useState(null);
 const [isLoading, setIsLoading] = import_react8.useState(false);
@@ -33316,8 +33387,6 @@ function useFacePokeAPI() {
 }, []);
 return {
 facePoke,
-status,
-setStatus,
 isDebugMode,
 setIsDebugMode,
 interruptMessage,
@@ -33335,31 +33404,13 @@ function Layout({ children }) {
 children: jsx_dev_runtime4.jsxDEV("div", {
 className: "min-h-screen w-full py-8 flex flex-col justify-center",
 children: jsx_dev_runtime4.jsxDEV("div", {
-className: "
 children
 }, undefined, false, undefined, this)
 }, undefined, false, undefined, this)
 }, undefined, false, undefined, this);
 }

-// src/lib/convertImageToBase64.ts
-async function convertImageToBase64(imageFile) {
-return new Promise((resolve, reject) => {
-const reader = new FileReader;
-reader.onload = () => {
-if (typeof reader.result === "string") {
-resolve(reader.result);
-} else {
-reject(new Error("Failed to convert image to base64"));
-}
-};
-reader.onerror = () => {
-reject(new Error("Error reading file"));
-};
-reader.readAsDataURL(imageFile);
-});
-}
-
 // src/app.tsx
 var jsx_dev_runtime5 = __toESM(require_jsx_dev_runtime(), 1);
 function App() {
@@ -33367,66 +33418,34 @@ function App() {
 const setError = useMainStore((s2) => s2.setError);
 const imageFile = useMainStore((s2) => s2.imageFile);
 const setImageFile = useMainStore((s2) => s2.setImageFile);
-const
-const
 const previewImage = useMainStore((s2) => s2.previewImage);
-const
-const
-const setOriginalImageHash = useMainStore((s2) => s2.setOriginalImageHash);
 const {
-status,
-setStatus,
 isDebugMode,
 setIsDebugMode,
 interruptMessage
 } = useFacePokeAPI();
 const {
-canvasRef,
 canvasRefCallback,
-mediaPipeRef,
-faceLandmarks,
 isMediaPipeReady,
-blendShapes,
-setFaceLandmarks,
-setBlendShapes,
 handleMouseDown,
 handleMouseUp,
 handleMouseMove,
-handleMouseEnter,
-handleMouseLeave,
 currentOpacity
 } = useFaceLandmarkDetection();
 const videoRef = import_react9.useRef(null);
-const handleFileChange = import_react9.useCallback(
 const files = event.target.files;
-if (files && files[0]) {
-setImageFile(files[0]);
-setStatus(`File selected: ${truncateFileName(files[0].name, 16)}`);
-try {
-const image = await convertImageToBase64(files[0]);
-setPreviewImage(image);
-setOriginalImage(image);
-setOriginalImageHash("");
-} catch (err) {
-console.log(`failed to convert the image: `, err);
-setImageFile(null);
-setStatus("");
-setPreviewImage("");
-setOriginalImage("");
-setOriginalImageHash("");
-setFaceLandmarks([]);
-setBlendShapes([]);
-}
-} else {
-setImageFile(null);
-setStatus("");
-setPreviewImage("");
-setOriginalImage("");
-setOriginalImageHash("");
-setFaceLandmarks([]);
-setBlendShapes([]);
-}
-}, [isMediaPipeReady, setImageFile, setPreviewImage, setOriginalImage, setOriginalImageHash, setFaceLandmarks, setBlendShapes, setStatus]);
 const handleDownload = import_react9.useCallback(() => {
 if (previewImage) {
 const link = document.createElement("a");
@@ -33503,7 +33522,7 @@ function App() {
 className: "flex items-center space-x-2",
 children: [
 jsx_dev_runtime5.jsxDEV("div", {
-className: "
 children: [
 jsx_dev_runtime5.jsxDEV("input", {
 id: "imageInput",
@@ -33518,7 +33537,7 @@ function App() {
 className: `cursor-pointer inline-flex items-center px-3 h-10 border border-transparent text-sm font-medium rounded-md text-white ${isMediaPipeReady ? "bg-slate-600 hover:bg-slate-500" : "bg-slate-500 cursor-not-allowed"} focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-slate-500 shadow-xl`,
 children: [
 jsx_dev_runtime5.jsxDEV(Spinner, {}, undefined, false, undefined, this),
-imageFile ? truncateFileName(imageFile.name, 32) : isMediaPipeReady ? "Choose a portrait photo
 ]
 }, undefined, true, undefined, this)
 ]
@@ -33535,18 +33554,21 @@ function App() {
 }, undefined, true, undefined, this)
 ]
 }, undefined, true, undefined, this),
-previewImage && jsx_dev_runtime5.jsxDEV("
-className: "
-children:
 ]
 }, undefined, true, undefined, this),
 previewImage && jsx_dev_runtime5.jsxDEV("div", {
@@ -33560,11 +33582,12 @@ function App() {
 jsx_dev_runtime5.jsxDEV("canvas", {
 ref: canvasRefCallback,
 className: "absolute top-0 left-0 w-full h-full select-none",
-onMouseEnter: handleMouseEnter,
-onMouseLeave: handleMouseLeave,
 onMouseDown: handleMouseDown,
 onMouseUp: handleMouseUp,
 onMouseMove: handleMouseMove,
 style: {
 position: "absolute",
 top: 0,
@@ -33580,7 +33603,7 @@ function App() {
 canDisplayBlendShapes && displayBlendShapes
 ]
 }, undefined, true, undefined, this),
-jsx_dev_runtime5.jsxDEV(
 ]
 }, undefined, true, undefined, this);
 }
|
|
|
|
| 29660 |
return Ka(Yh, t2, e2);
|
| 29661 |
}, Yh.POSE_CONNECTIONS = Eh;
|
| 29662 |
|
| 29663 |
+
// src/types.ts
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 29664 |
var WebSocketState;
|
| 29665 |
(function(WebSocketState2) {
|
| 29666 |
WebSocketState2[WebSocketState2["CONNECTING"] = 0] = "CONNECTING";
|
|
|
|
| 29669 |
WebSocketState2[WebSocketState2["CLOSED"] = 3] = "CLOSED";
|
| 29670 |
})(WebSocketState || (WebSocketState = {}));
|
| 29671 |
|
| 29672 |
+
// src/lib/facePoke.ts
|
| 29673 |
class FacePoke {
|
| 29674 |
ws = null;
|
|
|
|
| 29675 |
isUnloading = false;
|
| 29676 |
+
onServerResponse = async () => {
|
| 29677 |
};
|
| 29678 |
reconnectAttempts = 0;
|
| 29679 |
maxReconnectAttempts = 5;
|
| 29680 |
reconnectDelay = 5000;
|
| 29681 |
eventListeners = new Map;
|
|
|
|
|
|
|
|
|
|
| 29682 |
constructor() {
|
| 29683 |
+
console.log(`[FacePoke] Initializing FacePoke instance`);
|
| 29684 |
this.initializeWebSocket();
|
| 29685 |
this.setupUnloadHandler();
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 29686 |
}
|
| 29687 |
+
setOnServerResponse(handler) {
|
| 29688 |
+
this.onServerResponse = handler;
|
| 29689 |
+
console.log(`[FacePoke] onServerResponse handler set`);
|
| 29690 |
}
|
| 29691 |
async startWebSocket() {
|
| 29692 |
console.log(`[FacePoke] Starting WebSocket connection.`);
|
|
|
|
| 29695 |
}
|
| 29696 |
}
|
| 29697 |
async initializeWebSocket() {
|
| 29698 |
+
console.log(`[FacePoke] Initializing WebSocket connection`);
|
| 29699 |
const connect = () => {
|
| 29700 |
this.ws = new WebSocket(`wss://${window.location.host}/ws`);
|
| 29701 |
this.ws.onopen = this.handleWebSocketOpen.bind(this);
|
|
|
|
| 29702 |
this.ws.onclose = this.handleWebSocketClose.bind(this);
|
| 29703 |
this.ws.onerror = this.handleWebSocketError.bind(this);
|
| 29704 |
+
this.ws.onmessage = this.handleWebSocketMessage.bind(this);
|
| 29705 |
};
|
| 29706 |
connect();
|
| 29707 |
}
|
| 29708 |
+
handleWebSocketMessage(msg) {
|
| 29709 |
+
if (typeof msg.data === "string") {
|
| 29710 |
+
this.onServerResponse({ loaded: JSON.parse(msg.data) });
|
| 29711 |
+
} else if (typeof msg.data !== "undefined") {
|
| 29712 |
+
this.onServerResponse({ image: msg.data });
|
| 29713 |
+
}
|
| 29714 |
+
}
|
| 29715 |
handleWebSocketOpen() {
|
| 29716 |
+
console.log(`[FacePoke] WebSocket connection opened`);
|
| 29717 |
this.reconnectAttempts = 0;
|
| 29718 |
this.emitEvent("websocketOpen");
|
| 29719 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 29720 |
handleWebSocketClose(event) {
|
| 29721 |
if (event.wasClean) {
|
| 29722 |
+
console.log(`[FacePoke] WebSocket connection closed cleanly, code=${event.code}, reason=${event.reason}`);
|
| 29723 |
} else {
|
| 29724 |
+
console.warn(`[FacePoke] WebSocket connection abruptly closed`);
|
| 29725 |
}
|
| 29726 |
this.emitEvent("websocketClose", event);
|
| 29727 |
if (!this.isUnloading && this.reconnectAttempts < this.maxReconnectAttempts) {
|
| 29728 |
this.reconnectAttempts++;
|
| 29729 |
const delay = Math.min(1000 * 2 ** this.reconnectAttempts, 30000);
|
| 29730 |
+
console.log(`[FacePoke] Attempting to reconnect in ${delay}ms (Attempt ${this.reconnectAttempts}/${this.maxReconnectAttempts})...`);
|
| 29731 |
setTimeout(() => this.initializeWebSocket(), delay);
|
| 29732 |
} else if (this.reconnectAttempts >= this.maxReconnectAttempts) {
|
| 29733 |
+
console.error(`[FacePoke] Max reconnect attempts reached. Please refresh the page.`);
|
| 29734 |
this.emitEvent("maxReconnectAttemptsReached");
|
| 29735 |
}
|
| 29736 |
}
|
| 29737 |
handleWebSocketError(error) {
|
| 29738 |
+
console.error(`[FacePoke] WebSocket error:`, error);
|
| 29739 |
this.emitEvent("websocketError", error);
|
| 29740 |
}
|
| 29741 |
cleanup() {
|
|
|
|
| 29748 |
console.log("[FacePoke] Cleanup completed");
|
| 29749 |
this.emitEvent("cleanup");
|
| 29750 |
}
|
| 29751 |
+
async loadImage(image) {
|
| 29752 |
+
const base64Data = image.split(",")[1] || image;
|
| 29753 |
+
const buffer = new Uint8Array(atob(base64Data).split("").map((char) => char.charCodeAt(0)));
|
| 29754 |
+
const blob = new Blob([buffer], { type: "application/octet-binary" });
|
| 29755 |
+
this.sendBlobMessage(await blob.arrayBuffer());
|
| 29756 |
+
}
|
| 29757 |
+
transformImage(hash, params) {
|
| 29758 |
+
this.sendJsonMessage({ hash, params });
|
| 29759 |
+
}
|
| 29760 |
+
sendBlobMessage(buffer) {
|
| 29761 |
+
if (!this.ws || this.ws.readyState !== WebSocketState.OPEN) {
|
| 29762 |
+
const error = new Error("WebSocket connection is not open");
|
| 29763 |
+
console.error("[FacePoke] Error sending JSON message:", error);
|
| 29764 |
+
this.emitEvent("sendJsonMessageError", error);
|
| 29765 |
+
throw error;
|
| 29766 |
+
}
|
| 29767 |
try {
|
| 29768 |
+
this.ws.send(buffer);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 29769 |
} catch (err) {
|
| 29770 |
+
console.error(`failed to send the WebSocket message: ${err}`);
|
| 29771 |
}
|
| 29772 |
}
|
| 29773 |
sendJsonMessage(message) {
|
|
|
|
| 29777 |
this.emitEvent("sendJsonMessageError", error);
|
| 29778 |
throw error;
|
| 29779 |
}
|
| 29780 |
+
try {
|
| 29781 |
+
this.ws.send(JSON.stringify(message));
|
| 29782 |
+
} catch (err) {
|
| 29783 |
+
console.error(`failed to send the WebSocket message: ${err}`);
|
| 29784 |
+
}
|
| 29785 |
}
|
| 29786 |
setupUnloadHandler() {
|
| 29787 |
window.addEventListener("beforeunload", () => {
|
|
|
|
| 29817 |
}
|
| 29818 |
var facePoke = new FacePoke;
|
| 29819 |
|
| 29820 |
+
// node_modules/zustand/esm/vanilla.mjs
|
| 29821 |
+
var createStoreImpl = (createState) => {
|
| 29822 |
+
let state;
|
| 29823 |
+
const listeners = new Set;
|
| 29824 |
+
const setState = (partial, replace) => {
|
| 29825 |
+
const nextState = typeof partial === "function" ? partial(state) : partial;
|
| 29826 |
+
if (!Object.is(nextState, state)) {
|
| 29827 |
+
const previousState = state;
|
| 29828 |
+
state = (replace != null ? replace : typeof nextState !== "object" || nextState === null) ? nextState : Object.assign({}, state, nextState);
|
| 29829 |
+
listeners.forEach((listener) => listener(state, previousState));
|
| 29830 |
+
}
|
| 29831 |
+
};
|
| 29832 |
+
const getState = () => state;
|
| 29833 |
+
const getInitialState = () => initialState;
|
| 29834 |
+
const subscribe = (listener) => {
|
| 29835 |
+
listeners.add(listener);
|
| 29836 |
+
return () => listeners.delete(listener);
|
| 29837 |
+
};
|
| 29838 |
+
const api = { setState, getState, getInitialState, subscribe };
|
| 29839 |
+
const initialState = state = createState(setState, getState, api);
|
| 29840 |
+
return api;
|
| 29841 |
+
};
|
| 29842 |
+
var createStore = (createState) => createState ? createStoreImpl(createState) : createStoreImpl;
|
| 29843 |
+
|
| 29844 |
+
// node_modules/zustand/esm/react.mjs
|
| 29845 |
+
var import_react3 = __toESM(require_react(), 1);
|
| 29846 |
+
var useStore = function(api, selector = identity) {
|
| 29847 |
+
const slice = import_react3.default.useSyncExternalStore(api.subscribe, () => selector(api.getState()), () => selector(api.getInitialState()));
|
| 29848 |
+
import_react3.default.useDebugValue(slice);
|
| 29849 |
+
return slice;
|
| 29850 |
+
};
|
| 29851 |
+
var identity = (arg) => arg;
|
| 29852 |
+
var createImpl = (createState) => {
|
| 29853 |
+
const api = createStore(createState);
|
| 29854 |
+
const useBoundStore = (selector) => useStore(api, selector);
|
| 29855 |
+
Object.assign(useBoundStore, api);
|
| 29856 |
+
return useBoundStore;
|
| 29857 |
+
};
|
| 29858 |
+
var create = (createState) => createState ? createImpl(createState) : createImpl;
|
| 29859 |
+
|
| 29860 |
+
// src/lib/convertImageToBase64.ts
|
| 29861 |
+
async function convertImageToBase64(imageFileOrBlob) {
|
| 29862 |
+
return new Promise((resolve, reject) => {
|
| 29863 |
+
const reader = new FileReader;
|
| 29864 |
+
reader.onload = () => {
|
| 29865 |
+
if (typeof reader.result === "string") {
|
| 29866 |
+
resolve(reader.result);
|
| 29867 |
+
} else {
|
| 29868 |
+
reject(new Error("Failed to convert image to base64"));
|
| 29869 |
+
}
|
| 29870 |
+
};
|
| 29871 |
+
reader.onerror = () => {
|
| 29872 |
+
reject(new Error("Error reading file or blob"));
|
| 29873 |
+
};
|
| 29874 |
+
reader.readAsDataURL(imageFileOrBlob);
|
| 29875 |
+
});
|
| 29876 |
+
}
|
| 29877 |
+
|
| 29878 |
+
// src/lib/mapRange.ts
|
| 29879 |
+
var mapRange = (value, inMin, inMax, outMin, outMax) => {
|
| 29880 |
+
return Math.min(outMax, Math.max(outMin, (value - inMin) * (outMax - outMin) / (inMax - inMin) + outMin));
|
| 29881 |
+
};
|
| 29882 |
+
|
| 29883 |
+
// src/hooks/useMainStore.ts
|
| 29884 |
+
var getDefaultState = () => ({
|
| 29885 |
+
status: "",
|
| 29886 |
+
error: "",
|
| 29887 |
+
imageFile: null,
|
| 29888 |
+
isFollowingCursor: false,
|
| 29889 |
+
isGazingAtCursor: false,
|
| 29890 |
+
originalImage: "",
|
| 29891 |
+
originalImageHash: "",
|
| 29892 |
+
previewImage: "",
|
| 29893 |
+
minLatency: 20,
|
| 29894 |
+
averageLatency: 190,
|
| 29895 |
+
maxLatency: 4000,
|
| 29896 |
+
activeLandmark: undefined,
|
| 29897 |
+
metadata: {
|
| 29898 |
+
center: [0, 0],
|
| 29899 |
+
size: 0,
|
| 29900 |
+
bbox: [[0, 0], [0, 0], [0, 0], [0, 0]],
|
| 29901 |
+
angle: 0
|
| 29902 |
+
},
|
| 29903 |
+
params: {},
|
| 29904 |
+
faceLandmarks: [],
|
| 29905 |
+
blendShapes: []
|
| 29906 |
+
});
|
| 29907 |
+
var useMainStore = create((set, get) => ({
|
| 29908 |
+
...getDefaultState(),
|
| 29909 |
+
setStatus: (status = "") => set({ status }),
|
| 29910 |
+
setError: (error = "") => set({ error }),
|
| 29911 |
+
setFaceLandmarks: (faceLandmarks) => {
|
| 29912 |
+
set({ faceLandmarks });
|
| 29913 |
+
},
|
| 29914 |
+
setBlendShapes: (blendShapes) => {
|
| 29915 |
+
set({ blendShapes });
|
| 29916 |
+
},
|
| 29917 |
+
setImageFile: async (file) => {
|
| 29918 |
+
if (!file) {
|
| 29919 |
+
set({
|
| 29920 |
+
...getDefaultState(),
|
| 29921 |
+
status: "No file selected"
|
| 29922 |
+
});
|
| 29923 |
+
return;
|
| 29924 |
+
}
|
| 29925 |
+
try {
|
| 29926 |
+
const image = await convertImageToBase64(file);
|
| 29927 |
+
set({
|
| 29928 |
+
...getDefaultState(),
|
| 29929 |
+
imageFile: file,
|
| 29930 |
+
status: `File selected: ${truncateFileName(file.name, 16)}`,
|
| 29931 |
+
previewImage: image,
|
| 29932 |
+
originalImage: image
|
| 29933 |
+
});
|
| 29934 |
+
facePoke.loadImage(image);
|
| 29935 |
+
} catch (err) {
|
| 29936 |
+
console.log(`failed to load the image: `, err);
|
| 29937 |
+
set({
|
| 29938 |
+
...getDefaultState(),
|
| 29939 |
+
status: "Failed to load the image"
|
| 29940 |
+
});
|
| 29941 |
+
}
|
| 29942 |
+
},
|
| 29943 |
+
setIsFollowingCursor: (isFollowingCursor) => set({ isFollowingCursor }),
|
| 29944 |
+
setIsGazingAtCursor: (isGazingAtCursor) => set({ isGazingAtCursor }),
|
| 29945 |
+
setOriginalImage: (url) => set({ originalImage: url }),
|
| 29946 |
+
setOriginalImageHash: (originalImageHash) => set({ originalImageHash }),
|
| 29947 |
+
setPreviewImage: (url) => set({ previewImage: url }),
|
| 29948 |
+
resetImage: () => {
|
| 29949 |
+
const { originalImage } = get();
|
| 29950 |
+
if (originalImage) {
|
| 29951 |
+
set({ previewImage: originalImage });
|
| 29952 |
+
}
|
| 29953 |
+
},
|
| 29954 |
+
setAverageLatency: (averageLatency) => set({ averageLatency }),
|
| 29955 |
+
setActiveLandmark: (activeLandmark) => set({ activeLandmark }),
|
| 29956 |
+
setMetadata: (metadata) => set(metadata ? {
|
| 29957 |
+
metadata
|
| 29958 |
+
} : {
|
| 29959 |
+
metadata: getDefaultState().metadata
|
| 29960 |
+
}),
|
| 29961 |
+
setParams: (params) => {
|
| 29962 |
+
const { params: previousParams } = get();
|
| 29963 |
+
set({ params: {
|
| 29964 |
+
...previousParams,
|
| 29965 |
+
...params
|
| 29966 |
+
} });
|
| 29967 |
+
},
|
| 29968 |
+
handleServerResponse: async (params) => {
|
| 29969 |
+
const { originalImage, setMetadata, setPreviewImage, setOriginalImageHash, applyModifiedHeadToCanvas, modifyImage } = useMainStore.getState();
|
| 29970 |
+
if (typeof params.error === "string") {
|
| 29971 |
+
console.error(`handleServerResponse: failed to perform the request, resetting the app (${params.error})`);
|
| 29972 |
+
setPreviewImage(originalImage);
|
| 29973 |
+
setOriginalImageHash("");
|
| 29974 |
+
} else if (typeof params.image !== "undefined") {
|
| 29975 |
+
const image = await convertImageToBase64(params.image);
|
| 29976 |
+
setPreviewImage(image);
|
| 29977 |
+
} else if (typeof params.loaded !== "undefined") {
|
| 29978 |
+
setOriginalImageHash(params.loaded.h);
|
| 29979 |
+
setMetadata({
|
| 29980 |
+
center: params.loaded.c,
|
| 29981 |
+
size: params.loaded.s,
|
| 29982 |
+
bbox: params.loaded.b,
|
| 29983 |
+
angle: params.loaded.a
|
| 29984 |
+
});
|
| 29985 |
+
await modifyImage({
|
| 29986 |
+
landmark: {
|
| 29987 |
+
group: "background",
|
| 29988 |
+
distance: 0,
|
| 29989 |
+
vector: { x: 0.5, y: 0.5, z: 0 }
|
| 29990 |
+
},
|
| 29991 |
+
vector: { x: 0, y: 0, z: 0 },
|
| 29992 |
+
mode: "PRIMARY"
|
| 29993 |
+
});
|
| 29994 |
+
} else {
|
| 29995 |
+
console.log(`handleServerResponse: received an unknown json`, params);
|
| 29996 |
+
}
|
| 29997 |
+
},
|
| 29998 |
+
applyModifiedHeadToCanvas: async (headImageBlob) => {
|
| 29999 |
+
return new Promise(async (resolve, reject) => {
|
| 30000 |
+
const originalImg = new Image;
|
| 30001 |
+
const { originalImage, metadata } = useMainStore.getState();
|
| 30002 |
+
originalImg.onload = async () => {
|
| 30003 |
+
const canvas = document.createElement("canvas");
|
| 30004 |
+
const ctx = canvas.getContext("2d");
|
| 30005 |
+
if (!ctx) {
|
| 30006 |
+
reject(new Error("Failed to get 2D context"));
|
| 30007 |
+
return;
|
| 30008 |
+
}
|
| 30009 |
+
const pixelRatio = window.devicePixelRatio || 1;
|
| 30010 |
+
canvas.width = originalImg.width;
|
| 30011 |
+
canvas.height = originalImg.height;
|
| 30012 |
+
ctx.drawImage(originalImg, 0, 0);
|
| 30013 |
+
const headImageBitmap = await createImageBitmap(headImageBlob, {
|
| 30014 |
+
resizeQuality: "high"
|
| 30015 |
+
});
|
| 30016 |
+
const tempCanvas = document.createElement("canvas");
|
| 30017 |
+
const tempCtx = tempCanvas.getContext("2d");
|
| 30018 |
+
if (!tempCtx) {
|
| 30019 |
+
reject(new Error("Failed to get 2D context for temporary canvas"));
|
| 30020 |
+
return;
|
| 30021 |
+
}
|
| 30022 |
+
tempCanvas.width = headImageBitmap.width;
|
| 30023 |
+
tempCanvas.height = headImageBitmap.height;
|
| 30024 |
+
tempCtx.drawImage(headImageBitmap, 0, 0);
|
| 30025 |
+
const gradientSize = 20;
|
| 30026 |
+
const gradient = tempCtx.createRadialGradient(tempCanvas.width / 2, tempCanvas.height / 2, Math.min(tempCanvas.width, tempCanvas.height) / 2 - gradientSize, tempCanvas.width / 2, tempCanvas.height / 2, Math.min(tempCanvas.width, tempCanvas.height) / 2);
|
| 30027 |
+
gradient.addColorStop(0, "rgba(0, 0, 0, 1)");
|
| 30028 |
+
gradient.addColorStop(1, "rgba(0, 0, 0, 0)");
|
| 30029 |
+
tempCtx.globalCompositeOperation = "destination-in";
|
| 30030 |
+
tempCtx.fillStyle = gradient;
|
| 30031 |
+
tempCtx.fillRect(0, 0, tempCanvas.width, tempCanvas.height);
|
| 30032 |
+
console.log("metadata:", metadata);
|
| 30033 |
+
ctx.save();
|
| 30034 |
+
ctx.rotate(metadata.angle);
|
| 30035 |
+
ctx.restore();
|
| 30036 |
+
resolve(canvas.toDataURL("image/png"));
|
| 30037 |
+
};
|
| 30038 |
+
originalImg.src = originalImage;
|
| 30039 |
+
});
|
| 30040 |
+
},
|
| 30041 |
+
modifyImage: async ({ landmark, vector, mode }) => {
|
| 30042 |
+
const {
|
| 30043 |
+
originalImage,
|
| 30044 |
+
originalImageHash,
|
| 30045 |
+
params: previousParams,
|
| 30046 |
+
setParams,
|
| 30047 |
+
setError,
|
| 30048 |
+
isFollowingCursor,
|
| 30049 |
+
isGazingAtCursor
|
| 30050 |
+
} = get();
|
| 30051 |
+
if (!originalImage) {
|
| 30052 |
+
console.error("Image file or facePoke not available");
|
| 30053 |
+
return;
|
| 30054 |
+
}
|
| 30055 |
+
const params = {
|
| 30056 |
+
...previousParams
|
| 30057 |
+
};
|
| 30058 |
+
const generalControl = {
|
| 30059 |
+
minX: -0.3,
|
| 30060 |
+
maxX: 0.3,
|
| 30061 |
+
minY: -0.3,
|
| 30062 |
+
maxY: 0.3
|
| 30063 |
+
};
|
| 30064 |
+
const pupilControl = {
|
| 30065 |
+
minX: -0.5,
|
| 30066 |
+
maxX: 0.5,
|
| 30067 |
+
minY: -0.5,
|
| 30068 |
+
maxY: 0.5
|
| 30069 |
+
};
|
| 30070 |
+
const eyeControl = {
|
| 30071 |
+
minX: -0.5,
|
| 30072 |
+
maxX: 0.5,
|
| 30073 |
+
minY: -0.5,
|
| 30074 |
+
maxY: 0.5
|
| 30075 |
+
};
|
| 30076 |
+
if (isFollowingCursor) {
|
| 30077 |
+
const yawMin = -40;
|
| 30078 |
+
const yawMax = 40;
|
| 30079 |
+
params.rotate_yaw = mapRange(-vector.x, generalControl.minX, generalControl.maxX, yawMin, yawMax);
|
| 30080 |
+
const pitchMin = -40;
|
| 30081 |
+
const pitchMax = 40;
|
| 30082 |
+
params.rotate_pitch = mapRange(vector.y, generalControl.minY, generalControl.maxY, pitchMin, pitchMax);
|
| 30083 |
+
}
|
| 30084 |
+
if (isGazingAtCursor) {
|
| 30085 |
+
const pupilsXMin = -15;
|
| 30086 |
+
const pupilsXMax = 15;
|
| 30087 |
+
params.pupil_x = mapRange(vector.x, pupilControl.minX, pupilControl.maxX, pupilsXMin, pupilsXMax);
|
| 30088 |
+
const pupilsYMin = -2;
|
| 30089 |
+
const pupilsYMax = 8;
|
| 30090 |
+
params.pupil_y = mapRange(-vector.y, pupilControl.minY, pupilControl.maxY, pupilsYMin, pupilsYMax);
|
| 30091 |
+
}
|
| 30092 |
+
if (mode !== "HOVERING") {
|
| 30093 |
+
switch (landmark.group) {
|
| 30094 |
+
case "leftEye":
|
| 30095 |
+
case "rightEye":
|
| 30096 |
+
const pupilsXMin = -15;
|
| 30097 |
+
const pupilsXMax = 15;
|
| 30098 |
+
params.pupil_x = mapRange(vector.x, pupilControl.minX, pupilControl.maxX, pupilsXMin, pupilsXMax);
|
| 30099 |
+
const eyesMin = -20;
|
| 30100 |
+
const eyesMax = 5;
|
| 30101 |
+
params.eyes = mapRange(-vector.y, eyeControl.minX, eyeControl.maxX, eyesMin, eyesMax);
|
| 30102 |
+
break;
|
| 30103 |
+
case "leftEyebrow":
|
| 30104 |
+
case "rightEyebrow":
|
| 30105 |
+
const eyebrowMin = -10;
|
| 30106 |
+
const eyebrowMax = 15;
|
| 30107 |
+
params.eyebrow = mapRange(-vector.y, eyeControl.minY, eyeControl.maxY, eyebrowMin, eyebrowMax);
|
| 30108 |
+
break;
|
| 30109 |
+
case "lips":
|
| 30110 |
+
const aaaMin = -30;
|
| 30111 |
+
const aaaMax = 120;
|
| 30112 |
+
params.aaa = mapRange(-vector.y, eyeControl.minY, eyeControl.maxY, aaaMin, aaaMax);
|
| 30113 |
+
const eeeMin = -20;
|
| 30114 |
+
const eeeMax = 15;
|
| 30115 |
+
params.eee = mapRange(vector.x, eyeControl.minX, eyeControl.maxX, eeeMin, eeeMax);
|
| 30116 |
+
break;
|
| 30117 |
+
case "faceOval":
|
| 30118 |
+
const rollMin = -40;
|
| 30119 |
+
const rollMax = 40;
|
| 30120 |
+
params.rotate_roll = mapRange(vector.x, eyeControl.minX, eyeControl.maxX, rollMin, rollMax);
|
| 30121 |
+
break;
|
| 30122 |
+
case "background":
|
| 30123 |
+
const yawMin = -40;
|
| 30124 |
+
const yawMax = 40;
|
| 30125 |
+
params.rotate_yaw = mapRange(-vector.x, generalControl.minX, generalControl.maxX, yawMin, yawMax);
|
| 30126 |
+
const pitchMin = -40;
|
| 30127 |
+
const pitchMax = 40;
|
| 30128 |
+
params.rotate_pitch = mapRange(vector.y, eyeControl.minY, eyeControl.maxY, pitchMin, pitchMax);
|
| 30129 |
+
break;
|
| 30130 |
+
default:
|
| 30131 |
+
return;
|
| 30132 |
+
}
|
| 30133 |
+
}
|
| 30134 |
+
for (const [key, value] of Object.entries(params)) {
|
| 30135 |
+
if (isNaN(value) || !isFinite(value)) {
|
| 30136 |
+
console.log(`${key} is NaN, aborting`);
|
| 30137 |
+
return;
|
| 30138 |
+
}
|
| 30139 |
+
}
|
| 30140 |
+
setParams(params);
|
| 30141 |
+
try {
|
| 30142 |
+
if (originalImageHash) {
|
| 30143 |
+
facePoke.transformImage(originalImageHash, params);
|
| 30144 |
+
}
|
| 30145 |
+
} catch (error) {
|
| 30146 |
+
setError("Failed to modify image");
|
| 30147 |
+
}
|
| 30148 |
+
}
|
| 30149 |
+
}));
|
| 30150 |
+
|
| 30151 |
// node_modules/beautiful-react-hooks/esm/useThrottledCallback.js
|
| 30152 |
var import_react6 = __toESM(require_react(), 1);
|
| 30153 |
var import_lodash = __toESM(require_lodash(), 1);
|
|
|
|
| 32966 |
|
| 32967 |
// src/hooks/useFaceLandmarkDetection.tsx
|
| 32968 |
function useFaceLandmarkDetection() {
|
|
|
|
| 32969 |
const setError = useMainStore((s2) => s2.setError);
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 32970 |
const previewImage = useMainStore((s2) => s2.previewImage);
|
| 32971 |
+
const handleServerResponse = useMainStore((s2) => s2.handleServerResponse);
|
| 32972 |
+
const faceLandmarks = useMainStore((s2) => s2.faceLandmarks);
|
| 32973 |
+
const throttleInMs = 180;
|
|
|
|
|
|
|
| 32974 |
const [isMediaPipeReady, setIsMediaPipeReady] = import_react7.useState(false);
|
| 32975 |
const [isDrawingUtilsReady, setIsDrawingUtilsReady] = import_react7.useState(false);
|
|
|
|
| 32976 |
const [dragStart, setDragStart] = import_react7.useState(null);
|
|
|
|
| 32977 |
const [isDragging, setIsDragging] = import_react7.useState(false);
|
|
|
|
| 32978 |
const dragStartRef = import_react7.useRef(null);
|
|
|
|
|
|
|
| 32979 |
const [currentLandmark, setCurrentLandmark] = import_react7.useState(null);
|
| 32980 |
const [previousLandmark, setPreviousLandmark] = import_react7.useState(null);
|
| 32981 |
const [currentOpacity, setCurrentOpacity] = import_react7.useState(0);
|
| 32982 |
const [previousOpacity, setPreviousOpacity] = import_react7.useState(0);
|
|
|
|
| 32983 |
const canvasRef = import_react7.useRef(null);
|
| 32984 |
const mediaPipeRef = import_react7.useRef({
|
| 32985 |
faceLandmarker: null,
|
|
|
|
| 33016 |
} else {
|
| 33017 |
faceLandmarker.close();
|
| 33018 |
}
|
| 33019 |
+
} catch (error) {
|
| 33020 |
+
console.error("Error during MediaPipe initialization:", error);
|
| 33021 |
setError("Failed to initialize face detection. Please try refreshing the page.");
|
| 33022 |
}
|
| 33023 |
};
|
|
|
|
| 33103 |
}
|
| 33104 |
}, [landmarkCenters]);
|
| 33105 |
const detectFaceLandmarks = import_react7.useCallback(async (imageDataUrl) => {
|
| 33106 |
+
     const { setFaceLandmarks, setBlendShapes } = useMainStore.getState();
     if (!isMediaPipeReady) {
       console.log("MediaPipe not ready. Skipping detection.");
       return;
 ...
     }
     detectFaceLandmarks(previewImage);
   }, [isMediaPipeReady, isDrawingUtilsReady, previewImage]);
   const modifyImageWithRateLimit = useThrottledCallback_default((params) => {
+    useMainStore.getState().modifyImage(params);
+  }, [], throttleInMs);
+  import_react7.useEffect(() => {
+    facePoke.setOnServerResponse(handleServerResponse);
+  }, [handleServerResponse]);
+  const handleStart = import_react7.useCallback((x2, y2, mode) => {
     if (!canvasRef.current)
       return;
     const rect = canvasRef.current.getBoundingClientRect();
+    const normalizedX = (x2 - rect.left) / rect.width;
+    const normalizedY = (y2 - rect.top) / rect.height;
+    const landmark = findClosestLandmark(normalizedX, normalizedY);
     setActiveLandmark(landmark);
+    setDragStart({ x: normalizedX, y: normalizedY });
+    dragStartRef.current = { x: normalizedX, y: normalizedY };
   }, [findClosestLandmark, setActiveLandmark, setDragStart]);
+  const handleMove = import_react7.useCallback((x2, y2, mode) => {
     if (!canvasRef.current)
       return;
     const rect = canvasRef.current.getBoundingClientRect();
+    const normalizedX = (x2 - rect.left) / rect.width;
+    const normalizedY = (y2 - rect.top) / rect.height;
+    const landmark = findClosestLandmark(normalizedX, normalizedY, dragStart && dragStartRef.current ? currentLandmark?.group : undefined);
+    const landmarkData = landmarkCenters[landmark?.group];
+    const vector = landmarkData ? {
+      x: normalizedX - landmarkData.x,
+      y: normalizedY - landmarkData.y,
+      z: 0
+    } : {
+      x: 0.5,
+      y: 0.5,
+      z: 0
+    };
     if (dragStart && dragStartRef.current) {
+      setIsDragging(true);
       modifyImageWithRateLimit({
         landmark: currentLandmark || landmark,
+        vector,
+        mode
       });
     } else {
       if (!currentLandmark || currentLandmark?.group !== landmark?.group) {
         setActiveLandmark(landmark);
       }
+      modifyImageWithRateLimit({
+        landmark,
+        vector,
+        mode: "HOVERING"
+      });
     }
+  }, [currentLandmark, dragStart, setActiveLandmark, setIsDragging, modifyImageWithRateLimit, landmarkCenters]);
+  const handleEnd = import_react7.useCallback((x2, y2, mode) => {
     if (!canvasRef.current)
       return;
     const rect = canvasRef.current.getBoundingClientRect();
+    const normalizedX = (x2 - rect.left) / rect.width;
+    const normalizedY = (y2 - rect.top) / rect.height;
     if (dragStart && dragStartRef.current) {
+      const landmark = findClosestLandmark(normalizedX, normalizedY, currentLandmark?.group);
       modifyImageWithRateLimit({
         landmark: currentLandmark || landmark,
         vector: {
+          x: normalizedX - landmarkCenters[landmark.group].x,
+          y: normalizedY - landmarkCenters[landmark.group].y,
           z: 0
+        },
+        mode
       });
     }
     setIsDragging(false);
     dragStartRef.current = null;
     setActiveLandmark(undefined);
+  }, [currentLandmark, isDragging, modifyImageWithRateLimit, findClosestLandmark, setActiveLandmark, landmarkCenters, setIsDragging]);
+  const handleMouseDown = import_react7.useCallback((event) => {
+    const mode = event.button === 0 ? "PRIMARY" : "SECONDARY";
+    handleStart(event.clientX, event.clientY, mode);
+  }, [handleStart]);
+  const handleMouseMove = import_react7.useCallback((event) => {
+    const mode = event.buttons === 1 ? "PRIMARY" : "SECONDARY";
+    handleMove(event.clientX, event.clientY, mode);
+  }, [handleMove]);
+  const handleMouseUp = import_react7.useCallback((event) => {
+    const mode = event.buttons === 1 ? "PRIMARY" : "SECONDARY";
+    handleEnd(event.clientX, event.clientY, mode);
+  }, [handleEnd]);
+  const handleTouchStart = import_react7.useCallback((event) => {
+    const mode = event.touches.length === 1 ? "PRIMARY" : "SECONDARY";
+    const touch = event.touches[0];
+    handleStart(touch.clientX, touch.clientY, mode);
+  }, [handleStart]);
+  const handleTouchMove = import_react7.useCallback((event) => {
+    const mode = event.touches.length === 1 ? "PRIMARY" : "SECONDARY";
+    const touch = event.touches[0];
+    handleMove(touch.clientX, touch.clientY, mode);
+  }, [handleMove]);
+  const handleTouchEnd = import_react7.useCallback((event) => {
+    const mode = event.changedTouches.length === 1 ? "PRIMARY" : "SECONDARY";
+    const touch = event.changedTouches[0];
+    handleEnd(touch.clientX, touch.clientY, mode);
+  }, [handleEnd]);
   return {
     canvasRef,
     canvasRefCallback,
     mediaPipeRef,
     isMediaPipeReady,
     isDrawingUtilsReady,
     handleMouseDown,
     handleMouseUp,
     handleMouseMove,
+    handleTouchStart,
+    handleTouchMove,
+    handleTouchEnd,
     currentLandmark,
     currentOpacity
   };
 }
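The handlers added above all follow the same recipe: project the pointer's client coordinates into the canvas's normalized [0, 1] space, find the closest face landmark, and express the drag as a vector from that landmark's center. A minimal TypeScript sketch of that recipe (the names `clientToNormalized` and `dragVector` are illustrative, not identifiers from the bundle):

// Sketch only: the coordinate math used by handleStart/handleMove/handleEnd.
interface LandmarkCenter { x: number; y: number }

// Map a MouseEvent/Touch clientX/clientY pair into the canvas's [0, 1] space,
// so the vector is resolution-independent.
function clientToNormalized(
  canvas: HTMLCanvasElement,
  clientX: number,
  clientY: number
): { x: number; y: number } {
  const rect = canvas.getBoundingClientRect();
  return {
    x: (clientX - rect.left) / rect.width,
    y: (clientY - rect.top) / rect.height,
  };
}

// Express the drag as an offset from the landmark's center; fall back to the
// canvas center when nothing is under the cursor, mirroring the
// { x: 0.5, y: 0.5, z: 0 } default in the bundled code.
function dragVector(
  pointer: { x: number; y: number },
  center?: LandmarkCenter
): { x: number; y: number; z: number } {
  if (!center) return { x: 0.5, y: 0.5, z: 0 };
  return { x: pointer.x - center.x, y: pointer.y - center.y, z: 0 };
}

The mouse and touch wrappers then only decide the mode ("PRIMARY" vs "SECONDARY") before delegating to the same three handlers, which is what lets one code path serve desktop and mobile.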
 
+// src/components/About.tsx
 var jsx_dev_runtime2 = __toESM(require_jsx_dev_runtime(), 1);
+function About() {
   return jsx_dev_runtime2.jsxDEV("div", {
     className: "flex flex-row items-center justify-center font-sans mt-4 w-full",
     children: [
       jsx_dev_runtime2.jsxDEV("span", {
+        className: "text-neutral-900 text-sm",
+        style: { textShadow: "rgb(255 255 255 / 80%) 0px 0px 2px" },
+        children: "Click and drag on the image."
+      }, undefined, false, undefined, this),
+      jsx_dev_runtime2.jsxDEV("span", {
+        className: "ml-2 mr-1",
         children: jsx_dev_runtime2.jsxDEV("img", {
           src: "/hf-logo.svg",
           alt: "Hugging Face",
 ...
 // src/hooks/useFacePokeAPI.ts
 var import_react8 = __toESM(require_react(), 1);
 function useFacePokeAPI() {
   const [isDebugMode, setIsDebugMode] = import_react8.useState(false);
   const [interruptMessage, setInterruptMessage] = import_react8.useState(null);
   const [isLoading, setIsLoading] = import_react8.useState(false);
 ...
   }, []);
   return {
     facePoke,
     isDebugMode,
     setIsDebugMode,
     interruptMessage,
 ...
       children: jsx_dev_runtime4.jsxDEV("div", {
         className: "min-h-screen w-full py-8 flex flex-col justify-center",
         children: jsx_dev_runtime4.jsxDEV("div", {
+          className: "flex flex-col items-center justify-center p-4 sm:max-w-5xl sm:mx-auto",
           children
         }, undefined, false, undefined, this)
       }, undefined, false, undefined, this)
     }, undefined, false, undefined, this);
 }
 
 // src/app.tsx
 var jsx_dev_runtime5 = __toESM(require_jsx_dev_runtime(), 1);
 function App() {
 ...
   const setError = useMainStore((s2) => s2.setError);
   const imageFile = useMainStore((s2) => s2.imageFile);
   const setImageFile = useMainStore((s2) => s2.setImageFile);
+  const isGazingAtCursor = useMainStore((s2) => s2.isGazingAtCursor);
+  const setIsGazingAtCursor = useMainStore((s2) => s2.setIsGazingAtCursor);
+  const isFollowingCursor = useMainStore((s2) => s2.isFollowingCursor);
+  const setIsFollowingCursor = useMainStore((s2) => s2.setIsFollowingCursor);
   const previewImage = useMainStore((s2) => s2.previewImage);
+  const status = useMainStore((s2) => s2.status);
+  const blendShapes = useMainStore((s2) => s2.blendShapes);
   const {
     isDebugMode,
     setIsDebugMode,
     interruptMessage
   } = useFacePokeAPI();
   const {
     canvasRefCallback,
     isMediaPipeReady,
     handleMouseDown,
     handleMouseUp,
     handleMouseMove,
+    handleTouchStart,
+    handleTouchMove,
+    handleTouchEnd,
     currentOpacity
   } = useFaceLandmarkDetection();
   const videoRef = import_react9.useRef(null);
+  const handleFileChange = import_react9.useCallback((event) => {
     const files = event.target.files;
+    setImageFile(files?.[0] || undefined);
+  }, [setImageFile]);
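`handleFileChange` only stores the `File` object; turning it into a previewable image happens elsewhere in the store. A minimal sketch of that conversion step, assuming a FileReader-based helper (the name `convertImageToBase64` and this body are illustrative, not the bundle's actual implementation):

// Sketch only: File -> data URI, suitable for an <img src> or for sending
// over the WebSocket as a base64 payload.
async function convertImageToBase64(file: File): Promise<string> {
  return new Promise((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => resolve(reader.result as string);
    reader.onerror = () => reject(reader.error);
    reader.readAsDataURL(file); // yields "data:image/...;base64,..."
  });
}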
   const handleDownload = import_react9.useCallback(() => {
     if (previewImage) {
       const link = document.createElement("a");
 ...
           className: "flex items-center space-x-2",
           children: [
             jsx_dev_runtime5.jsxDEV("div", {
+              className: "flex items-center justify-center",
               children: [
                 jsx_dev_runtime5.jsxDEV("input", {
                   id: "imageInput",
 ...
                   className: `cursor-pointer inline-flex items-center px-3 h-10 border border-transparent text-sm font-medium rounded-md text-white ${isMediaPipeReady ? "bg-slate-600 hover:bg-slate-500" : "bg-slate-500 cursor-not-allowed"} focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-slate-500 shadow-xl`,
                   children: [
                     jsx_dev_runtime5.jsxDEV(Spinner, {}, undefined, false, undefined, this),
+                    imageFile ? truncateFileName(imageFile.name, 32) : isMediaPipeReady ? "Choose a portrait photo" : "Initializing..."
                   ]
                 }, undefined, true, undefined, this)
               ]
 ...
             }, undefined, true, undefined, this)
           ]
         }, undefined, true, undefined, this),
+        previewImage && jsx_dev_runtime5.jsxDEV("div", {
+          className: "flex items-center space-x-2",
+          children: jsx_dev_runtime5.jsxDEV("label", {
+            className: "mt-4 flex items-center",
+            children: [
+              jsx_dev_runtime5.jsxDEV("input", {
+                type: "checkbox",
+                checked: isDebugMode,
+                onChange: (e2) => setIsDebugMode(e2.target.checked),
+                className: "mr-2"
+              }, undefined, false, undefined, this),
+              "Show face markers"
+            ]
+          }, undefined, true, undefined, this)
+        }, undefined, false, undefined, this)
       ]
     }, undefined, true, undefined, this),
     previewImage && jsx_dev_runtime5.jsxDEV("div", {
 ...
       jsx_dev_runtime5.jsxDEV("canvas", {
         ref: canvasRefCallback,
         className: "absolute top-0 left-0 w-full h-full select-none",
         onMouseDown: handleMouseDown,
         onMouseUp: handleMouseUp,
         onMouseMove: handleMouseMove,
+        onTouchStart: handleTouchStart,
+        onTouchMove: handleTouchMove,
+        onTouchEnd: handleTouchEnd,
         style: {
           position: "absolute",
           top: 0,
 ...
       canDisplayBlendShapes && displayBlendShapes
     ]
   }, undefined, true, undefined, this),
+  jsx_dev_runtime5.jsxDEV(About, {}, undefined, false, undefined, this)
   ]
 }, undefined, true, undefined, this);
 }
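`modifyImageWithRateLimit` wraps the store's `modifyImage` in `useThrottledCallback_default(..., [], throttleInMs)` so rapid pointer moves don't flood the server. The bundled throttle implementation is not shown in this diff; a minimal trailing-edge throttle with the same observable behavior might look like this (a sketch, not the bundle's `useThrottledCallback`):

// Sketch only: at most one call per waitMs, and the most recent arguments
// are always delivered, so the final drag position is never dropped.
function throttle<T extends unknown[]>(
  fn: (...args: T) => void,
  waitMs: number
): (...args: T) => void {
  let lastCall = 0;
  let pending: ReturnType<typeof setTimeout> | null = null;
  let lastArgs: T | null = null;
  return (...args: T) => {
    lastArgs = args;
    const now = Date.now();
    const remaining = waitMs - (now - lastCall);
    if (remaining <= 0) {
      // Leading call: inside the allowed rate, fire immediately.
      lastCall = now;
      fn(...args);
    } else if (pending === null) {
      // Trailing call: schedule one delivery for when the window elapses.
      pending = setTimeout(() => {
        pending = null;
        lastCall = Date.now();
        if (lastArgs) fn(...lastArgs);
      }, remaining);
    }
  };
}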
requirements.txt
CHANGED

@@ -7,12 +7,15 @@
 torch
 torchvision
 torchaudio
-
+torchgeometry
 
 # --------------------------------------------------------------------
 # Common libraries for LivePortrait and all
 # --------------------------------------------------------------------
 
+# LRU cache compatible with asyncio
+async-lru==2.0.4
+
 # note: gradio is only used for the cropping utility
 gradio==5.0.0b4
 
@@ -35,8 +38,8 @@
 onnxruntime-gpu==1.19.2
 onnx==1.16.2
 scikit-image==0.20.0
-albumentations==1.
-matplotlib==3.
+albumentations==1.3.1
+matplotlib==3.7.2
 tyro==0.8.5
 chumpy==0.70
 
@@ -45,16 +48,24 @@
 tensorflow==2.12.0
 tensorboard==2.12.0
 transformers==4.39.2
-huggingface-hub==0.25.1
-safetensors==0.4.5
 
 gdown==5.2.0
 requests==2.32.3
 omegaconf==2.3.0
+
 pydantic==2.9.2
-
-
+
+# --------------------------------------------------------------------
+# RESERVED FOR FUTURE USAGE
+#
+# (it adds bloat, so you can remove them if you want)
+# --------------------------------------------------------------------
 aiohttp==3.10.5
+av==12.3.0
+einops==0.7.0
+safetensors==0.4.5
+huggingface-hub==0.25.1
+optimum-quanto==0.2.4
 
 # --------------------------------------------------------------------
 # Used for advanced LivePortrait features