Commit 5bd8810
Parent(s): d827205

first version of the video analyzer
Files changed:

- package-lock.json +88 -88
- package.json +2 -2
- public/images/onboarding/get-started.png +3 -0
- public/images/onboarding/get-started.xcf +3 -0
- public/images/onboarding/pick-an-example.png +3 -0
- public/images/onboarding/pick-an-example.xcf +3 -0
- src/app/main.tsx +60 -7
- src/components/toolbars/top-menu/file/index.tsx +45 -15
- src/components/toolbars/top-menu/index.tsx +8 -1
- src/lib/core/constants.ts +1 -1
- src/lib/hooks/useOpenFilePicker.ts +31 -5
- src/lib/utils/base64DataUriToFile.ts +1 -1
- src/services/io/extractFramesFromVideo.ts +75 -29
- src/services/io/extractScenesFromVideo.ts +369 -0
- src/services/io/fileDataToBase64.ts +13 -0
- src/services/io/parseFileIntoSegments.ts +45 -8
- src/services/io/useIO.ts +167 -82
- src/services/resolver/useResolver.ts +6 -4
- src/services/ui/getDefaultUIState.ts +2 -0
- src/services/ui/useUI.ts +7 -0
package-lock.json
CHANGED
@@ -11,9 +11,9 @@
     "dependencies": {
       "@aitube/broadway": "0.0.22",
       "@aitube/clap": "0.0.30",
-      "@aitube/clapper-services": "0.0.
+      "@aitube/clapper-services": "0.0.34",
       "@aitube/engine": "0.0.26",
-      "@aitube/timeline": "0.0.
+      "@aitube/timeline": "0.0.44",
       "@fal-ai/serverless-client": "^0.13.0",
       "@ffmpeg/ffmpeg": "^0.12.10",
       "@ffmpeg/util": "^0.12.1",
@@ -161,12 +161,12 @@
      }
    },
    "node_modules/@aitube/clapper-services": {
-     "version": "0.0.
-     "resolved": "https://registry.npmjs.org/@aitube/clapper-services/-/clapper-services-0.0.
-     "integrity": "sha512-
+     "version": "0.0.34",
+     "resolved": "https://registry.npmjs.org/@aitube/clapper-services/-/clapper-services-0.0.34.tgz",
+     "integrity": "sha512-d0HruUyWRIXozO67W+2iEUTuBdbojGPn9BnIf6cvxkVbywLwy4hKaN+SD+yQwOi/jqoqu+TTQYUoWSF93JDVEQ==",
      "peerDependencies": {
        "@aitube/clap": "0.0.30",
-       "@aitube/timeline": "0.0.
+       "@aitube/timeline": "0.0.44",
        "@monaco-editor/react": "4.6.0",
        "monaco-editor": "0.50.0",
        "react": "*",
@@ -192,9 +192,9 @@
      }
    },
    "node_modules/@aitube/timeline": {
-     "version": "0.0.
-     "resolved": "https://registry.npmjs.org/@aitube/timeline/-/timeline-0.0.
-     "integrity": "sha512-
+     "version": "0.0.44",
+     "resolved": "https://registry.npmjs.org/@aitube/timeline/-/timeline-0.0.44.tgz",
+     "integrity": "sha512-iELTtmLONWR7zuGLLr9cJRlMuNoBXWxZzgGerDeXa5VyQhDmjj4shLOlZLP78PiIVHMdRwZr16IN6ob899VmMw==",
      "dependencies": {
        "date-fns": "^3.6.0",
        "react-virtualized-auto-sizer": "^1.0.24"
@@ -3348,20 +3348,20 @@
      }
    },
    "node_modules/@floating-ui/core": {
-     "version": "1.6.
-     "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.
-     "integrity": "sha512-
+     "version": "1.6.5",
+     "resolved": "https://registry.npmjs.org/@floating-ui/core/-/core-1.6.5.tgz",
+     "integrity": "sha512-8GrTWmoFhm5BsMZOTHeGD2/0FLKLQQHvO/ZmQga4tKempYRLz8aqJGqXVuQgisnMObq2YZ2SgkwctN1LOOxcqA==",
      "dependencies": {
-       "@floating-ui/utils": "^0.2.
+       "@floating-ui/utils": "^0.2.5"
      }
    },
    "node_modules/@floating-ui/dom": {
-     "version": "1.6.
-     "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.
-     "integrity": "sha512-
+     "version": "1.6.8",
+     "resolved": "https://registry.npmjs.org/@floating-ui/dom/-/dom-1.6.8.tgz",
+     "integrity": "sha512-kx62rP19VZ767Q653wsP1XZCGIirkE09E0QUGNYTM/ttbbQHqcGPdSfWFxUyyNLc/W6aoJRBajOSXhP6GXjC0Q==",
      "dependencies": {
        "@floating-ui/core": "^1.6.0",
-       "@floating-ui/utils": "^0.2.
+       "@floating-ui/utils": "^0.2.5"
      }
    },
    "node_modules/@floating-ui/react-dom": {
@@ -3377,9 +3377,9 @@
      }
    },
    "node_modules/@floating-ui/utils": {
-     "version": "0.2.
-     "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.
-     "integrity": "sha512-
+     "version": "0.2.5",
+     "resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.5.tgz",
+     "integrity": "sha512-sTcG+QZ6fdEUObICavU+aB3Mp8HY4n14wYHdxK4fXjPmv3PXZZeY5RaguJmGyeH/CJQhX3fqKUtS4qc1LoHwhQ=="
    },
    "node_modules/@gar/promisify": {
      "version": "1.1.3",
@@ -3430,9 +3430,9 @@
      }
    },
    "node_modules/@huggingface/inference/node_modules/@huggingface/tasks": {
-     "version": "0.11.
-     "resolved": "https://registry.npmjs.org/@huggingface/tasks/-/tasks-0.11.
-     "integrity": "sha512-
+     "version": "0.11.3",
+     "resolved": "https://registry.npmjs.org/@huggingface/tasks/-/tasks-0.11.3.tgz",
+     "integrity": "sha512-IYq4OdlySdscjkFwm6iIqP1ZgKl4OGhvQFJWI7Yxpq2V8RmXcgIjiqk/65S6Ap7i+eyCdlOC4qweVy/ICNE0JA=="
    },
    "node_modules/@huggingface/jinja": {
      "version": "0.2.2",
@@ -4206,15 +4206,15 @@
      }
    },
    "node_modules/@langchain/core": {
-     "version": "0.2.
-     "resolved": "https://registry.npmjs.org/@langchain/core/-/core-0.2.
-     "integrity": "sha512-
+     "version": "0.2.18",
+     "resolved": "https://registry.npmjs.org/@langchain/core/-/core-0.2.18.tgz",
+     "integrity": "sha512-ru542BwNcsnDfjTeDbIkFIchwa54ctHZR+kVrC8U9NPS9/36iM8p8ruprOV7Zccj/oxtLE5UpEhV+9MZhVcFlA==",
      "dependencies": {
        "ansi-styles": "^5.0.0",
        "camelcase": "6",
        "decamelize": "1.2.0",
        "js-tiktoken": "^1.0.12",
-       "langsmith": "~0.1.
+       "langsmith": "~0.1.39",
        "ml-distance": "^4.0.0",
        "mustache": "^4.2.0",
        "p-queue": "^6.6.2",
@@ -6141,9 +6141,9 @@
      "integrity": "sha512-iQVztO09ZVfsletMiY+DpT/JRiBntdsdJ4uqk3UJFhrhS8mIC9ZOZbmfGSRs/kdbNPQkVyzucceDicQ/3Mlj9g=="
    },
    "node_modules/@react-three/drei": {
-     "version": "9.
-     "resolved": "https://registry.npmjs.org/@react-three/drei/-/drei-9.
-     "integrity": "sha512-
+     "version": "9.109.0",
+     "resolved": "https://registry.npmjs.org/@react-three/drei/-/drei-9.109.0.tgz",
+     "integrity": "sha512-LlJ1k0DO5UvBdjuv6WuSP5jXb1mXsQY3VeQTfzivCsHJH9pUsbxutLL7mk84w9MI7cZytv2Qcx2nU2HBm0eNpQ==",
      "dependencies": {
        "@babel/runtime": "^7.11.2",
        "@mediapipe/tasks-vision": "0.10.8",
@@ -7313,9 +7313,9 @@
      }
    },
    "node_modules/@testing-library/dom": {
-     "version": "10.
-     "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-10.
-     "integrity": "sha512-
+     "version": "10.4.0",
+     "resolved": "https://registry.npmjs.org/@testing-library/dom/-/dom-10.4.0.tgz",
+     "integrity": "sha512-pemlzrSESWbdAloYml3bAJMEfNh1Z7EduzqPKprCH5S341frlpYnUEW0H72dLxa6IsYr+mPno20GiSm+h9dEdQ==",
      "dev": true,
      "peer": true,
      "dependencies": {
@@ -7844,13 +7844,13 @@
      }
    },
    "node_modules/@vitest/expect": {
-     "version": "2.0.
-     "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-2.0.
-     "integrity": "sha512-
+     "version": "2.0.4",
+     "resolved": "https://registry.npmjs.org/@vitest/expect/-/expect-2.0.4.tgz",
+     "integrity": "sha512-39jr5EguIoanChvBqe34I8m1hJFI4+jxvdOpD7gslZrVQBKhh8H9eD7J/LJX4zakrw23W+dITQTDqdt43xVcJw==",
      "dev": true,
      "dependencies": {
-       "@vitest/spy": "2.0.
-       "@vitest/utils": "2.0.
+       "@vitest/spy": "2.0.4",
+       "@vitest/utils": "2.0.4",
        "chai": "^5.1.1",
        "tinyrainbow": "^1.2.0"
      },
@@ -7859,9 +7859,9 @@
      }
    },
    "node_modules/@vitest/pretty-format": {
-     "version": "2.0.
-     "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.0.
-     "integrity": "sha512-
+     "version": "2.0.4",
+     "resolved": "https://registry.npmjs.org/@vitest/pretty-format/-/pretty-format-2.0.4.tgz",
+     "integrity": "sha512-RYZl31STbNGqf4l2eQM1nvKPXE0NhC6Eq0suTTePc4mtMQ1Fn8qZmjV4emZdEdG2NOWGKSCrHZjmTqDCDoeFBw==",
      "dev": true,
      "dependencies": {
        "tinyrainbow": "^1.2.0"
@@ -7871,12 +7871,12 @@
      }
    },
    "node_modules/@vitest/runner": {
-     "version": "2.0.
-     "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-2.0.
-     "integrity": "sha512-
+     "version": "2.0.4",
+     "resolved": "https://registry.npmjs.org/@vitest/runner/-/runner-2.0.4.tgz",
+     "integrity": "sha512-Gk+9Su/2H2zNfNdeJR124gZckd5st4YoSuhF1Rebi37qTXKnqYyFCd9KP4vl2cQHbtuVKjfEKrNJxHHCW8thbQ==",
      "dev": true,
      "dependencies": {
-       "@vitest/utils": "2.0.
+       "@vitest/utils": "2.0.4",
        "pathe": "^1.1.2"
      },
      "funding": {
@@ -7884,12 +7884,12 @@
      }
    },
    "node_modules/@vitest/snapshot": {
-     "version": "2.0.
-     "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-2.0.
-     "integrity": "sha512-
+     "version": "2.0.4",
+     "resolved": "https://registry.npmjs.org/@vitest/snapshot/-/snapshot-2.0.4.tgz",
+     "integrity": "sha512-or6Mzoz/pD7xTvuJMFYEtso1vJo1S5u6zBTinfl+7smGUhqybn6VjzCDMhmTyVOFWwkCMuNjmNNxnyXPgKDoPw==",
      "dev": true,
      "dependencies": {
-       "@vitest/pretty-format": "2.0.
+       "@vitest/pretty-format": "2.0.4",
        "magic-string": "^0.30.10",
        "pathe": "^1.1.2"
      },
@@ -7898,9 +7898,9 @@
      }
    },
    "node_modules/@vitest/spy": {
-     "version": "2.0.
-     "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-2.0.
-     "integrity": "sha512-
+     "version": "2.0.4",
+     "resolved": "https://registry.npmjs.org/@vitest/spy/-/spy-2.0.4.tgz",
+     "integrity": "sha512-uTXU56TNoYrTohb+6CseP8IqNwlNdtPwEO0AWl+5j7NelS6x0xZZtP0bDWaLvOfUbaYwhhWp1guzXUxkC7mW7Q==",
      "dev": true,
      "dependencies": {
        "tinyspy": "^3.0.0"
@@ -7910,12 +7910,12 @@
      }
    },
    "node_modules/@vitest/utils": {
-     "version": "2.0.
-     "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-2.0.
-     "integrity": "sha512-
+     "version": "2.0.4",
+     "resolved": "https://registry.npmjs.org/@vitest/utils/-/utils-2.0.4.tgz",
+     "integrity": "sha512-Zc75QuuoJhOBnlo99ZVUkJIuq4Oj0zAkrQ2VzCqNCx6wAwViHEh5Fnp4fiJTE9rA+sAoXRf00Z9xGgfEzV6fzQ==",
      "dev": true,
      "dependencies": {
-       "@vitest/pretty-format": "2.0.
+       "@vitest/pretty-format": "2.0.4",
        "estree-walker": "^3.0.3",
        "loupe": "^3.1.1",
        "tinyrainbow": "^1.2.0"
@@ -7926,7 +7926,7 @@
    },
    "node_modules/@xenova/transformers": {
      "version": "3.0.0-alpha.0",
-     "resolved": "git+ssh://git@github.com/xenova/transformers.js.git#
+     "resolved": "git+ssh://git@github.com/xenova/transformers.js.git#c6aeb4be1bc1cdfa72e9d050f77b97dc9c8af362",
      "dependencies": {
        "@huggingface/jinja": "^0.2.2",
        "onnxruntime-web": "^1.18.0",
@@ -15062,9 +15062,9 @@
      }
    },
    "node_modules/msw/node_modules/type-fest": {
-     "version": "4.
-     "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.
-     "integrity": "sha512-
+     "version": "4.23.0",
+     "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.23.0.tgz",
+     "integrity": "sha512-ZiBujro2ohr5+Z/hZWHESLz3g08BBdrdLMieYFULJO+tWc437sn8kQsWLJoZErY8alNhxre9K4p3GURAG11n+w==",
      "engines": {
        "node": ">=16"
      },
@@ -15452,9 +15452,9 @@
      }
    },
    "node_modules/node-releases": {
-     "version": "2.0.
-     "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.
-     "integrity": "sha512-
+     "version": "2.0.18",
+     "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz",
+     "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g=="
    },
    "node_modules/nopt": {
      "version": "7.2.1",
@@ -15877,9 +15877,9 @@
      }
    },
    "node_modules/onnxruntime-node/node_modules/tar": {
-     "version": "7.4.
-     "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.
-     "integrity": "sha512-
+     "version": "7.4.1",
+     "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.1.tgz",
+     "integrity": "sha512-dDJzpQf7Nud96mCs3wtw+XUiWGpi9WHxytSusrg0lYlj/Kr11DnB5hfw5bNDQNzx52JJ2Vy+7l8AFivp6H7ETA==",
      "optional": true,
      "dependencies": {
        "@isaacs/fs-minipass": "^4.0.0",
@@ -15916,9 +15916,9 @@
      }
    },
    "node_modules/openai": {
-     "version": "4.
-     "resolved": "https://registry.npmjs.org/openai/-/openai-4.
-     "integrity": "sha512-
+     "version": "4.53.0",
+     "resolved": "https://registry.npmjs.org/openai/-/openai-4.53.0.tgz",
+     "integrity": "sha512-XoMaJsSLuedW5eoMEMmZbdNoXgML3ujcU5KfwRnC6rnbmZkHE2Q4J/SArwhqCxQRqJwHnQUj1LpiROmKPExZJA==",
      "dependencies": {
        "@types/node": "^18.11.18",
        "@types/node-fetch": "^2.6.4",
@@ -16939,9 +16939,9 @@
      }
    },
    "node_modules/query-string": {
-     "version": "9.
-     "resolved": "https://registry.npmjs.org/query-string/-/query-string-9.
-     "integrity": "sha512-
+     "version": "9.1.0",
+     "resolved": "https://registry.npmjs.org/query-string/-/query-string-9.1.0.tgz",
+     "integrity": "sha512-t6dqMECpCkqfyv2FfwVS1xcB6lgXW/0XZSaKdsCNGYkqMO76AFiJEg4vINzoDKcZa6MS7JX+OHIjwh06K5vczw==",
      "dependencies": {
        "decode-uri-component": "^0.4.1",
        "filter-obj": "^5.1.0",
@@ -19852,9 +19852,9 @@
      }
    },
    "node_modules/vite-node": {
-     "version": "2.0.
-     "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-2.0.
-     "integrity": "sha512-
+     "version": "2.0.4",
+     "resolved": "https://registry.npmjs.org/vite-node/-/vite-node-2.0.4.tgz",
+     "integrity": "sha512-ZpJVkxcakYtig5iakNeL7N3trufe3M6vGuzYAr4GsbCTwobDeyPJpE4cjDhhPluv8OvQCFzu2LWp6GkoKRITXA==",
      "dev": true,
      "dependencies": {
        "cac": "^6.7.14",
@@ -19888,18 +19888,18 @@
      }
    },
    "node_modules/vitest": {
-     "version": "2.0.
-     "resolved": "https://registry.npmjs.org/vitest/-/vitest-2.0.
-     "integrity": "sha512-
+     "version": "2.0.4",
+     "resolved": "https://registry.npmjs.org/vitest/-/vitest-2.0.4.tgz",
+     "integrity": "sha512-luNLDpfsnxw5QSW4bISPe6tkxVvv5wn2BBs/PuDRkhXZ319doZyLOBr1sjfB5yCEpTiU7xCAdViM8TNVGPwoog==",
      "dev": true,
      "dependencies": {
        "@ampproject/remapping": "^2.3.0",
-       "@vitest/expect": "2.0.
-       "@vitest/pretty-format": "^2.0.
-       "@vitest/runner": "2.0.
-       "@vitest/snapshot": "2.0.
-       "@vitest/spy": "2.0.
-       "@vitest/utils": "2.0.
+       "@vitest/expect": "2.0.4",
+       "@vitest/pretty-format": "^2.0.4",
+       "@vitest/runner": "2.0.4",
+       "@vitest/snapshot": "2.0.4",
+       "@vitest/spy": "2.0.4",
+       "@vitest/utils": "2.0.4",
        "chai": "^5.1.1",
        "debug": "^4.3.5",
        "execa": "^8.0.1",
@@ -19910,8 +19910,8 @@
        "tinypool": "^1.0.0",
        "tinyrainbow": "^1.2.0",
        "vite": "^5.0.0",
-       "vite-node": "2.0.
-       "why-is-node-running": "^2.
+       "vite-node": "2.0.4",
+       "why-is-node-running": "^2.3.0"
      },
      "bin": {
        "vitest": "vitest.mjs"
@@ -19925,8 +19925,8 @@
      "peerDependencies": {
        "@edge-runtime/vm": "*",
        "@types/node": "^18.0.0 || >=20.0.0",
-       "@vitest/browser": "2.0.
-       "@vitest/ui": "2.0.
+       "@vitest/browser": "2.0.4",
+       "@vitest/ui": "2.0.4",
        "happy-dom": "*",
        "jsdom": "*"
      },
package.json
CHANGED
@@ -37,9 +37,9 @@
   "dependencies": {
     "@aitube/broadway": "0.0.22",
     "@aitube/clap": "0.0.30",
-    "@aitube/clapper-services": "0.0.
+    "@aitube/clapper-services": "0.0.34",
     "@aitube/engine": "0.0.26",
-    "@aitube/timeline": "0.0.
+    "@aitube/timeline": "0.0.44",
     "@fal-ai/serverless-client": "^0.13.0",
     "@ffmpeg/ffmpeg": "^0.12.10",
     "@ffmpeg/util": "^0.12.1",
public/images/onboarding/get-started.png
ADDED
Git LFS Details

public/images/onboarding/get-started.xcf
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:864be2d20925acf9ff343ac45c3d877c656eb2b474f335316526181a204a82be
+size 70110

public/images/onboarding/pick-an-example.png
ADDED
Git LFS Details

public/images/onboarding/pick-an-example.xcf
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:29d46c5ed0ae1b1a4d9d11cf1103baf52b50ce204221d3667b7f481394ed75ba
+size 107646
src/app/main.tsx
CHANGED
@@ -5,7 +5,6 @@ import { ReflexContainer, ReflexSplitter, ReflexElement } from 'react-reflex'
 import { useSearchParams } from 'next/navigation'
 import { DndProvider, useDrop } from 'react-dnd'
 import { HTML5Backend, NativeTypes } from 'react-dnd-html5-backend'
-import { useTimeline } from '@aitube/timeline'

 import { Toaster } from '@/components/ui/sonner'
 import { cn } from '@/lib/utils'
@@ -14,22 +13,23 @@ import { Monitor } from '@/components/monitor'

 import { SettingsDialog } from '@/components/settings'
 import { LoadingDialog } from '@/components/dialogs/loader/LoadingDialog'
-import { useUI } from '@/services
+import { useUI, useIO } from '@/services'
 import { TopBar } from '@/components/toolbars/top-bar'
 import { Timeline } from '@/components/core/timeline'
-import { useIO } from '@/services/io/useIO'
 import { ChatView } from '@/components/assistant/ChatView'
 import { Editors } from '@/components/editors/Editors'
+import { useTheme } from '@/services/ui/useTheme'

 type DroppableThing = { files: File[] }

 function MainContent() {
   const ref = useRef<HTMLDivElement>(null)
-  const
+  const showWelcomeScreen = useUI((s) => s.showWelcomeScreen)
   const showTimeline = useUI((s) => s.showTimeline)
   const showAssistant = useUI((s) => s.showAssistant)
-
+  const theme = useTheme()
   const openFiles = useIO((s) => s.openFiles)
+  const isTopMenuOpen = useUI((s) => s.isTopMenuOpen)

   const [{ isOver, canDrop }, connectFileDrop] = useDrop({
     accept: [NativeTypes.FILE],
@@ -67,8 +67,7 @@ function MainContent() {
       <TopBar />
       <div
         className={cn(
-          `flex h-[calc(100vh-40px)] w-screen flex-row overflow-hidden
-          isEmpty ? 'opacity-0' : 'opacity-100'
+          `flex h-[calc(100vh-40px)] w-screen flex-row overflow-hidden`
         )}
       >
         <ReflexContainer orientation="vertical">
@@ -109,6 +108,60 @@ function MainContent() {
       </ReflexContainer>
     </div>

+      <div
+        className={cn(
+          showWelcomeScreen
+            ? 'pointer-events-auto z-[101] flex'
+            : 'pointer-events-none hidden',
+          `fixed top-[40px] h-[calc(100vh-40px)] w-screen flex-row overflow-hidden`,
+          `items-center justify-center`,
+          `bg-stone-950`
+        )}
+      >
+        <div
+          className="flex h-full w-full items-center justify-center"
+          style={{
+            backgroundImage:
+              'repeating-radial-gradient( circle at 0 0, transparent 0, #000000 7px ), repeating-linear-gradient( #37353455, #373534 )',
+          }}
+        >
+          <div
+            className={cn(
+              `pointer-events-none absolute left-[100px] top-[20px]`,
+              `opacity-90`
+            )}
+          >
+            <img
+              src="/images/onboarding/get-started.png"
+              width="180"
+              className=""
+            ></img>
+          </div>
+          <div
+            className={cn(
+              `pointer-events-none absolute left-[305px] top-[140px]`,
+              `transition-all duration-200 ease-out`,
+              isTopMenuOpen ? 'scale-100 opacity-90' : 'scale-90 opacity-0'
+            )}
+          >
+            <img src="/images/onboarding/pick-an-example.png" width="140"></img>
+          </div>
+          <div className="flex flex-col items-center justify-center space-y-6">
+            <h1 className="text-6xl font-bold">
+              Welcome to{' '}
+              <span className="" style={{ color: theme.defaultPrimaryColor }}>
+                Clapper
+              </span>
+              .
+            </h1>
+            <div className="flex flex-col items-center justify-center space-y-2 text-center text-2xl font-semibold">
+              <p>A free and open-source AI video editor,</p>
+              <p>designed for the age of generative filmmaking.</p>
+            </div>
+          </div>
+        </div>
+      </div>
+
       <SettingsDialog />
       <LoadingDialog />
       <Toaster />
src/components/toolbars/top-menu/file/index.tsx
CHANGED
@@ -17,6 +17,7 @@ import {
 import { useOpenFilePicker, useQueryStringParams } from '@/lib/hooks'
 import { IframeWarning } from '@/components/dialogs/iframe-warning'
 import { useIO, useUI } from '@/services'
+import { newClap } from '@aitube/clap'

 export function TopMenuFile() {
   const { clapUrl } = useQueryStringParams({
@@ -44,6 +45,9 @@ export function TopMenuFile() {

   const hasBetaAccess = useUI((s) => s.hasBetaAccess)

+  const showWelcomeScreen = useUI((s) => s.showWelcomeScreen)
+  const setShowWelcomeScreen = useUI((s) => s.setShowWelcomeScreen)
+
   useEffect(() => {
     ;(async () => {
       if (!clapUrl) {
@@ -67,21 +71,21 @@
     <MenubarMenu>
       <MenubarTrigger>File</MenubarTrigger>
       <MenubarContent>
-
-
-
-
-
-
-
-
-
+        <MenubarItem
+          onClick={() => {
+            setClap(newClap())
+            setShowWelcomeScreen(false)
+          }}
+        >
+          New Project<MenubarShortcut>⌘N</MenubarShortcut>
+        </MenubarItem>
+        <MenubarSeparator />
         <MenubarItem
           onClick={() => {
             openFilePicker()
           }}
         >
-          Open
+          Open project (.clap)<MenubarShortcut>⌘O</MenubarShortcut>
         </MenubarItem>
         <MenubarItem
           onClick={() => {
@@ -92,7 +96,7 @@
         </MenubarItem>
         <MenubarSeparator />
         <MenubarSub>
-          <MenubarSubTrigger>
+          <MenubarSubTrigger>Import an example</MenubarSubTrigger>
           <MenubarSubContent>
             <MenubarItem
               onClick={() => {
@@ -157,20 +161,46 @@
         <MenubarSeparator />
         <MenubarItem
           onClick={() => {
-
+            openFilePicker()
+          }}
+        >
+          Import screenplay (.txt)
+        </MenubarItem>
+        <MenubarItem
+          onClick={() => {
+            openFilePicker()
+          }}
+        >
+          Import video (.mp4)
+        </MenubarItem>
+        {/*
+        In case we want to show a video import wizard UI:
+
+        <MenubarItem
+          onClick={() => {
+            openFilePicker()
           }}
         >
-
+          Import video (.mp4)
         </MenubarItem>
         <MenubarSeparator />
+        */}
+
+        <MenubarSeparator />
+        <MenubarItem
+          onClick={() => {
+            saveVideoFile()
+          }}
+        >
+          Export full video (.mp4)
+        </MenubarItem>
         <MenubarItem
           onClick={() => {
             saveZipFile()
           }}
         >
-          Export
+          Export all assets (.zip)
         </MenubarItem>
-        <MenubarSeparator />
         {/*
         <MenubarItem onClick={() => {
           saveKdenline()
src/components/toolbars/top-menu/index.tsx
CHANGED
@@ -19,10 +19,17 @@ import { TopMenuPlugins } from './plugins'
 export function TopMenu() {
   const isBusyResolving = useResolver((s) => s.isBusyResolving)

+  const setIsTopMenuOpen = useUI((s) => s.setIsTopMenuOpen)
+
   const hasBetaAccess = useUI((s) => s.hasBetaAccess)

   return (
-    <Menubar
+    <Menubar
+      className="ml-1 w-full"
+      onValueChange={(value) => {
+        setIsTopMenuOpen(!!value)
+      }}
+    >
       <TopMenuLogo />
       <TopMenuFile />
       {hasBetaAccess && <TopMenuEdit />}
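The menubar now reports whether any menu is open through setIsTopMenuOpen, which the welcome screen in src/app/main.tsx uses to reveal the "pick an example" hint. The matching state lives in src/services/ui (useUI.ts and getDefaultUIState.ts, whose hunks are not shown on this page). A speculative sketch of what those additions could look like, assuming a zustand-style store as the useUI((s) => ...) selector syntax suggests; field names match the selectors used above, everything else is an assumption:

// Hypothetical sketch, not the actual diff of src/services/ui/useUI.ts.
import { create } from 'zustand'

interface UIState {
  showWelcomeScreen: boolean
  isTopMenuOpen: boolean
  setShowWelcomeScreen: (show: boolean) => void
  setIsTopMenuOpen: (open: boolean) => void
}

export const useUI = create<UIState>((set) => ({
  // assumption: the welcome screen stays visible until a project is created or opened
  showWelcomeScreen: true,
  isTopMenuOpen: false,
  setShowWelcomeScreen: (showWelcomeScreen) => set({ showWelcomeScreen }),
  setIsTopMenuOpen: (isTopMenuOpen) => set({ isTopMenuOpen }),
}))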
src/lib/core/constants.ts
CHANGED
@@ -3,7 +3,7 @@
 export const HARD_LIMIT_NB_MAX_ASSETS_TO_GENERATE_IN_PARALLEL = 32

 export const APP_NAME = 'Clapper.app'
-export const APP_REVISION = 'r20240722-
+export const APP_REVISION = 'r20240722-2258'

 export const APP_DOMAIN = 'Clapper.app'
 export const APP_LINK = 'https://clapper.app'
src/lib/hooks/useOpenFilePicker.ts
CHANGED
@@ -4,15 +4,24 @@ import { useFilePicker } from 'use-file-picker'
 import { parseFileName } from '@/services/io/parseFileName'
 import { useIO } from '@/services/io/useIO'

-const
+const defaultSupportedExtensions = ['clap', 'txt', 'mp4', 'mp3']

-export function useOpenFilePicker(
+export function useOpenFilePicker(
+  {
+    supportedExtensions = defaultSupportedExtensions,
+  }: {
+    supportedExtensions: string[]
+  } = {
+    supportedExtensions: defaultSupportedExtensions,
+  }
+) {
   const [isLoading, setIsLoading] = useState(false)
   const openClapBlob = useIO((s) => s.openClapBlob)
   const openScreenplay = useIO((s) => s.openScreenplay)
+  const openVideo = useIO((s) => s.openVideo)

   const { openFilePicker, filesContent, loading } = useFilePicker({
-    accept:
+    accept: supportedExtensions.map((ext) => `.${ext}`),
     readAs: 'ArrayBuffer',
   })

@@ -27,7 +36,7 @@ export function useOpenFilePicker() {

       const { fileName, projectName, extension } = parseFileName(input)

-      if (!
+      if (!defaultSupportedExtensions.includes(extension)) {
        console.error(`unsupported extension "${extension}"`)
        return
      }
@@ -52,10 +61,27 @@ export function useOpenFilePicker() {
        } finally {
          setIsLoading(false)
        }
+      } else if (extension === 'mp4') {
+        try {
+          setIsLoading(true)
+          await openVideo(projectName, fileName, blob)
+        } catch (err) {
+          console.error('failed to load the Clap file:', err)
+        } finally {
+          setIsLoading(false)
+        }
+      } else if (extension === 'mp3') {
+        alert('Initializing a project from a mp3 is not supported yet')
       }
     }
     fn()
-  }, [
+  }, [
+    fileData?.name,
+    fileData?.content,
+    openClapBlob,
+    openScreenplay,
+    openVideo,
+  ])

   return {
     openFilePicker,
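useOpenFilePicker now takes an optional supportedExtensions list (defaulting to clap, txt, mp4 and mp3) and routes .mp4 files to the new openVideo action from useIO. A hypothetical usage sketch; the component is illustrative, and only openFilePicker is confirmed in the hook's return value shown above:

// Illustrative sketch of calling the updated hook from a component.
import { useOpenFilePicker } from '@/lib/hooks'

function ImportVideoButton() {
  // restrict the picker to video files; .clap/.txt/.mp3 stay available elsewhere via the defaults
  const { openFilePicker } = useOpenFilePicker({
    supportedExtensions: ['mp4'],
  })

  return <button onClick={() => openFilePicker()}>Import video (.mp4)</button>
}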
src/lib/utils/base64DataUriToFile.ts
CHANGED
@@ -1,5 +1,5 @@
 export function base64DataUriToFile(dataUrl: string, fileName: string) {
-  var arr = dataUrl
+  var arr = `${dataUrl || ''}`.split(',')
   const st = `${arr[0] || ''}`
   const mime = `${st.match(/:(.*?);/)?.[1] || ''}`
   const bstr = atob(arr[arr.length - 1])
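The fix makes base64DataUriToFile split the data URI on the comma before decoding, instead of treating the raw string as an array. A small usage sketch, assuming the helper returns a File built from the decoded bytes, as its name and fileName parameter suggest; the input values are illustrative:

// Illustrative usage of the fixed helper (assumes it returns a File).
import { base64DataUriToFile } from '@/lib/utils/base64DataUriToFile'

// "SGVsbG8=" is the base64 encoding of "Hello"
const file = base64DataUriToFile('data:text/plain;base64,SGVsbG8=', 'hello.txt')
console.log(file.name, file.type) // expected: "hello.txt" "text/plain"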
src/services/io/extractFramesFromVideo.ts
CHANGED
@@ -1,6 +1,7 @@
 'use client'

 import { FFmpeg } from '@ffmpeg/ffmpeg'
+import { FileData } from '@ffmpeg/ffmpeg/dist/esm/types'
 import { toBlobURL } from '@ffmpeg/util'
 import mediaInfoFactory, {
   Track,
@@ -12,6 +13,7 @@ import mediaInfoFactory, {
   MenuTrack,
   OtherTrack,
 } from 'mediainfo.js'
+import { fileDataToBase64 } from './fileDataToBase64'

 interface FrameExtractorOptions {
   format: 'png' | 'jpg'
@@ -20,6 +22,7 @@ interface FrameExtractorOptions {
   sceneSamplingRate: number // Percentage of additional frames between scene changes (0-100)
   onProgress?: (progress: number) => void // Callback function for progress updates
   debug?: boolean
+  autoCrop?: boolean // New option to enable automatic cropping
 }

 export async function extractFramesFromVideo(
@@ -114,28 +117,57 @@ export async function extractFramesFromVideo(
   if (options.debug) {
     console.log('input.mp4 written!')
   }
-  // Prepare FFmpeg command
-  const sceneFilter = `select='gt(scene,0.4)'`
-  const additionalFramesFilter = `select='not(mod(n,${Math.floor(100 / options.sceneSamplingRate)}))'`
-  const scaleFilter = `scale='min(${options.maxWidth},iw)':min'(${options.maxHeight},ih)':force_original_aspect_ratio=decrease`

-  let
-
+  let cropParams = ''
+
+  if (options.autoCrop) {
+    // First pass: Detect crop parameters
+    const cropDetectCommand = [
+      '-i',
+      'input.mp4',
+      '-vf',
+      'cropdetect=limit=0.1:round=2:reset=0',
+      '-f',
+      'null',
+      '-t',
+      '10', // Analyze first 10 seconds
+      '-',
+    ]
+
     if (options.debug) {
-      console.log(
+      console.log(
+        'Executing crop detection command:',
+        cropDetectCommand.join(' ')
+      )
     }
-
-
-    const
-
-
-    const progress = Math.min(100, Math.round((currentTime / duration) * 100))
-    if (progress > lastProgress) {
-      lastProgress = progress
-      options.onProgress?.(progress)
+
+    ffmpeg.on('log', ({ message }) => {
+      const cropMatch = message.match(/crop=(\d+:\d+:\d+:\d+)/)
+      if (cropMatch) {
+        cropParams = cropMatch[1]
       }
+    })
+
+    await ffmpeg.exec(cropDetectCommand)
+
+    if (options.debug) {
+      console.log('Detected crop parameters:', cropParams)
     }
-
+
+    if (!cropParams) {
+      console.warn('No crop parameters detected. Proceeding without cropping.')
+    }
+  }
+
+  // Main processing command
+  const sceneFilter = `select='gt(scene,0.2)'`
+  const additionalFramesFilter = `select='not(mod(n,${Math.floor(100 / options.sceneSamplingRate)}))'`
+  const scaleFilter = `scale='min(${options.maxWidth},iw)':min'(${options.maxHeight},ih)':force_original_aspect_ratio=decrease`
+
+  let filterChain = `${sceneFilter},${additionalFramesFilter},${scaleFilter}`
+  if (options.autoCrop && cropParams) {
+    filterChain = `crop=${cropParams},${filterChain}`
+  }

   const ffmpegCommand = [
     '-i',
@@ -143,7 +175,7 @@ export async function extractFramesFromVideo(
     '-loglevel',
     'verbose',
     '-vf',
-
+    filterChain,
     '-vsync',
     '2',
     '-q:v',
@@ -156,9 +188,30 @@ export async function extractFramesFromVideo(
   ]

   if (options.debug) {
-    console.log('Executing FFmpeg command:', ffmpegCommand.join(' '))
+    console.log('Executing main FFmpeg command:', ffmpegCommand.join(' '))
   }

+  let lastProgress = 0
+  ffmpeg.on('log', ({ message }) => {
+    if (options.debug) {
+      console.log('FFmpeg log:', message)
+    }
+    const timeMatch = message.match(/time=(\d{2}):(\d{2}):(\d{2}\.\d{2})/)
+    if (timeMatch) {
+      const [, hours, minutes, seconds] = timeMatch
+      const currentTime =
+        parseInt(hours) * 3600 + parseInt(minutes) * 60 + parseFloat(seconds)
+      const progress = Math.min(100, Math.round((currentTime / duration) * 100))
+      if (progress > lastProgress) {
+        lastProgress = progress
+        options.onProgress?.(progress)
+      }
+    }
+  })
+
+  if (options.debug) {
+    console.log('Executing FFmpeg command:', ffmpegCommand.join(' '))
+  }
   try {
     await ffmpeg.exec(ffmpegCommand)
   } catch (error) {
@@ -189,16 +242,9 @@ export async function extractFramesFromVideo(
       console.log(`Processing frame file: ${file.name}`)
     }
     try {
-      const frameData = await ffmpeg.readFile(file.name)
-
-
-      let binary = ''
-      const bytes = new Uint8Array(frameData as any)
-      const len = bytes.byteLength
-      for (let i = 0; i < len; i++) {
-        binary += String.fromCharCode(bytes[i])
-      }
-      const base64Frame = window.btoa(binary)
+      const frameData: FileData = await ffmpeg.readFile(file.name)
+
+      const base64Frame = fileDataToBase64(frameData)

       frames.push(`data:image/${options.format};base64,${base64Frame}`)
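extractFramesFromVideo now accepts an autoCrop option: a first cropdetect pass scans the opening seconds of the video, and any detected crop=w:h:x:y parameters are prepended to the main filter chain before scene sampling and scaling. A hedged usage sketch; the call signature (video blob plus an options object) and the returned array of frame data URIs are inferred from the option interface and the frames array above, not shown verbatim in this diff:

// Illustrative call; option values are examples, not defaults.
import { extractFramesFromVideo } from '@/services/io/extractFramesFromVideo'

async function analyze(videoBlob: Blob) {
  const frames = await extractFramesFromVideo(videoBlob, {
    format: 'jpg',
    maxWidth: 1024,
    maxHeight: 576,
    sceneSamplingRate: 10, // keep ~1 in 10 frames between detected scene changes
    autoCrop: true, // run the cropdetect pre-pass added in this commit
    onProgress: (p) => console.log(`extracting frames: ${p}%`),
    debug: false,
  })
  // assumed: an array of data-URI strings, one per extracted frame
  return frames
}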
src/services/io/extractScenesFromVideo.ts
ADDED
@@ -0,0 +1,369 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
'use client'
|
2 |
+
|
3 |
+
import { FFmpeg } from '@ffmpeg/ffmpeg'
|
4 |
+
import { toBlobURL } from '@ffmpeg/util'
|
5 |
+
import mediaInfoFactory, { VideoTrack, AudioTrack } from 'mediainfo.js'
|
6 |
+
import { fileDataToBase64 } from './fileDataToBase64'
|
7 |
+
|
8 |
+
interface ExtractorOptions {
|
9 |
+
frameFormat: 'png' | 'jpg'
|
10 |
+
maxWidth: number
|
11 |
+
maxHeight: number
|
12 |
+
framesPerScene: number
|
13 |
+
onProgress?: (progress: number) => void
|
14 |
+
debug?: boolean
|
15 |
+
autoCrop?: boolean
|
16 |
+
sceneThreshold?: number
|
17 |
+
minSceneDuration?: number
|
18 |
+
}
|
19 |
+
|
20 |
+
interface SceneData {
|
21 |
+
sceneIndex: number
|
22 |
+
startTimeInMs: number
|
23 |
+
endTimeInMs: number
|
24 |
+
video: string
|
25 |
+
frames: string[]
|
26 |
+
}
|
27 |
+
|
28 |
+
export async function extractScenesFromVideo(
|
29 |
+
videoBlob: Blob,
|
30 |
+
options: ExtractorOptions
|
31 |
+
): Promise<SceneData[]> {
|
32 |
+
const ffmpeg = new FFmpeg()
|
33 |
+
const baseURL = 'https://unpkg.com/@ffmpeg/core@0.12.6/dist/umd'
|
34 |
+
|
35 |
+
try {
|
36 |
+
console.log(`getting duration..`)
|
37 |
+
|
38 |
+
const duration = await getVideoDuration(videoBlob)
|
39 |
+
if (!duration) {
|
40 |
+
throw new Error(`couldn't get the video duration`)
|
41 |
+
}
|
42 |
+
if (options.debug) {
|
43 |
+
console.log('Video duration in seconds:', duration)
|
44 |
+
}
|
45 |
+
|
46 |
+
console.log(`loading FFmpeg..`)
|
47 |
+
|
48 |
+
await ffmpeg.load({
|
49 |
+
coreURL: await toBlobURL(`${baseURL}/ffmpeg-core.js`, 'text/javascript'),
|
50 |
+
wasmURL: await toBlobURL(
|
51 |
+
`${baseURL}/ffmpeg-core.wasm`,
|
52 |
+
'application/wasm'
|
53 |
+
),
|
54 |
+
})
|
55 |
+
|
56 |
+
if (options.debug) {
|
57 |
+
console.log('FFmpeg loaded')
|
58 |
+
}
|
59 |
+
|
60 |
+
const videoUint8Array = new Uint8Array(await videoBlob.arrayBuffer())
|
61 |
+
await ffmpeg.writeFile('input.mp4', videoUint8Array)
|
62 |
+
|
63 |
+
console.log(`detecting crop parameters..`)
|
64 |
+
|
65 |
+
let cropParams = ''
|
66 |
+
if (options.autoCrop) {
|
67 |
+
cropParams = await detectCropParameters(ffmpeg, options)
|
68 |
+
}
|
69 |
+
|
70 |
+
const sceneThreshold = options.sceneThreshold || 0.2
|
71 |
+
const minSceneDuration = options.minSceneDuration || 1
|
72 |
+
|
73 |
+
const sceneDetectionFilter = `select='gt(scene,${sceneThreshold})'`
|
74 |
+
const scaleFilter = `scale='min(${options.maxWidth},iw)':min'(${options.maxHeight},ih)':force_original_aspect_ratio=decrease`
|
75 |
+
|
76 |
+
let filterChain = `${sceneDetectionFilter},${scaleFilter}`
|
77 |
+
if (cropParams) {
|
78 |
+
filterChain = `crop=${cropParams},${filterChain}`
|
79 |
+
}
|
80 |
+
console.log(`detecting scenes..`)
|
81 |
+
|
82 |
+
const sceneTimestamps = await detectScenes(
|
83 |
+
ffmpeg,
|
84 |
+
filterChain,
|
85 |
+
options,
|
86 |
+
duration
|
87 |
+
)
|
88 |
+
|
89 |
+
console.log(`detected ${sceneTimestamps.length} scenes`)
|
90 |
+
|
91 |
+
const scenes: SceneData[] = []
|
92 |
+
|
93 |
+
for (let i = 0; i < sceneTimestamps.length; i++) {
|
94 |
+
const startTime = sceneTimestamps[i]
|
95 |
+
const endTime =
|
96 |
+
i < sceneTimestamps.length - 1
|
97 |
+
? sceneTimestamps[i + 1]
|
98 |
+
: duration * 1000
|
99 |
+
const sceneDuration = endTime - startTime
|
100 |
+
console.log(`processing scene ${i}`)
|
101 |
+
|
102 |
+
try {
|
103 |
+
const sceneData = await processScene(
|
104 |
+
ffmpeg,
|
105 |
+
i,
|
106 |
+
startTime,
|
107 |
+
endTime,
|
108 |
+
sceneDuration,
|
109 |
+
options
|
110 |
+
)
|
111 |
+
scenes.push(sceneData)
|
112 |
+
} catch (error) {
|
113 |
+
console.error(`Error processing scene ${i}:`, error)
|
114 |
+
}
|
115 |
+
|
116 |
+
options.onProgress?.(Math.round(((i + 1) / sceneTimestamps.length) * 100))
|
117 |
+
}
|
118 |
+
|
119 |
+
if (options.debug) {
|
120 |
+
console.log(`Total scenes processed: ${scenes.length}`)
|
121 |
+
}
|
122 |
+
|
123 |
+
return scenes
|
124 |
+
} catch (error) {
|
125 |
+
console.error('Error in extractFramesAndScenesFromVideo:', error)
|
126 |
+
throw error
|
127 |
+
} finally {
|
128 |
+
try {
|
129 |
+
await ffmpeg.terminate()
|
130 |
+
} catch (error) {
|
131 |
+
console.error('Error terminating FFmpeg:', error)
|
132 |
+
}
|
133 |
+
}
|
134 |
+
}
|
135 |
+
|
136 |
+
async function getVideoDuration(
|
137 |
+
videoBlob: Blob,
|
138 |
+
debug: boolean = false
|
139 |
+
): Promise<number> {
|
140 |
+
// Initialize MediaInfo
|
141 |
+
const mediaInfo = await mediaInfoFactory({
|
142 |
+
format: 'object',
|
143 |
+
locateFile: () => {
|
144 |
+
return '/wasm/MediaInfoModule.wasm'
|
145 |
+
},
|
146 |
+
})
|
147 |
+
|
148 |
+
// Get video duration using MediaInfo
|
149 |
+
const getSize = () => videoBlob.size
|
150 |
+
const readChunk = (chunkSize: number, offset: number) =>
|
151 |
+
new Promise<Uint8Array>((resolve, reject) => {
|
152 |
+
const reader = new FileReader()
|
153 |
+
reader.onload = (event) => {
|
154 |
+
if (event.target?.result instanceof ArrayBuffer) {
|
155 |
+
resolve(new Uint8Array(event.target.result))
|
156 |
+
} else {
|
157 |
+
reject(new Error('Failed to read chunk'))
|
158 |
+
}
|
159 |
+
}
|
160 |
+
reader.onerror = (error) => reject(error)
|
161 |
+
reader.readAsArrayBuffer(videoBlob.slice(offset, offset + chunkSize))
|
162 |
+
})
|
163 |
+
|
164 |
+
if (debug) {
|
165 |
+
console.log('calling await mediaInfo.analyzeData(getSize, readChunk)')
|
166 |
+
}
|
167 |
+
|
168 |
+
const result = await mediaInfo.analyzeData(getSize, readChunk)
|
169 |
+
if (debug) {
|
170 |
+
console.log('result = ', result)
|
171 |
+
}
|
172 |
+
|
173 |
+
let duration: number = 0
|
174 |
+
|
175 |
+
for (const track of result.media?.track || []) {
|
176 |
+
if (debug) {
|
177 |
+
console.log('track = ', track)
|
178 |
+
}
|
179 |
+
|
180 |
+
let maybeDuration: number = 0
|
181 |
+
if (track['@type'] === 'Audio') {
|
182 |
+
const audioTrack = track as AudioTrack
|
183 |
+
maybeDuration = audioTrack.Duration
|
184 |
+
? parseFloat(`${audioTrack.Duration || 0}`)
|
185 |
+
: 0
|
186 |
+
} else if (track['@type'] === 'Video') {
|
187 |
+
const videoTrack = track as VideoTrack
|
188 |
+
maybeDuration = videoTrack.Duration
|
189 |
+
? parseFloat(`${videoTrack.Duration || 0}`)
|
190 |
+
: 0
|
191 |
+
}
|
192 |
+
if (
|
193 |
+
typeof maybeDuration === 'number' &&
|
194 |
+
isFinite(maybeDuration) &&
|
195 |
+
!isNaN(maybeDuration)
|
196 |
+
) {
|
197 |
+
duration = maybeDuration
|
198 |
+
}
|
199 |
+
}
  return duration
}

async function detectCropParameters(
  ffmpeg: FFmpeg,
  options: ExtractorOptions
): Promise<string> {
  const cropDetectCommand = [
    '-i',
    'input.mp4',
    '-vf',
    'cropdetect=limit=0.1:round=2:reset=0',
    '-f',
    'null',
    '-t',
    '10',
    '-',
  ]

  if (options.debug) {
    console.log(
      'Executing crop detection command:',
      cropDetectCommand.join(' ')
    )
  }

  let cropParams = ''
  ffmpeg.on('log', ({ message }) => {
    const cropMatch = message.match(/crop=(\d+:\d+:\d+:\d+)/)
    if (cropMatch) {
      cropParams = cropMatch[1]
    }
  })

  await ffmpeg.exec(cropDetectCommand)

  if (options.debug) {
    console.log('Detected crop parameters:', cropParams)
  }

  return cropParams
}

async function detectScenes(
  ffmpeg: FFmpeg,
  filterChain: string,
  options: ExtractorOptions,
  duration: number
): Promise<number[]> {
  const extractScenesCommand = [
    '-i',
    'input.mp4',
    '-filter_complex',
    `${filterChain},metadata=print:file=scenes.txt`,
    '-f',
    'null',
    '-',
  ]

  if (options.debug) {
    console.log(
      'Executing scene detection command:',
      extractScenesCommand.join(' ')
    )
  }

  await ffmpeg.exec(extractScenesCommand)

  const scenesMetadata = await ffmpeg.readFile('scenes.txt')
  const decodedMetadata = new TextDecoder().decode(scenesMetadata as Uint8Array)

  if (options.debug) {
    console.log('Scenes metadata:', decodedMetadata)
  }

  const sceneTimestamps = decodedMetadata
    .split('\n')
    .filter((line) => line.includes('pts_time'))
    .map((line) => parseFloat(line.split('pts_time:')[1]) * 1000) // Convert to milliseconds

  // Add start and end timestamps
  sceneTimestamps.unshift(0)
  sceneTimestamps.push(duration * 1000)

  // Filter out scenes that are too short
  const filteredScenes = sceneTimestamps.filter((timestamp, index, array) => {
    if (index === 0) return true
    const sceneDuration = timestamp - array[index - 1]
    return sceneDuration >= (options.minSceneDuration || 1) * 1000
  })

  return filteredScenes
}

async function processScene(
  ffmpeg: FFmpeg,
  index: number,
  startTime: number,
  endTime: number,
  duration: number,
  options: ExtractorOptions
): Promise<SceneData> {
  const extractSceneCommand = [
    '-ss',
    (startTime / 1000).toString(),
    '-i',
    'input.mp4',
    '-t',
    (duration / 1000).toString(),
    '-c:v',
    'libx264',
    '-preset',
    'ultrafast',
    '-crf',
    '23',
    '-c:a',
    'aac',
    `scene_${index}.mp4`,
  ]
  // console.log(`calling ffmpeg.exec(extractSceneCommand)`, extractSceneCommand)
  await ffmpeg.exec(extractSceneCommand)

  // Calculate frame interval to get the desired number of frames
  const frameInterval = Math.max(
    1,
    Math.floor(duration / (1000 * options.framesPerScene))
  )

  const extractFramesCommand = [
    '-i',
    `scene_${index}.mp4`,
    '-vf',
    `select='not(mod(n,${frameInterval}))',setpts=N/FRAME_RATE/TB`,
    '-frames:v',
    options.framesPerScene.toString(),
    '-vsync',
    '0',
    '-q:v',
    '2',
    '-f',
    'image2',
    `scene_${index}_frame_%03d.${options.frameFormat}`,
  ]
  // console.log(`calling ffmpeg.exec(extractFramesCommand)`, extractFramesCommand)
  await ffmpeg.exec(extractFramesCommand)

  const sceneVideo = await ffmpeg.readFile(`scene_${index}.mp4`)
  const frameFiles = (await ffmpeg.listDir('/')).filter(
    (file) =>
      file.name.startsWith(`scene_${index}_frame_`) &&
      file.name.endsWith(`.${options.frameFormat}`)
  )

  const frames: string[] = []
  for (const frameFile of frameFiles) {
    const frameData = await ffmpeg.readFile(frameFile.name)
    const base64Frame = fileDataToBase64(frameData)
    frames.push(`data:image/${options.frameFormat};base64,${base64Frame}`)
  }

  const base64Video = fileDataToBase64(sceneVideo)

  return {
    sceneIndex: index,
    startTimeInMs: Math.round(startTime),
    endTimeInMs: Math.round(endTime),
    video: `data:video/mp4;base64,${base64Video}`,
    frames,
  }
}
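For reference, a minimal sketch of how this new extractor is meant to be consumed, based on the call site added in useIO.ts further down; the scene fields mirror what processScene() returns, and the exact option types are assumptions:

const scenes = await extractScenesFromVideo(file, {
  frameFormat: 'png',
  maxWidth: 1024,
  maxHeight: 576,
  framesPerScene: 1,
  autoCrop: true,
  sceneThreshold: 0.1,
  minSceneDuration: 1,
  debug: false,
  onProgress: (progress: number) => console.log(`extracting scenes.. ${progress}%`),
})

for (const scene of scenes) {
  // scene.startTimeInMs / scene.endTimeInMs: rounded milliseconds
  // scene.video: a data:video/mp4;base64,... URI of the re-encoded scene clip
  // scene.frames: data:image/png;base64,... URIs (framesPerScene per scene)
}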
src/services/io/fileDataToBase64.ts
ADDED
@@ -0,0 +1,13 @@
import { FileData } from '@ffmpeg/ffmpeg/dist/esm/types'

export function fileDataToBase64(fileData: FileData): string {
  // Convert the Uint8Array to a binary string, then Base64-encode it with btoa
  let binary = ''
  const bytes = new Uint8Array(fileData as any)
  const len = bytes.byteLength
  for (let i = 0; i < len; i++) {
    binary += String.fromCharCode(bytes[i])
  }

  return window.btoa(binary)
}
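A usage sketch for this helper, mirroring what processScene() above does with frames read from the FFmpeg virtual filesystem (the file name here is hypothetical):

const frameData = await ffmpeg.readFile('scene_0_frame_001.png')
const dataUri = `data:image/png;base64,${fileDataToBase64(frameData)}`

btoa only accepts strings, so the bytes are first folded into a binary string before encoding; this keeps the helper browser-only, with no dependency on Node's Buffer.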
src/services/io/parseFileIntoSegments.ts
CHANGED
@@ -81,7 +81,41 @@ export async function parseFileIntoSegments({
         ? maybeEndTimeInMs!
         : startTimeInMs + durationInMs

+      const partialVideo: Partial<TimelineSegment> = {
+        category: ClapSegmentCategory.VIDEO,
+        startTimeInMs,
+        endTimeInMs,
+
+        prompt: 'movie',
+        label: 'movie', // `${file.name.split(".")[0] || "Untitled"}`, // a short label to name the segment (optional, can be human or LLM-defined)
+
+        outputType: ClapOutputType.VIDEO,
+        status: ClapSegmentStatus.TO_GENERATE,
+
+        assetUrl: '',
+        assetDurationInMs: durationInMs,
+        assetSourceType: ClapAssetSource.EMPTY,
+        assetFileFormat: undefined,
+        track: track ? track : undefined,
+      }
+
+      const video = await clapSegmentToTimelineSegment(newSegment(partialVideo))
+
+      if (isValidNumber(track)) {
+        video.track = track
+      }
+
+      video.outputType = ClapOutputType.VIDEO
+
+      // we assume we want it to be immediately visible
+      video.visibility = SegmentVisibility.VISIBLE
+
+      // console.log("newSegment:", audioSegment)
+
+      // poof! type disappears.. it's magic
+      newSegments.push(video)
+
+      const partialStoryboard: Partial<TimelineSegment> = {
         prompt: 'Storyboard', // note: this can be set later with an automatic captioning worker
         startTimeInMs, // start time of the segment
         endTimeInMs, // end time of the segment (startTimeInMs + durationInMs)
@@ -90,28 +124,31 @@ export async function parseFileIntoSegments({
         label: `${file.name}`, // a short label to name the segment (optional, can be human or LLM-defined)
         category,
         assetUrl,
+        assetDurationInMs: durationInMs,
         assetSourceType: ClapAssetSource.DATA,
         assetFileFormat: `${file.type}`,
+
+        // important: we try to go below
+        track: track ? track + 1 : undefined,
       }

+      const storyboard = await clapSegmentToTimelineSegment(
+        newSegment(partialStoryboard)
       )

       if (isValidNumber(track)) {
+        storyboard.track = track
       }

+      storyboard.outputType = ClapOutputType.IMAGE

       // we assume we want it to be immediately visible
+      storyboard.visibility = SegmentVisibility.VISIBLE

       // console.log("newSegment:", audioSegment)

       // poof! type disappears.. it's magic
+      newSegments.push(storyboard)
       break
     }

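A minimal call sketch, matching the call site added in useIO.ts below: the caller passes a storyboard frame as a File plus explicit scene timings and gets back both segments (the exact return ordering and track placement follow the diff above; anything else is an assumption):

const newSegments = await parseFileIntoSegments({
  file: frameFile,
  startTimeInMs: scene.startTimeInMs,
  endTimeInMs: scene.endTimeInMs,
})
// newSegments[0]: the VIDEO placeholder (status TO_GENERATE, empty asset) on `track`
// newSegments[1]: the storyboard image segment (DATA asset), placed one track below when a track is given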
src/services/io/useIO.ts
CHANGED
@@ -3,7 +3,10 @@
 import {
   ClapAssetSource,
   ClapEntity,
+  ClapMediaOrientation,
+  ClapOutputType,
   ClapProject,
+  ClapSegment,
   ClapSegmentCategory,
   ClapSegmentStatus,
   getClapAssetSourceType,
@@ -11,6 +14,7 @@ import {
   newSegment,
   parseClap,
   serializeClap,
+  UUID,
 } from '@aitube/clap'
 import {
   TimelineStore,
@@ -19,6 +23,7 @@ import {
   removeFinalVideosAndConvertToTimelineSegments,
   getFinalVideo,
   DEFAULT_DURATION_IN_MS_PER_STEP,
+  clapSegmentToTimelineSegment,
 } from '@aitube/timeline'
 import { ParseScriptProgressUpdate, parseScriptToClap } from '@aitube/broadway'
 import { IOStore, TaskCategory, TaskVisibility } from '@aitube/clapper-services'
@@ -43,9 +48,11 @@ import {
 import { sleep } from '@/lib/utils/sleep'
 import { FFMPegAudioInput, FFMPegVideoInput } from './ffmpegUtils'
 import { createFullVideo } from './createFullVideo'
-import {
+import { extractScenesFromVideo } from './extractScenesFromVideo'
 import { extractCaptionsFromFrames } from './extractCaptionsFromFrames'
 import { base64DataUriToFile } from '@/lib/utils/base64DataUriToFile'
+import { useUI } from '../ui'
+import { getTypeAndExtension } from '@/lib/utils/getTypeAndExtension'

 export const useIO = create<IOStore>((set, get) => ({
   ...getDefaultIOState(),
@@ -59,7 +66,7 @@ export const useIO = create<IOStore>((set, get) => ({
     timeline.clear()
   },
   openFiles: async (files: File[]) => {
-    const { openClapBlob, openScreenplay } = get()
+    const { openClapBlob, openScreenplay, openVideo } = get()
     const timeline: TimelineStore = useTimeline.getState()
     const { segments, addSegments } = timeline

@@ -101,101 +108,166 @@ export const useIO = create<IOStore>((set, get) => ({
         const newSegments = await parseFileIntoSegments({ file })

         console.log('calling timeline.addSegments with:', newSegments)
-        await timeline.addSegments({
-        })
+        await timeline.addSegments({ segments: newSegments })
+
         return
       }

       const isVideoFile = fileType.startsWith('video/')
       if (isVideoFile) {
+        await openVideo(projectName, fileName, file)
+        return
+      }
+    }
+    useUI.getState().setShowWelcomeScreen(false)
+  },
+  openVideo: async (
+    projectName: string,
+    fileName: string,
+    fileContent: string | Blob
+  ): Promise<void> => {
+    const timeline: TimelineStore = useTimeline.getState()

+    const sceneExtractionTask = useTasks.getState().add({
+      category: TaskCategory.IMPORT,
+      visibility: TaskVisibility.BLOCKER,
+      initialMessage: `Starting up, can take a few minutes..`,
+      successMessage: `Extracting scenes.. 100%`,
+      value: 0,
+    })
+
+    const file =
+      typeof fileContent === 'string'
+        ? base64DataUriToFile(fileContent, fileName)
+        : fileContent
+
+    const scenes = await extractScenesFromVideo(file, {
+      frameFormat: 'png', // in theory we could also use 'jpg', but this freezes FFmpeg
+      maxWidth: 1024,
+      maxHeight: 576,
+      framesPerScene: 1,
+      autoCrop: true,
+      sceneThreshold: 0.1,
+      minSceneDuration: 1,
+      debug: true,
+      onProgress: (progress: number) => {
+        sceneExtractionTask.setProgress({
+          message: `Extracting scenes.. ${progress}%`,
+          value: progress,
         })
+      },
+    })

+    // optional: reset the project
+    // await timeline.setClap(newClap())
+
+    let currentStoryboardIndex = 0
+    let startTimeInMs = 0
+    const durationInSteps = 4
+    const durationInMs = durationInSteps * DEFAULT_DURATION_IN_MS_PER_STEP
+    let endTimeInMs = startTimeInMs + durationInMs
+
+    // TODO: extract info from the original video to determine things like
+    // the orientation, duration..
+    timeline.setClap(
+      newClap({
+        meta: {
+          id: UUID(),
+          title: projectName,
+          description: `${projectName} (${fileName})`,
+          synopsis: '',
+          licence:
+            "This OpenClap file is just a conversion from the original screenplay and doesn't claim any copyright or intellectual property. All rights reserved to the original intellectual property and copyright holders. Using OpenClap isn't piracy.",
+
+          orientation: ClapMediaOrientation.LANDSCAPE,
+          durationInMs: frames.length * durationInMs,
+
+          // TODO: those should come from the Clapper user settings
+
+          width: 1024,
+          height: 576,
+
+          defaultVideoModel: '', // <-- we should deprecate this no?
+          extraPositivePrompt: '',
+          screenplay: '',
+          isLoop: false,
+          isInteractive: false,
+        },
+      })
+    )

+    for (const scene of scenes) {
+      console.log('parsing scene:', scene)
+      try {
+        const frameFile = base64DataUriToFile(
+          scene.frames[0],
+          `storyboard_${++currentStoryboardIndex}.png`
+        )

+        const assetDurationInMs = scene.endTimeInMs - scene.startTimeInMs
+
+        // this returns multiple segments (video, image..)
+        const newSegments = await parseFileIntoSegments({
+          file: frameFile,
+          startTimeInMs: scene.startTimeInMs,
+          endTimeInMs: scene.endTimeInMs,
+        })

+        for (const newSegment of newSegments) {
+          newSegment.assetDurationInMs = assetDurationInMs
+          if (newSegment.category === ClapSegmentCategory.VIDEO) {
+            const { assetFileFormat, outputType } = getTypeAndExtension(
+              scene.video
+            )
+            newSegment.assetFileFormat = assetFileFormat
+            newSegment.assetUrl = scene.video
+            newSegment.status = ClapSegmentStatus.COMPLETED
+            newSegment.outputType = outputType
+          }
+        }
+        await timeline.addSegments({ segments: newSegments })
+      } catch (err) {
+        console.error(`failed to process scene:`, scene)
+        console.error(err)
+      }
+    }

+    sceneExtractionTask.success()

+    const enableCaptioning = false

+    if (enableCaptioning) {
+      const captioningTask = useTasks.getState().add({
+        category: TaskCategory.IMPORT,
+        // visibility: TaskVisibility.BLOCKER,

+        // since this is very long task, we can run it in the background
+        visibility: TaskVisibility.BACKGROUND,
+        initialMessage: `Analyzing storyboards..`,
+        successMessage: `Analyzing storyboards.. 100% done`,
+        value: 0,
+      })

+      console.log('calling extractCaptionsFromFrames() with:', frames)
+      /*
+      const captions = await extractCaptionsFromFrames(
+        frames,
+        (progress: number, storyboardIndex: number, nbStoryboards: number) => {
+          captioningTask.setProgress({
+            message: `Analyzing storyboards (${progress}%)`,
+            value: progress,
+          })
         }
+      )
+
+      console.log('captions:', captions)
+      */
+      // TODO: add
+
+      captioningTask.success()
     }
+
+    useUI.getState().setShowWelcomeScreen(false)
   },
   openScreenplay: async (
     projectName: string,
@@ -270,6 +342,7 @@ export const useIO = create<IOStore>((set, get) => ({
       task.fail(`${err || 'unknown screenplay import error'}`)
     } finally {
     }
+    useUI.getState().setShowWelcomeScreen(false)
   },
   openScreenplayUrl: async (url: string) => {
     const timeline: TimelineStore = useTimeline.getState()
@@ -326,6 +399,7 @@ export const useIO = create<IOStore>((set, get) => ({
     } catch (err) {
       task.fail(`${err || 'unknown error'}`)
     }
+    useUI.getState().setShowWelcomeScreen(false)
   },
   saveAnyFile: (blob: Blob, fileName: string) => {
     // Create an object URL for the compressed clap blob
@@ -391,6 +465,7 @@ export const useIO = create<IOStore>((set, get) => ({
     } catch (err) {
       task.fail(`${err || 'unknown error'}`)
     }
+    useUI.getState().setShowWelcomeScreen(false)
   },
   openClapBlob: async (projectName: string, fileName: string, blob: Blob) => {
     const timeline: TimelineStore = useTimeline.getState()
@@ -423,6 +498,7 @@ export const useIO = create<IOStore>((set, get) => ({
     } catch (err) {
       task.fail(`${err || 'unknown error'}`)
     }
+    useUI.getState().setShowWelcomeScreen(false)
   },
   saveClap: async () => {
     const { saveAnyFile } = get()
@@ -726,7 +802,9 @@ export const useIO = create<IOStore>((set, get) => ({
     }
   },

-  openMLT: async (file: File) => {
+  openMLT: async (file: File) => {
+    useUI.getState().setShowWelcomeScreen(false)
+  },
   saveMLT: async () => {},
   generateMLT: async (): Promise<string> => {
     const timeline: TimelineStore = useTimeline.getState()
@@ -1001,7 +1079,9 @@ export const useIO = create<IOStore>((set, get) => ({
 </mlt>`
   },

-  openKdenline: async (file: File) => {
+  openKdenline: async (file: File) => {
+    useUI.getState().setShowWelcomeScreen(false)
+  },

   saveKdenline: async () => {
     const { saveAnyFile } = get()
@@ -1062,7 +1142,9 @@ export const useIO = create<IOStore>((set, get) => ({
     */
   },

-  openOpenTimelineIO: async (file: File) => {
+  openOpenTimelineIO: async (file: File) => {
+    useUI.getState().setShowWelcomeScreen(false)
+  },

   saveOpenTimelineIO: async () => {},

@@ -1086,6 +1168,9 @@ export const useIO = create<IOStore>((set, get) => ({
     }

     const { entities } = await parseClap(file)
+
+    useUI.getState().setShowWelcomeScreen(false)
+
     return entities
   },
 }))
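getTypeAndExtension() is imported above but not included in this commit; judging only from how openVideo() destructures its result, it is assumed to map an asset data URI to a MIME type plus a Clap output type, roughly:

// hypothetical shape, inferred from the destructuring in openVideo()
const { assetFileFormat, outputType } = getTypeAndExtension(scene.video)
// e.g. assetFileFormat === 'video/mp4', outputType === ClapOutputType.VIDEO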
src/services/resolver/useResolver.ts
CHANGED
@@ -597,10 +597,12 @@ export const useResolver = create<ResolverStore>((set, get) => ({
     ) as TimelineSegment

     if (newSegment.outputType === ClapOutputType.AUDIO) {
+      if (newSegment.assetUrl) {
+        try {
+          newSegment.audioBuffer = await getAudioBuffer(newSegment.assetUrl)
+        } catch (err) {
+          console.error(`failed to load the audio file: ${err}`)
+        }
       }
     }

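getAudioBuffer() itself is not part of this diff; a minimal sketch of what such a helper could look like with the Web Audio API (an assumption, not the project's actual implementation):

async function getAudioBuffer(assetUrl: string): Promise<AudioBuffer> {
  // assetUrl can be a data: URI or a regular URL; fetch handles both
  const response = await fetch(assetUrl)
  const arrayBuffer = await response.arrayBuffer()
  const audioContext = new AudioContext()
  return audioContext.decodeAudioData(arrayBuffer)
}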
src/services/ui/getDefaultUIState.ts
CHANGED
@@ -6,6 +6,8 @@ import {

 export function getDefaultUIState(): UIState {
   const state: UIState = {
+    isTopMenuOpen: false,
+    showWelcomeScreen: true,
     hasBetaAccess: false,
     themeName: 'backstage',
     showApiKeys: false,
src/services/ui/useUI.ts
CHANGED
@@ -21,6 +21,13 @@ export const useUI = create<UIStore>()(
   persist(
     (set, get) => ({
       ...getDefaultUIState(),
+      setIsTopMenuOpen: (isTopMenuOpen: boolean) => {
+        set({ isTopMenuOpen })
+      },
+      setShowWelcomeScreen: (showWelcomeScreen: boolean) => {
+        console.log('setShowWelcomeScreen called with:', showWelcomeScreen)
+        set({ showWelcomeScreen: showWelcomeScreen })
+      },
       setHasBetaAccess: (hasBetaAccess: boolean) => {
         set({ hasBetaAccess })
       },
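Usage sketch for the new UI flags: the selector calls are standard zustand usage and the surrounding component is hypothetical, while the getState() call is exactly how useIO.ts above dismisses the welcome screen:

// inside a React component
const showWelcomeScreen = useUI((s) => s.showWelcomeScreen)
const setIsTopMenuOpen = useUI((s) => s.setIsTopMenuOpen)

// outside React, e.g. from a service
useUI.getState().setShowWelcomeScreen(false)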
|