Spaces:
Running
on
CPU Upgrade
Running
on
CPU Upgrade
Commit
•
ed9e9d0
1
Parent(s):
6f5eecd
work on magic gradio wrapper + replicate
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- README.md +1 -0
- package-lock.json +639 -43
- package.json +2 -3
- src/app/api/resolve/providers/falai/index.ts +120 -137
- src/app/api/resolve/providers/gradio/index.ts +30 -0
- src/app/api/resolve/providers/huggingface/generateImage.ts +42 -0
- src/app/api/resolve/providers/huggingface/generateVideo.ts +28 -0
- src/app/api/resolve/providers/huggingface/generateVoice.ts +29 -0
- src/app/api/resolve/providers/huggingface/index.ts +15 -26
- src/app/api/resolve/providers/replicate/index.ts +44 -20
- src/app/api/resolve/providers/stabilityai/generateImage.ts +63 -0
- src/app/api/resolve/providers/stabilityai/generateVideo.ts +89 -0
- src/app/api/resolve/providers/stabilityai/index.ts +10 -62
- src/app/api/resolve/providers/stabilityai/performRequest.ts +0 -64
- src/app/api/resolve/route.ts +51 -3
- src/components/settings/constants.ts +63 -6
- src/components/toolbars/top-menu/lists/hasNoPublicAPI.ts +1 -0
- src/controllers/audio/analyzeAudio.ts +1 -2
- src/controllers/audio/getAudioBuffer.ts +0 -15
- src/controllers/audio/readFileAsArrayBuffer.ts +0 -16
- src/controllers/audio/startAudioSourceNode.ts +1 -2
- src/controllers/audio/types.ts +1 -1
- src/controllers/audio/useAudio.ts +1 -3
- src/controllers/io/parseFileIntoSegments.ts +3 -4
- src/controllers/metrics/constants.ts +15 -0
- src/controllers/metrics/getDefaultMetricsPerProvider.ts +9 -0
- src/controllers/renderer/useRenderer.ts +1 -3
- src/controllers/resolver/useResolver.ts +106 -49
- src/controllers/settings/getDefaultSettingsState.ts +15 -6
- src/controllers/settings/types.ts +2 -2
- src/controllers/settings/useSettings.ts +3 -3
- src/lib/ffmpeg/getMediaInfo.ts +83 -0
- src/lib/hf/adapter/README.md +3 -0
- src/lib/hf/adapter/adaptAnyInputsToGradioInputs.ts +65 -0
- src/lib/hf/adapter/findMainGradioEndpoint.ts +57 -0
- src/lib/hf/adapter/getAdaptationScore.ts +18 -0
- src/lib/hf/adapter/getDefaultFields.ts +24 -0
- src/lib/hf/adapter/identifyField.ts +218 -0
- src/lib/hf/callGradioApi.ts +77 -0
- src/lib/hf/cloneSpace.ts +11 -0
- src/lib/hf/getCurrentOwner.ts +25 -0
- src/lib/hf/getGradioApiInfo.ts +20 -0
- src/lib/hf/getSpaceStatus.ts +34 -0
- src/lib/hf/{getMyGradioSpaces.ts → getSpaces.ts} +13 -17
- src/lib/hf/parseHuggingFaceHubId.ts +35 -0
- src/lib/hf/runSpace.ts +27 -0
- src/lib/hf/types.ts +63 -3
- src/lib/hf/useMyGradioSpaces.ts +5 -5
- src/lib/utils/getResolveRequestPrompts.ts +0 -67
- src/lib/utils/getTypeAndExtension.ts +47 -0
README.md
CHANGED
@@ -36,6 +36,7 @@ You can propose bounties attached to issues (eg `"add support for Adobe Premiere
|
|
36 |
|
37 |
There are various platforms to do this, I propose to try Boss as it has low fees: [boss.dev](https://www.boss.dev/doc/#create-bounties)
|
38 |
|
|
|
39 |
|
40 |
## For developers
|
41 |
|
|
|
36 |
|
37 |
There are various platforms to do this, I propose to try Boss as it has low fees: [boss.dev](https://www.boss.dev/doc/#create-bounties)
|
38 |
|
39 |
+
Please use good judgment if you take on a bounty. Post a message in the GitHub issuek, communicate on our Discord that you are going to take it etc.
|
40 |
|
41 |
## For developers
|
42 |
|
package-lock.json
CHANGED
@@ -10,8 +10,9 @@
|
|
10 |
"dependencies": {
|
11 |
"@aitube/clap": "0.0.27",
|
12 |
"@aitube/engine": "0.0.24",
|
13 |
-
"@aitube/timeline": "0.0.
|
14 |
"@fal-ai/serverless-client": "^0.10.3",
|
|
|
15 |
"@huggingface/hub": "^0.15.1",
|
16 |
"@huggingface/inference": "^2.7.0",
|
17 |
"@langchain/anthropic": "^0.2.0",
|
@@ -56,14 +57,12 @@
|
|
56 |
"@upstash/ratelimit": "^1.1.3",
|
57 |
"@upstash/redis": "^1.31.1",
|
58 |
"autoprefixer": "10.4.17",
|
59 |
-
"axios": "^1.7.2",
|
60 |
"class-variance-authority": "^0.7.0",
|
61 |
"clsx": "^2.1.0",
|
62 |
"cmdk": "^0.2.1",
|
63 |
"eslint": "8.57.0",
|
64 |
"eslint-config-next": "14.1.0",
|
65 |
"fluent-ffmpeg": "^2.1.3",
|
66 |
-
"form-data": "^4.0.0",
|
67 |
"fs-extra": "^11.2.0",
|
68 |
"lucide-react": "^0.334.0",
|
69 |
"mlt-xml": "^2.0.2",
|
@@ -125,9 +124,9 @@
|
|
125 |
}
|
126 |
},
|
127 |
"node_modules/@aitube/timeline": {
|
128 |
-
"version": "0.0.
|
129 |
-
"resolved": "https://registry.npmjs.org/@aitube/timeline/-/timeline-0.0.
|
130 |
-
"integrity": "sha512-
|
131 |
"dependencies": {
|
132 |
"date-fns": "^3.6.0",
|
133 |
"react-virtualized-auto-sizer": "^1.0.24"
|
@@ -1518,6 +1517,22 @@
|
|
1518 |
"node": ">=6.9.0"
|
1519 |
}
|
1520 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1521 |
"node_modules/@cspotcode/source-map-support": {
|
1522 |
"version": "0.8.1",
|
1523 |
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
|
@@ -1670,6 +1685,25 @@
|
|
1670 |
"resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.2.tgz",
|
1671 |
"integrity": "sha512-J4yDIIthosAsRZ5CPYP/jQvUAQtlZTTD/4suA08/FEnlxqW3sKS9iAhgsa9VYLZ6vDHn/ixJgIqRQPotoBjxIw=="
|
1672 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1673 |
"node_modules/@huggingface/hub": {
|
1674 |
"version": "0.15.1",
|
1675 |
"resolved": "https://registry.npmjs.org/@huggingface/hub/-/hub-0.15.1.tgz",
|
@@ -2161,6 +2195,144 @@
|
|
2161 |
"url": "https://opencollective.com/libvips"
|
2162 |
}
|
2163 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2164 |
"node_modules/@isaacs/cliui": {
|
2165 |
"version": "8.0.2",
|
2166 |
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
|
@@ -2448,6 +2620,30 @@
|
|
2448 |
"node": ">= 14"
|
2449 |
}
|
2450 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2451 |
"node_modules/@next/env": {
|
2452 |
"version": "14.2.4",
|
2453 |
"resolved": "https://registry.npmjs.org/@next/env/-/env-14.2.4.tgz",
|
@@ -2628,6 +2824,25 @@
|
|
2628 |
"node": ">= 8"
|
2629 |
}
|
2630 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2631 |
"node_modules/@pkgjs/parseargs": {
|
2632 |
"version": "0.11.0",
|
2633 |
"resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
|
@@ -4677,11 +4892,21 @@
|
|
4677 |
"resolved": "https://registry.npmjs.org/@tweenjs/tween.js/-/tween.js-23.1.2.tgz",
|
4678 |
"integrity": "sha512-kMCNaZCJugWI86xiEHaY338CU5JpD0B97p1j1IKNn/Zto8PgACjQx0UxbHjmOcLl/dDOBnItwD07KmCs75pxtQ=="
|
4679 |
},
|
|
|
|
|
|
|
|
|
|
|
4680 |
"node_modules/@types/draco3d": {
|
4681 |
"version": "1.4.10",
|
4682 |
"resolved": "https://registry.npmjs.org/@types/draco3d/-/draco3d-1.4.10.tgz",
|
4683 |
"integrity": "sha512-AX22jp8Y7wwaBgAixaSvkoG4M/+PlAcm3Qs4OW8yT9DM4xUpWKeFhLueTAyZF39pviAdcDdeJoACapiAceqNcw=="
|
4684 |
},
|
|
|
|
|
|
|
|
|
|
|
4685 |
"node_modules/@types/fluent-ffmpeg": {
|
4686 |
"version": "2.1.24",
|
4687 |
"resolved": "https://registry.npmjs.org/@types/fluent-ffmpeg/-/fluent-ffmpeg-2.1.24.tgz",
|
@@ -4715,6 +4940,14 @@
|
|
4715 |
"@types/node": "*"
|
4716 |
}
|
4717 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
4718 |
"node_modules/@types/node": {
|
4719 |
"version": "20.12.7",
|
4720 |
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.7.tgz",
|
@@ -4777,6 +5010,11 @@
|
|
4777 |
"resolved": "https://registry.npmjs.org/@types/stats.js/-/stats.js-0.17.3.tgz",
|
4778 |
"integrity": "sha512-pXNfAD3KHOdif9EQXZ9deK82HVNaXP5ZIF5RP2QG6OQFNTaY2YIetfrE9t528vEreGQvEPRDDc8muaoYeK0SxQ=="
|
4779 |
},
|
|
|
|
|
|
|
|
|
|
|
4780 |
"node_modules/@types/three": {
|
4781 |
"version": "0.165.0",
|
4782 |
"resolved": "https://registry.npmjs.org/@types/three/-/three-0.165.0.tgz",
|
@@ -4800,6 +5038,11 @@
|
|
4800 |
"resolved": "https://registry.npmjs.org/@types/webxr/-/webxr-0.5.16.tgz",
|
4801 |
"integrity": "sha512-0E0Cl84FECtzrB4qG19TNTqpunw0F1YF0QZZnFMF6pDw1kNKJtrlTKlVB34stGIsHbZsYQ7H0tNjPfZftkHHoA=="
|
4802 |
},
|
|
|
|
|
|
|
|
|
|
|
4803 |
"node_modules/@typescript-eslint/parser": {
|
4804 |
"version": "6.21.0",
|
4805 |
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.21.0.tgz",
|
@@ -5046,6 +5289,31 @@
|
|
5046 |
"url": "https://github.com/sponsors/epoberezkin"
|
5047 |
}
|
5048 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
5049 |
"node_modules/ansi-regex": {
|
5050 |
"version": "5.0.1",
|
5051 |
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
|
@@ -5365,16 +5633,6 @@
|
|
5365 |
"node": ">=4"
|
5366 |
}
|
5367 |
},
|
5368 |
-
"node_modules/axios": {
|
5369 |
-
"version": "1.7.2",
|
5370 |
-
"resolved": "https://registry.npmjs.org/axios/-/axios-1.7.2.tgz",
|
5371 |
-
"integrity": "sha512-2A8QhOMrbomlDuiLeK9XibIBzuHeRcqqNOHp0Cyp5EoJ1IFDh+XZH3A6BkXtv0K4gFGCI0Y4BM7B1wOEi0Rmgw==",
|
5372 |
-
"dependencies": {
|
5373 |
-
"follow-redirects": "^1.15.6",
|
5374 |
-
"form-data": "^4.0.0",
|
5375 |
-
"proxy-from-env": "^1.1.0"
|
5376 |
-
}
|
5377 |
-
},
|
5378 |
"node_modules/axobject-query": {
|
5379 |
"version": "3.2.1",
|
5380 |
"resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-3.2.1.tgz",
|
@@ -5548,6 +5806,18 @@
|
|
5548 |
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
|
5549 |
"integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="
|
5550 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
5551 |
"node_modules/busboy": {
|
5552 |
"version": "1.6.0",
|
5553 |
"resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz",
|
@@ -5741,11 +6011,80 @@
|
|
5741 |
"url": "https://github.com/sponsors/sindresorhus"
|
5742 |
}
|
5743 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
5744 |
"node_modules/client-only": {
|
5745 |
"version": "0.0.1",
|
5746 |
"resolved": "https://registry.npmjs.org/client-only/-/client-only-0.0.1.tgz",
|
5747 |
"integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA=="
|
5748 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
5749 |
"node_modules/clsx": {
|
5750 |
"version": "2.1.1",
|
5751 |
"resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz",
|
@@ -6124,6 +6463,14 @@
|
|
6124 |
"integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==",
|
6125 |
"peer": true
|
6126 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
6127 |
"node_modules/create-require": {
|
6128 |
"version": "1.1.1",
|
6129 |
"resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz",
|
@@ -7192,6 +7539,14 @@
|
|
7192 |
"node": ">=0.8.x"
|
7193 |
}
|
7194 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
7195 |
"node_modules/eventsource-parser": {
|
7196 |
"version": "1.1.2",
|
7197 |
"resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-1.1.2.tgz",
|
@@ -7292,6 +7647,11 @@
|
|
7292 |
"reusify": "^1.0.4"
|
7293 |
}
|
7294 |
},
|
|
|
|
|
|
|
|
|
|
|
7295 |
"node_modules/fflate": {
|
7296 |
"version": "0.8.2",
|
7297 |
"resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz",
|
@@ -7397,25 +7757,6 @@
|
|
7397 |
"which": "bin/which"
|
7398 |
}
|
7399 |
},
|
7400 |
-
"node_modules/follow-redirects": {
|
7401 |
-
"version": "1.15.6",
|
7402 |
-
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz",
|
7403 |
-
"integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==",
|
7404 |
-
"funding": [
|
7405 |
-
{
|
7406 |
-
"type": "individual",
|
7407 |
-
"url": "https://github.com/sponsors/RubenVerborgh"
|
7408 |
-
}
|
7409 |
-
],
|
7410 |
-
"engines": {
|
7411 |
-
"node": ">=4.0"
|
7412 |
-
},
|
7413 |
-
"peerDependenciesMeta": {
|
7414 |
-
"debug": {
|
7415 |
-
"optional": true
|
7416 |
-
}
|
7417 |
-
}
|
7418 |
-
},
|
7419 |
"node_modules/for-each": {
|
7420 |
"version": "0.3.3",
|
7421 |
"resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz",
|
@@ -7588,6 +7929,14 @@
|
|
7588 |
"node": ">=6.9.0"
|
7589 |
}
|
7590 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
7591 |
"node_modules/get-east-asian-width": {
|
7592 |
"version": "1.2.0",
|
7593 |
"resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.2.0.tgz",
|
@@ -7818,6 +8167,14 @@
|
|
7818 |
"resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz",
|
7819 |
"integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag=="
|
7820 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
7821 |
"node_modules/groq-sdk": {
|
7822 |
"version": "0.3.3",
|
7823 |
"resolved": "https://registry.npmjs.org/groq-sdk/-/groq-sdk-0.3.3.tgz",
|
@@ -7942,6 +8299,11 @@
|
|
7942 |
"he": "bin/he"
|
7943 |
}
|
7944 |
},
|
|
|
|
|
|
|
|
|
|
|
7945 |
"node_modules/hls.js": {
|
7946 |
"version": "1.3.5",
|
7947 |
"resolved": "https://registry.npmjs.org/hls.js/-/hls.js-1.3.5.tgz",
|
@@ -8287,6 +8649,11 @@
|
|
8287 |
"url": "https://github.com/sponsors/ljharb"
|
8288 |
}
|
8289 |
},
|
|
|
|
|
|
|
|
|
|
|
8290 |
"node_modules/is-number": {
|
8291 |
"version": "7.0.0",
|
8292 |
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
|
@@ -9005,6 +9372,107 @@
|
|
9005 |
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
9006 |
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
9007 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
9008 |
"node_modules/mustache": {
|
9009 |
"version": "4.2.0",
|
9010 |
"resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz",
|
@@ -9013,6 +9481,14 @@
|
|
9013 |
"mustache": "bin/mustache"
|
9014 |
}
|
9015 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
9016 |
"node_modules/mz": {
|
9017 |
"version": "2.7.0",
|
9018 |
"resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz",
|
@@ -9176,6 +9652,16 @@
|
|
9176 |
"node": ">= 6.13.0"
|
9177 |
}
|
9178 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
9179 |
"node_modules/node-html-parser": {
|
9180 |
"version": "6.1.13",
|
9181 |
"resolved": "https://registry.npmjs.org/node-html-parser/-/node-html-parser-6.1.13.tgz",
|
@@ -9484,6 +9970,11 @@
|
|
9484 |
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
|
9485 |
}
|
9486 |
},
|
|
|
|
|
|
|
|
|
|
|
9487 |
"node_modules/p-finally": {
|
9488 |
"version": "1.0.0",
|
9489 |
"resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz",
|
@@ -9621,6 +10112,11 @@
|
|
9621 |
"node": "14 || >=16.14"
|
9622 |
}
|
9623 |
},
|
|
|
|
|
|
|
|
|
|
|
9624 |
"node_modules/path-type": {
|
9625 |
"version": "4.0.0",
|
9626 |
"resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz",
|
@@ -9897,11 +10393,6 @@
|
|
9897 |
"react-is": "^16.13.1"
|
9898 |
}
|
9899 |
},
|
9900 |
-
"node_modules/proxy-from-env": {
|
9901 |
-
"version": "1.1.0",
|
9902 |
-
"resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
|
9903 |
-
"integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
|
9904 |
-
},
|
9905 |
"node_modules/punycode": {
|
9906 |
"version": "2.3.1",
|
9907 |
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
|
@@ -10353,6 +10844,14 @@
|
|
10353 |
"readable-stream": ">=4.0.0"
|
10354 |
}
|
10355 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
10356 |
"node_modules/require-from-string": {
|
10357 |
"version": "2.0.2",
|
10358 |
"resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz",
|
@@ -10557,6 +11056,14 @@
|
|
10557 |
"loose-envify": "^1.1.0"
|
10558 |
}
|
10559 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
10560 |
"node_modules/semver": {
|
10561 |
"version": "7.6.2",
|
10562 |
"resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz",
|
@@ -10778,6 +11285,14 @@
|
|
10778 |
"resolved": "https://registry.npmjs.org/stats.js/-/stats.js-0.17.0.tgz",
|
10779 |
"integrity": "sha512-hNKz8phvYLPEcRkeG1rsGmV5ChMjKDAWU7/OJJdDErPBNChQXxCo3WZurGpnWc6gZhAzEPFad1aVgyOANH1sMw=="
|
10780 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
10781 |
"node_modules/stdin-discarder": {
|
10782 |
"version": "0.2.2",
|
10783 |
"resolved": "https://registry.npmjs.org/stdin-discarder/-/stdin-discarder-0.2.2.tgz",
|
@@ -10797,6 +11312,11 @@
|
|
10797 |
"node": ">=10.0.0"
|
10798 |
}
|
10799 |
},
|
|
|
|
|
|
|
|
|
|
|
10800 |
"node_modules/string_decoder": {
|
10801 |
"version": "1.3.0",
|
10802 |
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
|
@@ -11162,6 +11682,11 @@
|
|
11162 |
"resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
|
11163 |
"integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw=="
|
11164 |
},
|
|
|
|
|
|
|
|
|
|
|
11165 |
"node_modules/thenify": {
|
11166 |
"version": "3.3.1",
|
11167 |
"resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz",
|
@@ -11992,6 +12517,34 @@
|
|
11992 |
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
|
11993 |
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
|
11994 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
11995 |
"node_modules/yallist": {
|
11996 |
"version": "4.0.0",
|
11997 |
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
|
@@ -12008,6 +12561,49 @@
|
|
12008 |
"node": ">= 14"
|
12009 |
}
|
12010 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
12011 |
"node_modules/yn": {
|
12012 |
"version": "3.1.1",
|
12013 |
"resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
|
@@ -12041,9 +12637,9 @@
|
|
12041 |
}
|
12042 |
},
|
12043 |
"node_modules/zod-to-json-schema": {
|
12044 |
-
"version": "3.23.
|
12045 |
-
"resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.23.
|
12046 |
-
"integrity": "sha512-
|
12047 |
"peerDependencies": {
|
12048 |
"zod": "^3.23.3"
|
12049 |
}
|
|
|
10 |
"dependencies": {
|
11 |
"@aitube/clap": "0.0.27",
|
12 |
"@aitube/engine": "0.0.24",
|
13 |
+
"@aitube/timeline": "0.0.29",
|
14 |
"@fal-ai/serverless-client": "^0.10.3",
|
15 |
+
"@gradio/client": "^1.1.1",
|
16 |
"@huggingface/hub": "^0.15.1",
|
17 |
"@huggingface/inference": "^2.7.0",
|
18 |
"@langchain/anthropic": "^0.2.0",
|
|
|
57 |
"@upstash/ratelimit": "^1.1.3",
|
58 |
"@upstash/redis": "^1.31.1",
|
59 |
"autoprefixer": "10.4.17",
|
|
|
60 |
"class-variance-authority": "^0.7.0",
|
61 |
"clsx": "^2.1.0",
|
62 |
"cmdk": "^0.2.1",
|
63 |
"eslint": "8.57.0",
|
64 |
"eslint-config-next": "14.1.0",
|
65 |
"fluent-ffmpeg": "^2.1.3",
|
|
|
66 |
"fs-extra": "^11.2.0",
|
67 |
"lucide-react": "^0.334.0",
|
68 |
"mlt-xml": "^2.0.2",
|
|
|
124 |
}
|
125 |
},
|
126 |
"node_modules/@aitube/timeline": {
|
127 |
+
"version": "0.0.29",
|
128 |
+
"resolved": "https://registry.npmjs.org/@aitube/timeline/-/timeline-0.0.29.tgz",
|
129 |
+
"integrity": "sha512-m2SD8iLyH8ZmiGmtZKJn8vOdTyIsP5DVcy0G0BcOpdE4magNtrUYho+fD+bCyYWr8Io4C5GNWyNQzdHRCR8aqw==",
|
130 |
"dependencies": {
|
131 |
"date-fns": "^3.6.0",
|
132 |
"react-virtualized-auto-sizer": "^1.0.24"
|
|
|
1517 |
"node": ">=6.9.0"
|
1518 |
}
|
1519 |
},
|
1520 |
+
"node_modules/@bundled-es-modules/cookie": {
|
1521 |
+
"version": "2.0.0",
|
1522 |
+
"resolved": "https://registry.npmjs.org/@bundled-es-modules/cookie/-/cookie-2.0.0.tgz",
|
1523 |
+
"integrity": "sha512-Or6YHg/kamKHpxULAdSqhGqnWFneIXu1NKvvfBBzKGwpVsYuFIQ5aBPHDnnoR3ghW1nvSkALd+EF9iMtY7Vjxw==",
|
1524 |
+
"dependencies": {
|
1525 |
+
"cookie": "^0.5.0"
|
1526 |
+
}
|
1527 |
+
},
|
1528 |
+
"node_modules/@bundled-es-modules/statuses": {
|
1529 |
+
"version": "1.0.1",
|
1530 |
+
"resolved": "https://registry.npmjs.org/@bundled-es-modules/statuses/-/statuses-1.0.1.tgz",
|
1531 |
+
"integrity": "sha512-yn7BklA5acgcBr+7w064fGV+SGIFySjCKpqjcWgBAIfrAkY+4GQTJJHQMeT3V/sgz23VTEVV8TtOmkvJAhFVfg==",
|
1532 |
+
"dependencies": {
|
1533 |
+
"statuses": "^2.0.1"
|
1534 |
+
}
|
1535 |
+
},
|
1536 |
"node_modules/@cspotcode/source-map-support": {
|
1537 |
"version": "0.8.1",
|
1538 |
"resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
|
|
|
1685 |
"resolved": "https://registry.npmjs.org/@floating-ui/utils/-/utils-0.2.2.tgz",
|
1686 |
"integrity": "sha512-J4yDIIthosAsRZ5CPYP/jQvUAQtlZTTD/4suA08/FEnlxqW3sKS9iAhgsa9VYLZ6vDHn/ixJgIqRQPotoBjxIw=="
|
1687 |
},
|
1688 |
+
"node_modules/@gradio/client": {
|
1689 |
+
"version": "1.1.1",
|
1690 |
+
"resolved": "https://registry.npmjs.org/@gradio/client/-/client-1.1.1.tgz",
|
1691 |
+
"integrity": "sha512-6Hzc+/wmNRkodefp0bvfOBQbEakwg31Ye9IVOjfoHkvunHoIzjupM+1m7VwhQXt8pMCjOw8Hc7zhvwnwy6f5GQ==",
|
1692 |
+
"dependencies": {
|
1693 |
+
"@types/eventsource": "^1.1.15",
|
1694 |
+
"bufferutil": "^4.0.7",
|
1695 |
+
"eventsource": "^2.0.2",
|
1696 |
+
"fetch-event-stream": "^0.1.5",
|
1697 |
+
"msw": "^2.2.1",
|
1698 |
+
"semiver": "^1.1.0",
|
1699 |
+
"textlinestream": "^1.1.1",
|
1700 |
+
"typescript": "^5.0.0",
|
1701 |
+
"ws": "^8.13.0"
|
1702 |
+
},
|
1703 |
+
"engines": {
|
1704 |
+
"node": ">=18.0.0"
|
1705 |
+
}
|
1706 |
+
},
|
1707 |
"node_modules/@huggingface/hub": {
|
1708 |
"version": "0.15.1",
|
1709 |
"resolved": "https://registry.npmjs.org/@huggingface/hub/-/hub-0.15.1.tgz",
|
|
|
2195 |
"url": "https://opencollective.com/libvips"
|
2196 |
}
|
2197 |
},
|
2198 |
+
"node_modules/@inquirer/confirm": {
|
2199 |
+
"version": "3.1.9",
|
2200 |
+
"resolved": "https://registry.npmjs.org/@inquirer/confirm/-/confirm-3.1.9.tgz",
|
2201 |
+
"integrity": "sha512-UF09aejxCi4Xqm6N/jJAiFXArXfi9al52AFaSD+2uIHnhZGtd1d6lIGTRMPouVSJxbGEi+HkOWSYaiEY/+szUw==",
|
2202 |
+
"dependencies": {
|
2203 |
+
"@inquirer/core": "^8.2.2",
|
2204 |
+
"@inquirer/type": "^1.3.3"
|
2205 |
+
},
|
2206 |
+
"engines": {
|
2207 |
+
"node": ">=18"
|
2208 |
+
}
|
2209 |
+
},
|
2210 |
+
"node_modules/@inquirer/core": {
|
2211 |
+
"version": "8.2.2",
|
2212 |
+
"resolved": "https://registry.npmjs.org/@inquirer/core/-/core-8.2.2.tgz",
|
2213 |
+
"integrity": "sha512-K8SuNX45jEFlX3EBJpu9B+S2TISzMPGXZIuJ9ME924SqbdW6Pt6fIkKvXg7mOEOKJ4WxpQsxj0UTfcL/A434Ww==",
|
2214 |
+
"dependencies": {
|
2215 |
+
"@inquirer/figures": "^1.0.3",
|
2216 |
+
"@inquirer/type": "^1.3.3",
|
2217 |
+
"@types/mute-stream": "^0.0.4",
|
2218 |
+
"@types/node": "^20.12.13",
|
2219 |
+
"@types/wrap-ansi": "^3.0.0",
|
2220 |
+
"ansi-escapes": "^4.3.2",
|
2221 |
+
"chalk": "^4.1.2",
|
2222 |
+
"cli-spinners": "^2.9.2",
|
2223 |
+
"cli-width": "^4.1.0",
|
2224 |
+
"mute-stream": "^1.0.0",
|
2225 |
+
"signal-exit": "^4.1.0",
|
2226 |
+
"strip-ansi": "^6.0.1",
|
2227 |
+
"wrap-ansi": "^6.2.0"
|
2228 |
+
},
|
2229 |
+
"engines": {
|
2230 |
+
"node": ">=18"
|
2231 |
+
}
|
2232 |
+
},
|
2233 |
+
"node_modules/@inquirer/core/node_modules/@types/node": {
|
2234 |
+
"version": "20.14.5",
|
2235 |
+
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.5.tgz",
|
2236 |
+
"integrity": "sha512-aoRR+fJkZT2l0aGOJhuA8frnCSoNX6W7U2mpNq63+BxBIj5BQFt8rHy627kijCmm63ijdSdwvGgpUsU6MBsZZA==",
|
2237 |
+
"dependencies": {
|
2238 |
+
"undici-types": "~5.26.4"
|
2239 |
+
}
|
2240 |
+
},
|
2241 |
+
"node_modules/@inquirer/core/node_modules/ansi-styles": {
|
2242 |
+
"version": "4.3.0",
|
2243 |
+
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
|
2244 |
+
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
|
2245 |
+
"dependencies": {
|
2246 |
+
"color-convert": "^2.0.1"
|
2247 |
+
},
|
2248 |
+
"engines": {
|
2249 |
+
"node": ">=8"
|
2250 |
+
},
|
2251 |
+
"funding": {
|
2252 |
+
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
|
2253 |
+
}
|
2254 |
+
},
|
2255 |
+
"node_modules/@inquirer/core/node_modules/chalk": {
|
2256 |
+
"version": "4.1.2",
|
2257 |
+
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
|
2258 |
+
"integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
|
2259 |
+
"dependencies": {
|
2260 |
+
"ansi-styles": "^4.1.0",
|
2261 |
+
"supports-color": "^7.1.0"
|
2262 |
+
},
|
2263 |
+
"engines": {
|
2264 |
+
"node": ">=10"
|
2265 |
+
},
|
2266 |
+
"funding": {
|
2267 |
+
"url": "https://github.com/chalk/chalk?sponsor=1"
|
2268 |
+
}
|
2269 |
+
},
|
2270 |
+
"node_modules/@inquirer/core/node_modules/emoji-regex": {
|
2271 |
+
"version": "8.0.0",
|
2272 |
+
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
|
2273 |
+
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
|
2274 |
+
},
|
2275 |
+
"node_modules/@inquirer/core/node_modules/has-flag": {
|
2276 |
+
"version": "4.0.0",
|
2277 |
+
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
|
2278 |
+
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
|
2279 |
+
"engines": {
|
2280 |
+
"node": ">=8"
|
2281 |
+
}
|
2282 |
+
},
|
2283 |
+
"node_modules/@inquirer/core/node_modules/string-width": {
|
2284 |
+
"version": "4.2.3",
|
2285 |
+
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
|
2286 |
+
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
|
2287 |
+
"dependencies": {
|
2288 |
+
"emoji-regex": "^8.0.0",
|
2289 |
+
"is-fullwidth-code-point": "^3.0.0",
|
2290 |
+
"strip-ansi": "^6.0.1"
|
2291 |
+
},
|
2292 |
+
"engines": {
|
2293 |
+
"node": ">=8"
|
2294 |
+
}
|
2295 |
+
},
|
2296 |
+
"node_modules/@inquirer/core/node_modules/supports-color": {
|
2297 |
+
"version": "7.2.0",
|
2298 |
+
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
|
2299 |
+
"integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
|
2300 |
+
"dependencies": {
|
2301 |
+
"has-flag": "^4.0.0"
|
2302 |
+
},
|
2303 |
+
"engines": {
|
2304 |
+
"node": ">=8"
|
2305 |
+
}
|
2306 |
+
},
|
2307 |
+
"node_modules/@inquirer/core/node_modules/wrap-ansi": {
|
2308 |
+
"version": "6.2.0",
|
2309 |
+
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz",
|
2310 |
+
"integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==",
|
2311 |
+
"dependencies": {
|
2312 |
+
"ansi-styles": "^4.0.0",
|
2313 |
+
"string-width": "^4.1.0",
|
2314 |
+
"strip-ansi": "^6.0.0"
|
2315 |
+
},
|
2316 |
+
"engines": {
|
2317 |
+
"node": ">=8"
|
2318 |
+
}
|
2319 |
+
},
|
2320 |
+
"node_modules/@inquirer/figures": {
|
2321 |
+
"version": "1.0.3",
|
2322 |
+
"resolved": "https://registry.npmjs.org/@inquirer/figures/-/figures-1.0.3.tgz",
|
2323 |
+
"integrity": "sha512-ErXXzENMH5pJt5/ssXV0DfWUZqly8nGzf0UcBV9xTnP+KyffE2mqyxIMBrZ8ijQck2nU0TQm40EQB53YreyWHw==",
|
2324 |
+
"engines": {
|
2325 |
+
"node": ">=18"
|
2326 |
+
}
|
2327 |
+
},
|
2328 |
+
"node_modules/@inquirer/type": {
|
2329 |
+
"version": "1.3.3",
|
2330 |
+
"resolved": "https://registry.npmjs.org/@inquirer/type/-/type-1.3.3.tgz",
|
2331 |
+
"integrity": "sha512-xTUt0NulylX27/zMx04ZYar/kr1raaiFTVvQ5feljQsiAgdm0WPj4S73/ye0fbslh+15QrIuDvfCXTek7pMY5A==",
|
2332 |
+
"engines": {
|
2333 |
+
"node": ">=18"
|
2334 |
+
}
|
2335 |
+
},
|
2336 |
"node_modules/@isaacs/cliui": {
|
2337 |
"version": "8.0.2",
|
2338 |
"resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
|
|
|
2620 |
"node": ">= 14"
|
2621 |
}
|
2622 |
},
|
2623 |
+
"node_modules/@mswjs/cookies": {
|
2624 |
+
"version": "1.1.1",
|
2625 |
+
"resolved": "https://registry.npmjs.org/@mswjs/cookies/-/cookies-1.1.1.tgz",
|
2626 |
+
"integrity": "sha512-W68qOHEjx1iD+4VjQudlx26CPIoxmIAtK4ZCexU0/UJBG6jYhcuyzKJx+Iw8uhBIGd9eba64XgWVgo20it1qwA==",
|
2627 |
+
"engines": {
|
2628 |
+
"node": ">=18"
|
2629 |
+
}
|
2630 |
+
},
|
2631 |
+
"node_modules/@mswjs/interceptors": {
|
2632 |
+
"version": "0.29.1",
|
2633 |
+
"resolved": "https://registry.npmjs.org/@mswjs/interceptors/-/interceptors-0.29.1.tgz",
|
2634 |
+
"integrity": "sha512-3rDakgJZ77+RiQUuSK69t1F0m8BQKA8Vh5DCS5V0DWvNY67zob2JhhQrhCO0AKLGINTRSFd1tBaHcJTkhefoSw==",
|
2635 |
+
"dependencies": {
|
2636 |
+
"@open-draft/deferred-promise": "^2.2.0",
|
2637 |
+
"@open-draft/logger": "^0.3.0",
|
2638 |
+
"@open-draft/until": "^2.0.0",
|
2639 |
+
"is-node-process": "^1.2.0",
|
2640 |
+
"outvariant": "^1.2.1",
|
2641 |
+
"strict-event-emitter": "^0.5.1"
|
2642 |
+
},
|
2643 |
+
"engines": {
|
2644 |
+
"node": ">=18"
|
2645 |
+
}
|
2646 |
+
},
|
2647 |
"node_modules/@next/env": {
|
2648 |
"version": "14.2.4",
|
2649 |
"resolved": "https://registry.npmjs.org/@next/env/-/env-14.2.4.tgz",
|
|
|
2824 |
"node": ">= 8"
|
2825 |
}
|
2826 |
},
|
2827 |
+
"node_modules/@open-draft/deferred-promise": {
|
2828 |
+
"version": "2.2.0",
|
2829 |
+
"resolved": "https://registry.npmjs.org/@open-draft/deferred-promise/-/deferred-promise-2.2.0.tgz",
|
2830 |
+
"integrity": "sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA=="
|
2831 |
+
},
|
2832 |
+
"node_modules/@open-draft/logger": {
|
2833 |
+
"version": "0.3.0",
|
2834 |
+
"resolved": "https://registry.npmjs.org/@open-draft/logger/-/logger-0.3.0.tgz",
|
2835 |
+
"integrity": "sha512-X2g45fzhxH238HKO4xbSr7+wBS8Fvw6ixhTDuvLd5mqh6bJJCFAPwU9mPDxbcrRtfxv4u5IHCEH77BmxvXmmxQ==",
|
2836 |
+
"dependencies": {
|
2837 |
+
"is-node-process": "^1.2.0",
|
2838 |
+
"outvariant": "^1.4.0"
|
2839 |
+
}
|
2840 |
+
},
|
2841 |
+
"node_modules/@open-draft/until": {
|
2842 |
+
"version": "2.1.0",
|
2843 |
+
"resolved": "https://registry.npmjs.org/@open-draft/until/-/until-2.1.0.tgz",
|
2844 |
+
"integrity": "sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg=="
|
2845 |
+
},
|
2846 |
"node_modules/@pkgjs/parseargs": {
|
2847 |
"version": "0.11.0",
|
2848 |
"resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
|
|
|
4892 |
"resolved": "https://registry.npmjs.org/@tweenjs/tween.js/-/tween.js-23.1.2.tgz",
|
4893 |
"integrity": "sha512-kMCNaZCJugWI86xiEHaY338CU5JpD0B97p1j1IKNn/Zto8PgACjQx0UxbHjmOcLl/dDOBnItwD07KmCs75pxtQ=="
|
4894 |
},
|
4895 |
+
"node_modules/@types/cookie": {
|
4896 |
+
"version": "0.6.0",
|
4897 |
+
"resolved": "https://registry.npmjs.org/@types/cookie/-/cookie-0.6.0.tgz",
|
4898 |
+
"integrity": "sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA=="
|
4899 |
+
},
|
4900 |
"node_modules/@types/draco3d": {
|
4901 |
"version": "1.4.10",
|
4902 |
"resolved": "https://registry.npmjs.org/@types/draco3d/-/draco3d-1.4.10.tgz",
|
4903 |
"integrity": "sha512-AX22jp8Y7wwaBgAixaSvkoG4M/+PlAcm3Qs4OW8yT9DM4xUpWKeFhLueTAyZF39pviAdcDdeJoACapiAceqNcw=="
|
4904 |
},
|
4905 |
+
"node_modules/@types/eventsource": {
|
4906 |
+
"version": "1.1.15",
|
4907 |
+
"resolved": "https://registry.npmjs.org/@types/eventsource/-/eventsource-1.1.15.tgz",
|
4908 |
+
"integrity": "sha512-XQmGcbnxUNa06HR3VBVkc9+A2Vpi9ZyLJcdS5dwaQQ/4ZMWFO+5c90FnMUpbtMZwB/FChoYHwuVg8TvkECacTA=="
|
4909 |
+
},
|
4910 |
"node_modules/@types/fluent-ffmpeg": {
|
4911 |
"version": "2.1.24",
|
4912 |
"resolved": "https://registry.npmjs.org/@types/fluent-ffmpeg/-/fluent-ffmpeg-2.1.24.tgz",
|
|
|
4940 |
"@types/node": "*"
|
4941 |
}
|
4942 |
},
|
4943 |
+
"node_modules/@types/mute-stream": {
|
4944 |
+
"version": "0.0.4",
|
4945 |
+
"resolved": "https://registry.npmjs.org/@types/mute-stream/-/mute-stream-0.0.4.tgz",
|
4946 |
+
"integrity": "sha512-CPM9nzrCPPJHQNA9keH9CVkVI+WR5kMa+7XEs5jcGQ0VoAGnLv242w8lIVgwAEfmE4oufJRaTc9PNLQl0ioAow==",
|
4947 |
+
"dependencies": {
|
4948 |
+
"@types/node": "*"
|
4949 |
+
}
|
4950 |
+
},
|
4951 |
"node_modules/@types/node": {
|
4952 |
"version": "20.12.7",
|
4953 |
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.12.7.tgz",
|
|
|
5010 |
"resolved": "https://registry.npmjs.org/@types/stats.js/-/stats.js-0.17.3.tgz",
|
5011 |
"integrity": "sha512-pXNfAD3KHOdif9EQXZ9deK82HVNaXP5ZIF5RP2QG6OQFNTaY2YIetfrE9t528vEreGQvEPRDDc8muaoYeK0SxQ=="
|
5012 |
},
|
5013 |
+
"node_modules/@types/statuses": {
|
5014 |
+
"version": "2.0.5",
|
5015 |
+
"resolved": "https://registry.npmjs.org/@types/statuses/-/statuses-2.0.5.tgz",
|
5016 |
+
"integrity": "sha512-jmIUGWrAiwu3dZpxntxieC+1n/5c3mjrImkmOSQ2NC5uP6cYO4aAZDdSmRcI5C1oiTmqlZGHC+/NmJrKogbP5A=="
|
5017 |
+
},
|
5018 |
"node_modules/@types/three": {
|
5019 |
"version": "0.165.0",
|
5020 |
"resolved": "https://registry.npmjs.org/@types/three/-/three-0.165.0.tgz",
|
|
|
5038 |
"resolved": "https://registry.npmjs.org/@types/webxr/-/webxr-0.5.16.tgz",
|
5039 |
"integrity": "sha512-0E0Cl84FECtzrB4qG19TNTqpunw0F1YF0QZZnFMF6pDw1kNKJtrlTKlVB34stGIsHbZsYQ7H0tNjPfZftkHHoA=="
|
5040 |
},
|
5041 |
+
"node_modules/@types/wrap-ansi": {
|
5042 |
+
"version": "3.0.0",
|
5043 |
+
"resolved": "https://registry.npmjs.org/@types/wrap-ansi/-/wrap-ansi-3.0.0.tgz",
|
5044 |
+
"integrity": "sha512-ltIpx+kM7g/MLRZfkbL7EsCEjfzCcScLpkg37eXEtx5kmrAKBkTJwd1GIAjDSL8wTpM6Hzn5YO4pSb91BEwu1g=="
|
5045 |
+
},
|
5046 |
"node_modules/@typescript-eslint/parser": {
|
5047 |
"version": "6.21.0",
|
5048 |
"resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-6.21.0.tgz",
|
|
|
5289 |
"url": "https://github.com/sponsors/epoberezkin"
|
5290 |
}
|
5291 |
},
|
5292 |
+
"node_modules/ansi-escapes": {
|
5293 |
+
"version": "4.3.2",
|
5294 |
+
"resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz",
|
5295 |
+
"integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==",
|
5296 |
+
"dependencies": {
|
5297 |
+
"type-fest": "^0.21.3"
|
5298 |
+
},
|
5299 |
+
"engines": {
|
5300 |
+
"node": ">=8"
|
5301 |
+
},
|
5302 |
+
"funding": {
|
5303 |
+
"url": "https://github.com/sponsors/sindresorhus"
|
5304 |
+
}
|
5305 |
+
},
|
5306 |
+
"node_modules/ansi-escapes/node_modules/type-fest": {
|
5307 |
+
"version": "0.21.3",
|
5308 |
+
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz",
|
5309 |
+
"integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==",
|
5310 |
+
"engines": {
|
5311 |
+
"node": ">=10"
|
5312 |
+
},
|
5313 |
+
"funding": {
|
5314 |
+
"url": "https://github.com/sponsors/sindresorhus"
|
5315 |
+
}
|
5316 |
+
},
|
5317 |
"node_modules/ansi-regex": {
|
5318 |
"version": "5.0.1",
|
5319 |
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
|
|
|
5633 |
"node": ">=4"
|
5634 |
}
|
5635 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
5636 |
"node_modules/axobject-query": {
|
5637 |
"version": "3.2.1",
|
5638 |
"resolved": "https://registry.npmjs.org/axobject-query/-/axobject-query-3.2.1.tgz",
|
|
|
5806 |
"resolved": "https://registry.npmjs.org/buffer-equal-constant-time/-/buffer-equal-constant-time-1.0.1.tgz",
|
5807 |
"integrity": "sha512-zRpUiDwd/xk6ADqPMATG8vc9VPrkck7T07OIx0gnjmJAnHnTVXNQG3vfvWNuiZIkwu9KrKdA1iJKfsfTVxE6NA=="
|
5808 |
},
|
5809 |
+
"node_modules/bufferutil": {
|
5810 |
+
"version": "4.0.8",
|
5811 |
+
"resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.8.tgz",
|
5812 |
+
"integrity": "sha512-4T53u4PdgsXqKaIctwF8ifXlRTTmEPJ8iEPWFdGZvcf7sbwYo6FKFEX9eNNAnzFZ7EzJAQ3CJeOtCRA4rDp7Pw==",
|
5813 |
+
"hasInstallScript": true,
|
5814 |
+
"dependencies": {
|
5815 |
+
"node-gyp-build": "^4.3.0"
|
5816 |
+
},
|
5817 |
+
"engines": {
|
5818 |
+
"node": ">=6.14.2"
|
5819 |
+
}
|
5820 |
+
},
|
5821 |
"node_modules/busboy": {
|
5822 |
"version": "1.6.0",
|
5823 |
"resolved": "https://registry.npmjs.org/busboy/-/busboy-1.6.0.tgz",
|
|
|
6011 |
"url": "https://github.com/sponsors/sindresorhus"
|
6012 |
}
|
6013 |
},
|
6014 |
+
"node_modules/cli-width": {
|
6015 |
+
"version": "4.1.0",
|
6016 |
+
"resolved": "https://registry.npmjs.org/cli-width/-/cli-width-4.1.0.tgz",
|
6017 |
+
"integrity": "sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==",
|
6018 |
+
"engines": {
|
6019 |
+
"node": ">= 12"
|
6020 |
+
}
|
6021 |
+
},
|
6022 |
"node_modules/client-only": {
|
6023 |
"version": "0.0.1",
|
6024 |
"resolved": "https://registry.npmjs.org/client-only/-/client-only-0.0.1.tgz",
|
6025 |
"integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA=="
|
6026 |
},
|
6027 |
+
"node_modules/cliui": {
|
6028 |
+
"version": "8.0.1",
|
6029 |
+
"resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
|
6030 |
+
"integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==",
|
6031 |
+
"dependencies": {
|
6032 |
+
"string-width": "^4.2.0",
|
6033 |
+
"strip-ansi": "^6.0.1",
|
6034 |
+
"wrap-ansi": "^7.0.0"
|
6035 |
+
},
|
6036 |
+
"engines": {
|
6037 |
+
"node": ">=12"
|
6038 |
+
}
|
6039 |
+
},
|
6040 |
+
"node_modules/cliui/node_modules/ansi-styles": {
|
6041 |
+
"version": "4.3.0",
|
6042 |
+
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
|
6043 |
+
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
|
6044 |
+
"dependencies": {
|
6045 |
+
"color-convert": "^2.0.1"
|
6046 |
+
},
|
6047 |
+
"engines": {
|
6048 |
+
"node": ">=8"
|
6049 |
+
},
|
6050 |
+
"funding": {
|
6051 |
+
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
|
6052 |
+
}
|
6053 |
+
},
|
6054 |
+
"node_modules/cliui/node_modules/emoji-regex": {
|
6055 |
+
"version": "8.0.0",
|
6056 |
+
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
|
6057 |
+
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
|
6058 |
+
},
|
6059 |
+
"node_modules/cliui/node_modules/string-width": {
|
6060 |
+
"version": "4.2.3",
|
6061 |
+
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
|
6062 |
+
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
|
6063 |
+
"dependencies": {
|
6064 |
+
"emoji-regex": "^8.0.0",
|
6065 |
+
"is-fullwidth-code-point": "^3.0.0",
|
6066 |
+
"strip-ansi": "^6.0.1"
|
6067 |
+
},
|
6068 |
+
"engines": {
|
6069 |
+
"node": ">=8"
|
6070 |
+
}
|
6071 |
+
},
|
6072 |
+
"node_modules/cliui/node_modules/wrap-ansi": {
|
6073 |
+
"version": "7.0.0",
|
6074 |
+
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
|
6075 |
+
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
|
6076 |
+
"dependencies": {
|
6077 |
+
"ansi-styles": "^4.0.0",
|
6078 |
+
"string-width": "^4.1.0",
|
6079 |
+
"strip-ansi": "^6.0.0"
|
6080 |
+
},
|
6081 |
+
"engines": {
|
6082 |
+
"node": ">=10"
|
6083 |
+
},
|
6084 |
+
"funding": {
|
6085 |
+
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
|
6086 |
+
}
|
6087 |
+
},
|
6088 |
"node_modules/clsx": {
|
6089 |
"version": "2.1.1",
|
6090 |
"resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz",
|
|
|
6463 |
"integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==",
|
6464 |
"peer": true
|
6465 |
},
|
6466 |
+
"node_modules/cookie": {
|
6467 |
+
"version": "0.5.0",
|
6468 |
+
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz",
|
6469 |
+
"integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==",
|
6470 |
+
"engines": {
|
6471 |
+
"node": ">= 0.6"
|
6472 |
+
}
|
6473 |
+
},
|
6474 |
"node_modules/create-require": {
|
6475 |
"version": "1.1.1",
|
6476 |
"resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz",
|
|
|
7539 |
"node": ">=0.8.x"
|
7540 |
}
|
7541 |
},
|
7542 |
+
"node_modules/eventsource": {
|
7543 |
+
"version": "2.0.2",
|
7544 |
+
"resolved": "https://registry.npmjs.org/eventsource/-/eventsource-2.0.2.tgz",
|
7545 |
+
"integrity": "sha512-IzUmBGPR3+oUG9dUeXynyNmf91/3zUSJg1lCktzKw47OXuhco54U3r9B7O4XX+Rb1Itm9OZ2b0RkTs10bICOxA==",
|
7546 |
+
"engines": {
|
7547 |
+
"node": ">=12.0.0"
|
7548 |
+
}
|
7549 |
+
},
|
7550 |
"node_modules/eventsource-parser": {
|
7551 |
"version": "1.1.2",
|
7552 |
"resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-1.1.2.tgz",
|
|
|
7647 |
"reusify": "^1.0.4"
|
7648 |
}
|
7649 |
},
|
7650 |
+
"node_modules/fetch-event-stream": {
|
7651 |
+
"version": "0.1.5",
|
7652 |
+
"resolved": "https://registry.npmjs.org/fetch-event-stream/-/fetch-event-stream-0.1.5.tgz",
|
7653 |
+
"integrity": "sha512-V1PWovkspxQfssq/NnxoEyQo1DV+MRK/laPuPblIZmSjMN8P5u46OhlFQznSr9p/t0Sp8Uc6SbM3yCMfr0KU8g=="
|
7654 |
+
},
|
7655 |
"node_modules/fflate": {
|
7656 |
"version": "0.8.2",
|
7657 |
"resolved": "https://registry.npmjs.org/fflate/-/fflate-0.8.2.tgz",
|
|
|
7757 |
"which": "bin/which"
|
7758 |
}
|
7759 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
7760 |
"node_modules/for-each": {
|
7761 |
"version": "0.3.3",
|
7762 |
"resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz",
|
|
|
7929 |
"node": ">=6.9.0"
|
7930 |
}
|
7931 |
},
|
7932 |
+
"node_modules/get-caller-file": {
|
7933 |
+
"version": "2.0.5",
|
7934 |
+
"resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
|
7935 |
+
"integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
|
7936 |
+
"engines": {
|
7937 |
+
"node": "6.* || 8.* || >= 10.*"
|
7938 |
+
}
|
7939 |
+
},
|
7940 |
"node_modules/get-east-asian-width": {
|
7941 |
"version": "1.2.0",
|
7942 |
"resolved": "https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.2.0.tgz",
|
|
|
8167 |
"resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz",
|
8168 |
"integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag=="
|
8169 |
},
|
8170 |
+
"node_modules/graphql": {
|
8171 |
+
"version": "16.8.2",
|
8172 |
+
"resolved": "https://registry.npmjs.org/graphql/-/graphql-16.8.2.tgz",
|
8173 |
+
"integrity": "sha512-cvVIBILwuoSyD54U4cF/UXDh5yAobhNV/tPygI4lZhgOIJQE/WLWC4waBRb4I6bDVYb3OVx3lfHbaQOEoUD5sg==",
|
8174 |
+
"engines": {
|
8175 |
+
"node": "^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0"
|
8176 |
+
}
|
8177 |
+
},
|
8178 |
"node_modules/groq-sdk": {
|
8179 |
"version": "0.3.3",
|
8180 |
"resolved": "https://registry.npmjs.org/groq-sdk/-/groq-sdk-0.3.3.tgz",
|
|
|
8299 |
"he": "bin/he"
|
8300 |
}
|
8301 |
},
|
8302 |
+
"node_modules/headers-polyfill": {
|
8303 |
+
"version": "4.0.3",
|
8304 |
+
"resolved": "https://registry.npmjs.org/headers-polyfill/-/headers-polyfill-4.0.3.tgz",
|
8305 |
+
"integrity": "sha512-IScLbePpkvO846sIwOtOTDjutRMWdXdJmXdMvk6gCBHxFO8d+QKOQedyZSxFTTFYRSmlgSTDtXqqq4pcenBXLQ=="
|
8306 |
+
},
|
8307 |
"node_modules/hls.js": {
|
8308 |
"version": "1.3.5",
|
8309 |
"resolved": "https://registry.npmjs.org/hls.js/-/hls.js-1.3.5.tgz",
|
|
|
8649 |
"url": "https://github.com/sponsors/ljharb"
|
8650 |
}
|
8651 |
},
|
8652 |
+
"node_modules/is-node-process": {
|
8653 |
+
"version": "1.2.0",
|
8654 |
+
"resolved": "https://registry.npmjs.org/is-node-process/-/is-node-process-1.2.0.tgz",
|
8655 |
+
"integrity": "sha512-Vg4o6/fqPxIjtxgUH5QLJhwZ7gW5diGCVlXpuUfELC62CuxM1iHcRe51f2W1FDy04Ai4KJkagKjx3XaqyfRKXw=="
|
8656 |
+
},
|
8657 |
"node_modules/is-number": {
|
8658 |
"version": "7.0.0",
|
8659 |
"resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
|
|
|
9372 |
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz",
|
9373 |
"integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w=="
|
9374 |
},
|
9375 |
+
"node_modules/msw": {
|
9376 |
+
"version": "2.3.1",
|
9377 |
+
"resolved": "https://registry.npmjs.org/msw/-/msw-2.3.1.tgz",
|
9378 |
+
"integrity": "sha512-ocgvBCLn/5l3jpl1lssIb3cniuACJLoOfZu01e3n5dbJrpA5PeeWn28jCLgQDNt6d7QT8tF2fYRzm9JoEHtiig==",
|
9379 |
+
"hasInstallScript": true,
|
9380 |
+
"dependencies": {
|
9381 |
+
"@bundled-es-modules/cookie": "^2.0.0",
|
9382 |
+
"@bundled-es-modules/statuses": "^1.0.1",
|
9383 |
+
"@inquirer/confirm": "^3.0.0",
|
9384 |
+
"@mswjs/cookies": "^1.1.0",
|
9385 |
+
"@mswjs/interceptors": "^0.29.0",
|
9386 |
+
"@open-draft/until": "^2.1.0",
|
9387 |
+
"@types/cookie": "^0.6.0",
|
9388 |
+
"@types/statuses": "^2.0.4",
|
9389 |
+
"chalk": "^4.1.2",
|
9390 |
+
"graphql": "^16.8.1",
|
9391 |
+
"headers-polyfill": "^4.0.2",
|
9392 |
+
"is-node-process": "^1.2.0",
|
9393 |
+
"outvariant": "^1.4.2",
|
9394 |
+
"path-to-regexp": "^6.2.0",
|
9395 |
+
"strict-event-emitter": "^0.5.1",
|
9396 |
+
"type-fest": "^4.9.0",
|
9397 |
+
"yargs": "^17.7.2"
|
9398 |
+
},
|
9399 |
+
"bin": {
|
9400 |
+
"msw": "cli/index.js"
|
9401 |
+
},
|
9402 |
+
"engines": {
|
9403 |
+
"node": ">=18"
|
9404 |
+
},
|
9405 |
+
"funding": {
|
9406 |
+
"url": "https://github.com/sponsors/mswjs"
|
9407 |
+
},
|
9408 |
+
"peerDependencies": {
|
9409 |
+
"typescript": ">= 4.7.x"
|
9410 |
+
},
|
9411 |
+
"peerDependenciesMeta": {
|
9412 |
+
"typescript": {
|
9413 |
+
"optional": true
|
9414 |
+
}
|
9415 |
+
}
|
9416 |
+
},
|
9417 |
+
"node_modules/msw/node_modules/ansi-styles": {
|
9418 |
+
"version": "4.3.0",
|
9419 |
+
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
|
9420 |
+
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
|
9421 |
+
"dependencies": {
|
9422 |
+
"color-convert": "^2.0.1"
|
9423 |
+
},
|
9424 |
+
"engines": {
|
9425 |
+
"node": ">=8"
|
9426 |
+
},
|
9427 |
+
"funding": {
|
9428 |
+
"url": "https://github.com/chalk/ansi-styles?sponsor=1"
|
9429 |
+
}
|
9430 |
+
},
|
9431 |
+
"node_modules/msw/node_modules/chalk": {
|
9432 |
+
"version": "4.1.2",
|
9433 |
+
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
|
9434 |
+
"integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
|
9435 |
+
"dependencies": {
|
9436 |
+
"ansi-styles": "^4.1.0",
|
9437 |
+
"supports-color": "^7.1.0"
|
9438 |
+
},
|
9439 |
+
"engines": {
|
9440 |
+
"node": ">=10"
|
9441 |
+
},
|
9442 |
+
"funding": {
|
9443 |
+
"url": "https://github.com/chalk/chalk?sponsor=1"
|
9444 |
+
}
|
9445 |
+
},
|
9446 |
+
"node_modules/msw/node_modules/has-flag": {
|
9447 |
+
"version": "4.0.0",
|
9448 |
+
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
|
9449 |
+
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
|
9450 |
+
"engines": {
|
9451 |
+
"node": ">=8"
|
9452 |
+
}
|
9453 |
+
},
|
9454 |
+
"node_modules/msw/node_modules/supports-color": {
|
9455 |
+
"version": "7.2.0",
|
9456 |
+
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
|
9457 |
+
"integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
|
9458 |
+
"dependencies": {
|
9459 |
+
"has-flag": "^4.0.0"
|
9460 |
+
},
|
9461 |
+
"engines": {
|
9462 |
+
"node": ">=8"
|
9463 |
+
}
|
9464 |
+
},
|
9465 |
+
"node_modules/msw/node_modules/type-fest": {
|
9466 |
+
"version": "4.20.1",
|
9467 |
+
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-4.20.1.tgz",
|
9468 |
+
"integrity": "sha512-R6wDsVsoS9xYOpy8vgeBlqpdOyzJ12HNfQhC/aAKWM3YoCV9TtunJzh/QpkMgeDhkoynDcw5f1y+qF9yc/HHyg==",
|
9469 |
+
"engines": {
|
9470 |
+
"node": ">=16"
|
9471 |
+
},
|
9472 |
+
"funding": {
|
9473 |
+
"url": "https://github.com/sponsors/sindresorhus"
|
9474 |
+
}
|
9475 |
+
},
|
9476 |
"node_modules/mustache": {
|
9477 |
"version": "4.2.0",
|
9478 |
"resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz",
|
|
|
9481 |
"mustache": "bin/mustache"
|
9482 |
}
|
9483 |
},
|
9484 |
+
"node_modules/mute-stream": {
|
9485 |
+
"version": "1.0.0",
|
9486 |
+
"resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-1.0.0.tgz",
|
9487 |
+
"integrity": "sha512-avsJQhyd+680gKXyG/sQc0nXaC6rBkPOfyHYcFb9+hdkqQkR9bdnkJ0AMZhke0oesPqIO+mFFJ+IdBc7mst4IA==",
|
9488 |
+
"engines": {
|
9489 |
+
"node": "^14.17.0 || ^16.13.0 || >=18.0.0"
|
9490 |
+
}
|
9491 |
+
},
|
9492 |
"node_modules/mz": {
|
9493 |
"version": "2.7.0",
|
9494 |
"resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz",
|
|
|
9652 |
"node": ">= 6.13.0"
|
9653 |
}
|
9654 |
},
|
9655 |
+
"node_modules/node-gyp-build": {
|
9656 |
+
"version": "4.8.1",
|
9657 |
+
"resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.8.1.tgz",
|
9658 |
+
"integrity": "sha512-OSs33Z9yWr148JZcbZd5WiAXhh/n9z8TxQcdMhIOlpN9AhWpLfvVFO73+m77bBABQMaY9XSvIa+qk0jlI7Gcaw==",
|
9659 |
+
"bin": {
|
9660 |
+
"node-gyp-build": "bin.js",
|
9661 |
+
"node-gyp-build-optional": "optional.js",
|
9662 |
+
"node-gyp-build-test": "build-test.js"
|
9663 |
+
}
|
9664 |
+
},
|
9665 |
"node_modules/node-html-parser": {
|
9666 |
"version": "6.1.13",
|
9667 |
"resolved": "https://registry.npmjs.org/node-html-parser/-/node-html-parser-6.1.13.tgz",
|
|
|
9970 |
"url": "https://github.com/chalk/strip-ansi?sponsor=1"
|
9971 |
}
|
9972 |
},
|
9973 |
+
"node_modules/outvariant": {
|
9974 |
+
"version": "1.4.2",
|
9975 |
+
"resolved": "https://registry.npmjs.org/outvariant/-/outvariant-1.4.2.tgz",
|
9976 |
+
"integrity": "sha512-Ou3dJ6bA/UJ5GVHxah4LnqDwZRwAmWxrG3wtrHrbGnP4RnLCtA64A4F+ae7Y8ww660JaddSoArUR5HjipWSHAQ=="
|
9977 |
+
},
|
9978 |
"node_modules/p-finally": {
|
9979 |
"version": "1.0.0",
|
9980 |
"resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz",
|
|
|
10112 |
"node": "14 || >=16.14"
|
10113 |
}
|
10114 |
},
|
10115 |
+
"node_modules/path-to-regexp": {
|
10116 |
+
"version": "6.2.2",
|
10117 |
+
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-6.2.2.tgz",
|
10118 |
+
"integrity": "sha512-GQX3SSMokngb36+whdpRXE+3f9V8UzyAorlYvOGx87ufGHehNTn5lCxrKtLyZ4Yl/wEKnNnr98ZzOwwDZV5ogw=="
|
10119 |
+
},
|
10120 |
"node_modules/path-type": {
|
10121 |
"version": "4.0.0",
|
10122 |
"resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz",
|
|
|
10393 |
"react-is": "^16.13.1"
|
10394 |
}
|
10395 |
},
|
|
|
|
|
|
|
|
|
|
|
10396 |
"node_modules/punycode": {
|
10397 |
"version": "2.3.1",
|
10398 |
"resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
|
|
|
10844 |
"readable-stream": ">=4.0.0"
|
10845 |
}
|
10846 |
},
|
10847 |
+
"node_modules/require-directory": {
|
10848 |
+
"version": "2.1.1",
|
10849 |
+
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
|
10850 |
+
"integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
|
10851 |
+
"engines": {
|
10852 |
+
"node": ">=0.10.0"
|
10853 |
+
}
|
10854 |
+
},
|
10855 |
"node_modules/require-from-string": {
|
10856 |
"version": "2.0.2",
|
10857 |
"resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz",
|
|
|
11056 |
"loose-envify": "^1.1.0"
|
11057 |
}
|
11058 |
},
|
11059 |
+
"node_modules/semiver": {
|
11060 |
+
"version": "1.1.0",
|
11061 |
+
"resolved": "https://registry.npmjs.org/semiver/-/semiver-1.1.0.tgz",
|
11062 |
+
"integrity": "sha512-QNI2ChmuioGC1/xjyYwyZYADILWyW6AmS1UH6gDj/SFUUUS4MBAWs/7mxnkRPc/F4iHezDP+O8t0dO8WHiEOdg==",
|
11063 |
+
"engines": {
|
11064 |
+
"node": ">=6"
|
11065 |
+
}
|
11066 |
+
},
|
11067 |
"node_modules/semver": {
|
11068 |
"version": "7.6.2",
|
11069 |
"resolved": "https://registry.npmjs.org/semver/-/semver-7.6.2.tgz",
|
|
|
11285 |
"resolved": "https://registry.npmjs.org/stats.js/-/stats.js-0.17.0.tgz",
|
11286 |
"integrity": "sha512-hNKz8phvYLPEcRkeG1rsGmV5ChMjKDAWU7/OJJdDErPBNChQXxCo3WZurGpnWc6gZhAzEPFad1aVgyOANH1sMw=="
|
11287 |
},
|
11288 |
+
"node_modules/statuses": {
|
11289 |
+
"version": "2.0.1",
|
11290 |
+
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
|
11291 |
+
"integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==",
|
11292 |
+
"engines": {
|
11293 |
+
"node": ">= 0.8"
|
11294 |
+
}
|
11295 |
+
},
|
11296 |
"node_modules/stdin-discarder": {
|
11297 |
"version": "0.2.2",
|
11298 |
"resolved": "https://registry.npmjs.org/stdin-discarder/-/stdin-discarder-0.2.2.tgz",
|
|
|
11312 |
"node": ">=10.0.0"
|
11313 |
}
|
11314 |
},
|
11315 |
+
"node_modules/strict-event-emitter": {
|
11316 |
+
"version": "0.5.1",
|
11317 |
+
"resolved": "https://registry.npmjs.org/strict-event-emitter/-/strict-event-emitter-0.5.1.tgz",
|
11318 |
+
"integrity": "sha512-vMgjE/GGEPEFnhFub6pa4FmJBRBVOLpIII2hvCZ8Kzb7K0hlHo7mQv6xYrBvCL2LtAIBwFUK8wvuJgTVSQ5MFQ=="
|
11319 |
+
},
|
11320 |
"node_modules/string_decoder": {
|
11321 |
"version": "1.3.0",
|
11322 |
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
|
|
|
11682 |
"resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
|
11683 |
"integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw=="
|
11684 |
},
|
11685 |
+
"node_modules/textlinestream": {
|
11686 |
+
"version": "1.1.1",
|
11687 |
+
"resolved": "https://registry.npmjs.org/textlinestream/-/textlinestream-1.1.1.tgz",
|
11688 |
+
"integrity": "sha512-iBHbi7BQxrFmwZUQJsT0SjNzlLLsXhvW/kg7EyOMVMBIrlnj/qYofwo1LVLZi+3GbUEo96Iu2eqToI2+lZoAEQ=="
|
11689 |
+
},
|
11690 |
"node_modules/thenify": {
|
11691 |
"version": "3.3.1",
|
11692 |
"resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz",
|
|
|
12517 |
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
|
12518 |
"integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="
|
12519 |
},
|
12520 |
+
"node_modules/ws": {
|
12521 |
+
"version": "8.17.1",
|
12522 |
+
"resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz",
|
12523 |
+
"integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==",
|
12524 |
+
"engines": {
|
12525 |
+
"node": ">=10.0.0"
|
12526 |
+
},
|
12527 |
+
"peerDependencies": {
|
12528 |
+
"bufferutil": "^4.0.1",
|
12529 |
+
"utf-8-validate": ">=5.0.2"
|
12530 |
+
},
|
12531 |
+
"peerDependenciesMeta": {
|
12532 |
+
"bufferutil": {
|
12533 |
+
"optional": true
|
12534 |
+
},
|
12535 |
+
"utf-8-validate": {
|
12536 |
+
"optional": true
|
12537 |
+
}
|
12538 |
+
}
|
12539 |
+
},
|
12540 |
+
"node_modules/y18n": {
|
12541 |
+
"version": "5.0.8",
|
12542 |
+
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
|
12543 |
+
"integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
|
12544 |
+
"engines": {
|
12545 |
+
"node": ">=10"
|
12546 |
+
}
|
12547 |
+
},
|
12548 |
"node_modules/yallist": {
|
12549 |
"version": "4.0.0",
|
12550 |
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
|
|
|
12561 |
"node": ">= 14"
|
12562 |
}
|
12563 |
},
|
12564 |
+
"node_modules/yargs": {
|
12565 |
+
"version": "17.7.2",
|
12566 |
+
"resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
|
12567 |
+
"integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
|
12568 |
+
"dependencies": {
|
12569 |
+
"cliui": "^8.0.1",
|
12570 |
+
"escalade": "^3.1.1",
|
12571 |
+
"get-caller-file": "^2.0.5",
|
12572 |
+
"require-directory": "^2.1.1",
|
12573 |
+
"string-width": "^4.2.3",
|
12574 |
+
"y18n": "^5.0.5",
|
12575 |
+
"yargs-parser": "^21.1.1"
|
12576 |
+
},
|
12577 |
+
"engines": {
|
12578 |
+
"node": ">=12"
|
12579 |
+
}
|
12580 |
+
},
|
12581 |
+
"node_modules/yargs-parser": {
|
12582 |
+
"version": "21.1.1",
|
12583 |
+
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz",
|
12584 |
+
"integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==",
|
12585 |
+
"engines": {
|
12586 |
+
"node": ">=12"
|
12587 |
+
}
|
12588 |
+
},
|
12589 |
+
"node_modules/yargs/node_modules/emoji-regex": {
|
12590 |
+
"version": "8.0.0",
|
12591 |
+
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
|
12592 |
+
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
|
12593 |
+
},
|
12594 |
+
"node_modules/yargs/node_modules/string-width": {
|
12595 |
+
"version": "4.2.3",
|
12596 |
+
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
|
12597 |
+
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
|
12598 |
+
"dependencies": {
|
12599 |
+
"emoji-regex": "^8.0.0",
|
12600 |
+
"is-fullwidth-code-point": "^3.0.0",
|
12601 |
+
"strip-ansi": "^6.0.1"
|
12602 |
+
},
|
12603 |
+
"engines": {
|
12604 |
+
"node": ">=8"
|
12605 |
+
}
|
12606 |
+
},
|
12607 |
"node_modules/yn": {
|
12608 |
"version": "3.1.1",
|
12609 |
"resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
|
|
|
12637 |
}
|
12638 |
},
|
12639 |
"node_modules/zod-to-json-schema": {
|
12640 |
+
"version": "3.23.1",
|
12641 |
+
"resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.23.1.tgz",
|
12642 |
+
"integrity": "sha512-oT9INvydob1XV0v1d2IadrR74rLtDInLvDFfAa1CG0Pmg/vxATk7I2gSelfj271mbzeM4Da0uuDQE/Nkj3DWNw==",
|
12643 |
"peerDependencies": {
|
12644 |
"zod": "^3.23.3"
|
12645 |
}
|
package.json
CHANGED
@@ -12,8 +12,9 @@
|
|
12 |
"dependencies": {
|
13 |
"@aitube/clap": "0.0.27",
|
14 |
"@aitube/engine": "0.0.24",
|
15 |
-
"@aitube/timeline": "0.0.
|
16 |
"@fal-ai/serverless-client": "^0.10.3",
|
|
|
17 |
"@huggingface/hub": "^0.15.1",
|
18 |
"@huggingface/inference": "^2.7.0",
|
19 |
"@langchain/anthropic": "^0.2.0",
|
@@ -58,14 +59,12 @@
|
|
58 |
"@upstash/ratelimit": "^1.1.3",
|
59 |
"@upstash/redis": "^1.31.1",
|
60 |
"autoprefixer": "10.4.17",
|
61 |
-
"axios": "^1.7.2",
|
62 |
"class-variance-authority": "^0.7.0",
|
63 |
"clsx": "^2.1.0",
|
64 |
"cmdk": "^0.2.1",
|
65 |
"eslint": "8.57.0",
|
66 |
"eslint-config-next": "14.1.0",
|
67 |
"fluent-ffmpeg": "^2.1.3",
|
68 |
-
"form-data": "^4.0.0",
|
69 |
"fs-extra": "^11.2.0",
|
70 |
"lucide-react": "^0.334.0",
|
71 |
"mlt-xml": "^2.0.2",
|
|
|
12 |
"dependencies": {
|
13 |
"@aitube/clap": "0.0.27",
|
14 |
"@aitube/engine": "0.0.24",
|
15 |
+
"@aitube/timeline": "0.0.29",
|
16 |
"@fal-ai/serverless-client": "^0.10.3",
|
17 |
+
"@gradio/client": "^1.1.1",
|
18 |
"@huggingface/hub": "^0.15.1",
|
19 |
"@huggingface/inference": "^2.7.0",
|
20 |
"@langchain/anthropic": "^0.2.0",
|
|
|
59 |
"@upstash/ratelimit": "^1.1.3",
|
60 |
"@upstash/redis": "^1.31.1",
|
61 |
"autoprefixer": "10.4.17",
|
|
|
62 |
"class-variance-authority": "^0.7.0",
|
63 |
"clsx": "^2.1.0",
|
64 |
"cmdk": "^0.2.1",
|
65 |
"eslint": "8.57.0",
|
66 |
"eslint-config-next": "14.1.0",
|
67 |
"fluent-ffmpeg": "^2.1.3",
|
|
|
68 |
"fs-extra": "^11.2.0",
|
69 |
"lucide-react": "^0.334.0",
|
70 |
"mlt-xml": "^2.0.2",
|
src/app/api/resolve/providers/falai/index.ts
CHANGED
@@ -1,10 +1,8 @@
|
|
1 |
import * as fal from '@fal-ai/serverless-client'
|
2 |
|
3 |
import { FalAiImageSize, ResolveRequest } from "@/types"
|
4 |
-
import { ClapMediaOrientation, ClapSegment, ClapSegmentCategory
|
5 |
-
import { decodeOutput } from '@/lib/utils/decodeOutput'
|
6 |
import { FalAiAudioResponse, FalAiImageResponse, FalAiSpeechResponse, FalAiVideoResponse } from './types'
|
7 |
-
import { getResolveRequestPrompts } from '@/lib/utils/getResolveRequestPrompts'
|
8 |
|
9 |
export async function resolveSegment(request: ResolveRequest): Promise<ClapSegment> {
|
10 |
if (!request.settings.falAiApiKey) {
|
@@ -15,161 +13,146 @@ export async function resolveSegment(request: ResolveRequest): Promise<ClapSegme
|
|
15 |
credentials: request.settings.falAiApiKey
|
16 |
})
|
17 |
|
18 |
-
const segment
|
19 |
|
20 |
-
let content = ''
|
21 |
|
22 |
-
|
|
|
|
|
|
|
23 |
|
24 |
-
try {
|
25 |
-
|
26 |
-
// for doc see:
|
27 |
-
// https://fal.ai/models/fal-ai/fast-sdxl/api
|
28 |
-
|
29 |
-
if (request.segment.category === ClapSegmentCategory.STORYBOARD) {
|
30 |
-
|
31 |
-
|
32 |
-
if (!prompts.positivePrompt) {
|
33 |
-
console.error(`resolveSegment: cannot resolve a storyboard with an empty prompt`)
|
34 |
-
return segment
|
35 |
-
}
|
36 |
-
|
37 |
-
const imageSize =
|
38 |
-
request.meta.orientation === ClapMediaOrientation.SQUARE
|
39 |
-
? FalAiImageSize.SQUARE_HD
|
40 |
-
: request.meta.orientation === ClapMediaOrientation.PORTRAIT
|
41 |
-
? FalAiImageSize.PORTRAIT_16_9
|
42 |
-
: FalAiImageSize.LANDSCAPE_16_9
|
43 |
-
|
44 |
-
let result: FalAiImageResponse | undefined = undefined
|
45 |
-
|
46 |
-
if (request.settings.falAiModelForImage === "fal-ai/pulid") {
|
47 |
-
if (!request.mainCharacterEntity?.imageId) {
|
48 |
-
// throw new Error(`you selected model ${request.settings.falAiModelForImage}, but no character was found, so skipping`)
|
49 |
-
// console.log(`warning: user selected model ${request.settings.falAiModelForImage}, but no character was found. Falling back to fal-ai/fast-sdxl`)
|
50 |
|
51 |
-
|
52 |
-
|
53 |
-
|
54 |
-
|
55 |
-
|
56 |
-
if (request.settings.falAiModelForImage === "fal-ai/pulid") {
|
57 |
-
result = await fal.run(request.settings.falAiModelForImage, {
|
58 |
-
input: {
|
59 |
-
reference_images: [{
|
60 |
-
image_url: request.mainCharacterEntity?.imageId
|
61 |
-
}],
|
62 |
-
image_size: imageSize,
|
63 |
-
num_images: 1,
|
64 |
-
sync_mode: true,
|
65 |
-
enable_safety_checker: request.settings.censorNotForAllAudiencesContent
|
66 |
-
},
|
67 |
-
}) as FalAiImageResponse
|
68 |
-
|
69 |
-
} else {
|
70 |
-
result = await fal.run(request.settings.falAiModelForImage, {
|
71 |
-
input: {
|
72 |
-
prompt: prompts.positivePrompt,
|
73 |
-
image_size: imageSize,
|
74 |
-
sync_mode: true,
|
75 |
-
num_inference_steps:
|
76 |
-
request.settings.falAiModelForImage === "fal-ai/stable-diffusion-v3-medium"
|
77 |
-
? 40
|
78 |
-
: 25,
|
79 |
-
num_images: 1,
|
80 |
-
enable_safety_checker: request.settings.censorNotForAllAudiencesContent
|
81 |
-
},
|
82 |
-
}) as FalAiImageResponse
|
83 |
-
}
|
84 |
-
|
85 |
-
|
86 |
-
if (request.settings.censorNotForAllAudiencesContent) {
|
87 |
-
if (result.has_nsfw_concepts.includes(true)) {
|
88 |
-
throw new Error(`The generated content has been filtered according to your safety settings`)
|
89 |
-
}
|
90 |
-
}
|
91 |
-
|
92 |
-
content = result.images[0]?.url
|
93 |
-
} else if (request.segment.category === ClapSegmentCategory.VIDEO) {
|
94 |
|
95 |
-
|
96 |
-
|
97 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
98 |
}
|
|
|
99 |
|
100 |
-
|
101 |
-
|
102 |
-
throw new Error(`cannot generate a video without a storyboard (the concept of Clapper is to use storyboards)`)
|
103 |
-
}
|
104 |
-
const result = await fal.run(request.settings.falAiModelForVideo, {
|
105 |
input: {
|
106 |
-
|
107 |
-
|
108 |
-
|
109 |
-
|
110 |
-
|
111 |
-
// will be added to the conditioning frame. The higher the number,
|
112 |
-
// the more noise there will be, and the less the video will look
|
113 |
-
// like the initial image. Increase it for more motion.
|
114 |
-
// Default value: 0.02
|
115 |
-
cond_aug: 0.02,
|
116 |
-
|
117 |
sync_mode: true,
|
118 |
enable_safety_checker: request.settings.censorNotForAllAudiencesContent
|
119 |
},
|
120 |
-
}) as
|
121 |
-
|
122 |
-
|
123 |
-
|
124 |
-
throw new Error(`The generated content has been filtered according to your safety settings`)
|
125 |
-
}
|
126 |
-
}
|
127 |
-
|
128 |
-
content = result?.video?.url
|
129 |
-
} else if (
|
130 |
-
request.segment.category === ClapSegmentCategory.SOUND
|
131 |
-
||
|
132 |
-
request.segment.category === ClapSegmentCategory.MUSIC
|
133 |
-
) {
|
134 |
-
const result = await fal.run(request.settings.falAiModelForSound, {
|
135 |
input: {
|
136 |
-
|
137 |
-
|
138 |
-
|
139 |
sync_mode: true,
|
|
|
|
|
|
|
|
|
|
|
140 |
enable_safety_checker: request.settings.censorNotForAllAudiencesContent
|
141 |
},
|
142 |
-
}) as
|
|
|
143 |
|
144 |
-
|
145 |
-
|
146 |
-
|
147 |
-
|
148 |
-
|
149 |
-
|
150 |
-
text: request.segment.prompt,
|
151 |
|
152 |
-
|
153 |
-
|
154 |
|
155 |
-
|
156 |
-
|
157 |
-
|
158 |
-
|
159 |
|
160 |
-
|
161 |
-
|
162 |
-
throw new Error(`
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
163 |
}
|
164 |
|
165 |
-
segment.assetUrl =
|
166 |
-
|
167 |
-
segment.
|
168 |
-
|
169 |
-
|
170 |
-
|
171 |
-
|
172 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
173 |
}
|
174 |
|
175 |
return segment
|
|
|
1 |
import * as fal from '@fal-ai/serverless-client'
|
2 |
|
3 |
import { FalAiImageSize, ResolveRequest } from "@/types"
|
4 |
+
import { ClapMediaOrientation, ClapSegment, ClapSegmentCategory } from "@aitube/clap"
|
|
|
5 |
import { FalAiAudioResponse, FalAiImageResponse, FalAiSpeechResponse, FalAiVideoResponse } from './types'
|
|
|
6 |
|
7 |
export async function resolveSegment(request: ResolveRequest): Promise<ClapSegment> {
|
8 |
if (!request.settings.falAiApiKey) {
|
|
|
13 |
credentials: request.settings.falAiApiKey
|
14 |
})
|
15 |
|
16 |
+
const segment = request.segment
|
17 |
|
|
|
18 |
|
19 |
+
// for doc see:
|
20 |
+
// https://fal.ai/models/fal-ai/fast-sdxl/api
|
21 |
+
|
22 |
+
if (request.segment.category === ClapSegmentCategory.STORYBOARD) {
|
23 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
24 |
|
25 |
+
if (!request.prompts.image.positive) {
|
26 |
+
console.error(`resolveSegment: cannot resolve a storyboard with an empty prompt`)
|
27 |
+
return segment
|
28 |
+
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
29 |
|
30 |
+
const imageSize =
|
31 |
+
request.meta.orientation === ClapMediaOrientation.SQUARE
|
32 |
+
? FalAiImageSize.SQUARE_HD
|
33 |
+
: request.meta.orientation === ClapMediaOrientation.PORTRAIT
|
34 |
+
? FalAiImageSize.PORTRAIT_16_9
|
35 |
+
: FalAiImageSize.LANDSCAPE_16_9
|
36 |
+
|
37 |
+
let result: FalAiImageResponse | undefined = undefined
|
38 |
+
|
39 |
+
if (request.settings.falAiModelForImage === "fal-ai/pulid") {
|
40 |
+
if (!request.prompts.image.identity) {
|
41 |
+
// throw new Error(`you selected model ${request.settings.falAiModelForImage}, but no character was found, so skipping`)
|
42 |
+
// console.log(`warning: user selected model ${request.settings.falAiModelForImage}, but no character was found. Falling back to fal-ai/fast-sdxl`)
|
43 |
+
|
44 |
+
// dirty fix to fallback to a non-face model
|
45 |
+
request.settings.falAiModelForImage = "fal-ai/fast-sdxl"
|
46 |
}
|
47 |
+
}
|
48 |
|
49 |
+
if (request.settings.falAiModelForImage === "fal-ai/pulid") {
|
50 |
+
result = await fal.run(request.settings.falAiModelForImage, {
|
|
|
|
|
|
|
51 |
input: {
|
52 |
+
reference_images: [{
|
53 |
+
image_url: request.prompts.image.identity
|
54 |
+
}],
|
55 |
+
image_size: imageSize,
|
56 |
+
num_images: 1,
|
|
|
|
|
|
|
|
|
|
|
|
|
57 |
sync_mode: true,
|
58 |
enable_safety_checker: request.settings.censorNotForAllAudiencesContent
|
59 |
},
|
60 |
+
}) as FalAiImageResponse
|
61 |
+
|
62 |
+
} else {
|
63 |
+
result = await fal.run(request.settings.falAiModelForImage, {
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
64 |
input: {
|
65 |
+
prompt: request.prompts.image.positive,
|
66 |
+
image_size: imageSize,
|
|
|
67 |
sync_mode: true,
|
68 |
+
num_inference_steps:
|
69 |
+
request.settings.falAiModelForImage === "fal-ai/stable-diffusion-v3-medium"
|
70 |
+
? 40
|
71 |
+
: 25,
|
72 |
+
num_images: 1,
|
73 |
enable_safety_checker: request.settings.censorNotForAllAudiencesContent
|
74 |
},
|
75 |
+
}) as FalAiImageResponse
|
76 |
+
}
|
77 |
|
78 |
+
|
79 |
+
if (request.settings.censorNotForAllAudiencesContent) {
|
80 |
+
if (result.has_nsfw_concepts.includes(true)) {
|
81 |
+
throw new Error(`The generated content has been filtered according to your safety settings`)
|
82 |
+
}
|
83 |
+
}
|
|
|
84 |
|
85 |
+
segment.assetUrl = result.images[0]?.url || ""
|
86 |
+
} else if (request.segment.category === ClapSegmentCategory.VIDEO) {
|
87 |
|
88 |
+
// console.log(`request.settings.falAiModelForVideo = `, request.settings.falAiModelForVideo)
|
89 |
+
if (request.settings.falAiModelForVideo !== "fal-ai/stable-video") {
|
90 |
+
throw new Error(`only "fal-ai/stable-video" is supported by Clapper for the moment`)
|
91 |
+
}
|
92 |
|
93 |
+
const storyboard = request.segments.find(s => s.category === ClapSegmentCategory.STORYBOARD)
|
94 |
+
if (!storyboard) {
|
95 |
+
throw new Error(`cannot generate a video without a storyboard (the concept of Clapper is to use storyboards)`)
|
96 |
+
}
|
97 |
+
const result = await fal.run(request.settings.falAiModelForVideo, {
|
98 |
+
input: {
|
99 |
+
image_url: storyboard.assetUrl,
|
100 |
+
|
101 |
+
motion_bucket_id: 55,
|
102 |
+
|
103 |
+
// The conditoning augmentation determines the amount of noise that
|
104 |
+
// will be added to the conditioning frame. The higher the number,
|
105 |
+
// the more noise there will be, and the less the video will look
|
106 |
+
// like the initial image. Increase it for more motion.
|
107 |
+
// Default value: 0.02
|
108 |
+
cond_aug: 0.02,
|
109 |
+
|
110 |
+
sync_mode: true,
|
111 |
+
enable_safety_checker: request.settings.censorNotForAllAudiencesContent
|
112 |
+
},
|
113 |
+
}) as FalAiVideoResponse
|
114 |
+
|
115 |
+
if (request.settings.censorNotForAllAudiencesContent) {
|
116 |
+
if (result.has_nsfw_concepts.includes(true)) {
|
117 |
+
throw new Error(`The generated content has been filtered according to your safety settings`)
|
118 |
+
}
|
119 |
}
|
120 |
|
121 |
+
segment.assetUrl = result?.video?.url || ""
|
122 |
+
} else if (
|
123 |
+
request.segment.category === ClapSegmentCategory.SOUND
|
124 |
+
||
|
125 |
+
request.segment.category === ClapSegmentCategory.MUSIC
|
126 |
+
) {
|
127 |
+
const result = await fal.run(request.settings.falAiModelForSound, {
|
128 |
+
input: {
|
129 |
+
// note how we use the *segment* prompt for music or sound
|
130 |
+
prompt: request.segment.prompt,
|
131 |
+
|
132 |
+
sync_mode: true,
|
133 |
+
enable_safety_checker: request.settings.censorNotForAllAudiencesContent
|
134 |
+
},
|
135 |
+
}) as FalAiAudioResponse
|
136 |
+
|
137 |
+
segment.assetUrl = result?.audio_file?.url || ""
|
138 |
+
} else if (
|
139 |
+
request.segment.category === ClapSegmentCategory.DIALOGUE
|
140 |
+
) {
|
141 |
+
const result = await fal.run(request.settings.falAiModelForVoice, {
|
142 |
+
input: {
|
143 |
+
text: request.segment.prompt,
|
144 |
+
|
145 |
+
// todo use the entty audio id, if available
|
146 |
+
audio_url: "https://cdn.themetavoice.xyz/speakers/bria.mp3",
|
147 |
+
|
148 |
+
sync_mode: true,
|
149 |
+
enable_safety_checker: request.settings.censorNotForAllAudiencesContent
|
150 |
+
},
|
151 |
+
}) as FalAiSpeechResponse
|
152 |
+
|
153 |
+
segment.assetUrl = result?.audio_url?.url || ""
|
154 |
+
} else {
|
155 |
+
throw new Error(`Clapper doesn't support ${request.segment.category} generation for provider "Fal.ai". Please open a pull request with (working code) to solve this!`)
|
156 |
}
|
157 |
|
158 |
return segment
|
src/app/api/resolve/providers/gradio/index.ts
ADDED
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { ClapSegment, ClapSegmentCategory } from "@aitube/clap"
|
2 |
+
|
3 |
+
import { ResolveRequest } from "@/types"
|
4 |
+
import { callGradioApi } from "@/lib/hf/callGradioApi"
|
5 |
+
|
6 |
+
export async function resolveSegment(request: ResolveRequest): Promise<ClapSegment> {
|
7 |
+
|
8 |
+
const segment = request.segment
|
9 |
+
|
10 |
+
if (request.segment.category === ClapSegmentCategory.STORYBOARD) {
|
11 |
+
segment.assetUrl = await callGradioApi({
|
12 |
+
url: request.settings.gradioApiUrlForImage,
|
13 |
+
inputs: request.prompts.image,
|
14 |
+
})
|
15 |
+
} if (request.segment.category === ClapSegmentCategory.DIALOGUE) {
|
16 |
+
segment.assetUrl = await callGradioApi({
|
17 |
+
url: request.settings.gradioApiUrlForVoice,
|
18 |
+
inputs: request.prompts.voice,
|
19 |
+
})
|
20 |
+
} if (request.segment.category === ClapSegmentCategory.VIDEO) {
|
21 |
+
segment.assetUrl = await callGradioApi({
|
22 |
+
url: request.settings.gradioApiUrlForVideo,
|
23 |
+
inputs: request.prompts.video,
|
24 |
+
})
|
25 |
+
} else {
|
26 |
+
throw new Error(`Clapper doesn't support ${request.segment.category} generation for provider "Gradio". Please open a pull request with (working code) to solve this!`)
|
27 |
+
}
|
28 |
+
|
29 |
+
return segment
|
30 |
+
}
|
src/app/api/resolve/providers/huggingface/generateImage.ts
ADDED
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { HfInference, HfInferenceEndpoint } from "@huggingface/inference"
|
2 |
+
|
3 |
+
import { decodeOutput } from "@/lib/utils/decodeOutput"
|
4 |
+
import { ResolveRequest } from "@/types"
|
5 |
+
|
6 |
+
export async function generateImage(request: ResolveRequest): Promise<string> {
|
7 |
+
|
8 |
+
if (!request.settings.huggingFaceModelForImage) {
|
9 |
+
throw new Error(`HuggingFace.generateImage: cannot generate without a valid huggingFaceModelForImage`)
|
10 |
+
}
|
11 |
+
|
12 |
+
if (!request.prompts.image.positive) {
|
13 |
+
throw new Error(`HuggingFace.generateImage: cannot generate without a valid positive image prompt`)
|
14 |
+
}
|
15 |
+
|
16 |
+
if (!request.settings.huggingFaceApiKey) {
|
17 |
+
throw new Error(`HuggingFace.generateImage: cannot generate without a valid huggingFaceApiKey`)
|
18 |
+
}
|
19 |
+
|
20 |
+
const hf: HfInferenceEndpoint = new HfInference(request.settings.huggingFaceApiKey)
|
21 |
+
|
22 |
+
const blob: Blob = await hf.textToImage({
|
23 |
+
model: request.settings.huggingFaceModelForImage,
|
24 |
+
inputs: request.prompts.image.positive,
|
25 |
+
parameters: {
|
26 |
+
height: request.meta.height,
|
27 |
+
width: request.meta.width,
|
28 |
+
/**
|
29 |
+
* The number of denoising steps. More denoising steps usually lead to a higher quality image at the expense of slower inference.
|
30 |
+
*/
|
31 |
+
// num_inference_steps?: number;
|
32 |
+
/**
|
33 |
+
* Guidance scale: Higher guidance scale encourages to generate images that are closely linked to the text `prompt`, usually at the expense of lower image quality.
|
34 |
+
*/
|
35 |
+
// guidance_scale?: number;
|
36 |
+
}
|
37 |
+
})
|
38 |
+
|
39 |
+
console.log("output from Hugging Face Inference API:", blob)
|
40 |
+
|
41 |
+
throw new Error(`finish me`)
|
42 |
+
}
|
src/app/api/resolve/providers/huggingface/generateVideo.ts
ADDED
@@ -0,0 +1,28 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { ResolveRequest } from "@/types"
|
2 |
+
import { callGradioApi } from "@/lib/hf/callGradioApi"
|
3 |
+
|
4 |
+
export async function generateVideo(request: ResolveRequest): Promise<string> {
|
5 |
+
|
6 |
+
if (!request.settings.huggingFaceModelForVideo) {
|
7 |
+
throw new Error(`HuggingFace.generateVideo: cannot generate without a valid huggingFaceModelForVideo`)
|
8 |
+
}
|
9 |
+
|
10 |
+
if (!request.prompts.video.image) {
|
11 |
+
throw new Error(`HuggingFace.generateVideo: cannot generate without a valid input image prompt`)
|
12 |
+
}
|
13 |
+
|
14 |
+
if (!request.settings.huggingFaceApiKey) {
|
15 |
+
throw new Error(`HuggingFace.generateVideo: cannot generate without a valid huggingFaceApiKey`)
|
16 |
+
}
|
17 |
+
|
18 |
+
// TODO pass a type to the template function
|
19 |
+
const output = await callGradioApi({
|
20 |
+
url: request.settings.huggingFaceModelForVideo,
|
21 |
+
inputs: request.prompts.video,
|
22 |
+
apiKey: request.settings.huggingFaceApiKey
|
23 |
+
})
|
24 |
+
|
25 |
+
console.log(`output from the Gradio API:`, output)
|
26 |
+
|
27 |
+
throw new Error(`please finish me`)
|
28 |
+
}
|
src/app/api/resolve/providers/huggingface/generateVoice.ts
ADDED
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { HfInference, HfInferenceEndpoint } from "@huggingface/inference"
|
2 |
+
|
3 |
+
import { ResolveRequest } from "@/types"
|
4 |
+
|
5 |
+
export async function generateVoice(request: ResolveRequest): Promise<string> {
|
6 |
+
|
7 |
+
if (!request.settings.huggingFaceModelForVoice) {
|
8 |
+
throw new Error(`HuggingFace.generateVoice: cannot generate without a valid huggingFaceModelForVoice`)
|
9 |
+
}
|
10 |
+
|
11 |
+
if (!request.prompts.voice.positive) {
|
12 |
+
throw new Error(`HuggingFace.generateVoice: cannot generate without a valid voice prompt`)
|
13 |
+
}
|
14 |
+
|
15 |
+
if (!request.settings.huggingFaceApiKey) {
|
16 |
+
throw new Error(`HuggingFace.generateVoice: cannot generate without a valid huggingFaceApiKey`)
|
17 |
+
}
|
18 |
+
|
19 |
+
const hf: HfInferenceEndpoint = new HfInference(request.settings.huggingFaceApiKey)
|
20 |
+
|
21 |
+
const blob: Blob = await hf.textToSpeech({
|
22 |
+
model: request.settings.huggingFaceModelForVoice,
|
23 |
+
inputs: request.prompts.voice.positive,
|
24 |
+
})
|
25 |
+
|
26 |
+
console.log("output from Hugging Face Inference API:", blob)
|
27 |
+
|
28 |
+
throw new Error(`finish me`)
|
29 |
+
}
|
src/app/api/resolve/providers/huggingface/index.ts
CHANGED
@@ -1,41 +1,30 @@
|
|
1 |
import { HfInference, HfInferenceEndpoint } from "@huggingface/inference"
|
2 |
|
3 |
import { ResolveRequest } from "@/types"
|
4 |
-
import { ClapSegment, ClapSegmentCategory
|
5 |
-
|
6 |
-
import {
|
|
|
|
|
7 |
|
8 |
export async function resolveSegment(request: ResolveRequest): Promise<ClapSegment> {
|
9 |
|
10 |
if (!request.settings.huggingFaceApiKey) {
|
11 |
throw new Error(`Missing API key for "Hugging Face"`)
|
12 |
}
|
13 |
-
|
14 |
-
const hf: HfInferenceEndpoint = new HfInference(request.settings.huggingFaceApiKey)
|
15 |
|
16 |
-
|
17 |
-
throw new Error(`Clapper doesn't support ${request.segment.category} generation for provider "Hugging Face". Please open a pull request with (working code) to solve this!`)
|
18 |
-
}
|
19 |
-
|
20 |
-
const segment: ClapSegment = { ...request.segment }
|
21 |
|
22 |
-
const
|
23 |
-
|
24 |
-
try {
|
25 |
-
const blob: Blob = await hf.textToImage({
|
26 |
-
model: request.settings.huggingFaceModelForImage,
|
27 |
-
inputs: prompts.positivePrompt
|
28 |
-
})
|
29 |
|
30 |
-
|
31 |
-
|
32 |
-
|
33 |
-
|
34 |
-
|
35 |
-
segment.assetUrl =
|
36 |
-
|
37 |
-
segment.
|
38 |
}
|
39 |
-
|
40 |
return segment
|
41 |
}
|
|
|
1 |
import { HfInference, HfInferenceEndpoint } from "@huggingface/inference"
|
2 |
|
3 |
import { ResolveRequest } from "@/types"
|
4 |
+
import { ClapSegment, ClapSegmentCategory } from "@aitube/clap"
|
5 |
+
|
6 |
+
import { generateImage } from "./generateImage"
|
7 |
+
import { generateVoice } from "./generateVoice"
|
8 |
+
import { generateVideo } from "./generateVideo"
|
9 |
|
10 |
export async function resolveSegment(request: ResolveRequest): Promise<ClapSegment> {
|
11 |
|
12 |
if (!request.settings.huggingFaceApiKey) {
|
13 |
throw new Error(`Missing API key for "Hugging Face"`)
|
14 |
}
|
|
|
|
|
15 |
|
16 |
+
const segment = request.segment
|
|
|
|
|
|
|
|
|
17 |
|
18 |
+
const hf: HfInferenceEndpoint = new HfInference(request.settings.huggingFaceApiKey)
|
|
|
|
|
|
|
|
|
|
|
|
|
19 |
|
20 |
+
if (request.segment.category === ClapSegmentCategory.STORYBOARD) {
|
21 |
+
segment.assetUrl = await generateImage(request)
|
22 |
+
} if (request.segment.category === ClapSegmentCategory.DIALOGUE) {
|
23 |
+
segment.assetUrl = await generateVoice(request)
|
24 |
+
} if (request.segment.category === ClapSegmentCategory.VIDEO) {
|
25 |
+
segment.assetUrl = await generateVideo(request)
|
26 |
+
} else {
|
27 |
+
throw new Error(`Clapper doesn't support ${request.segment.category} generation for provider "Hugging Face" with model (or space) "${request.settings.huggingFaceModelForVideo}". Please open a pull request with (working code) to solve this!`)
|
28 |
}
|
|
|
29 |
return segment
|
30 |
}
|
src/app/api/resolve/providers/replicate/index.ts
CHANGED
@@ -1,9 +1,8 @@
|
|
1 |
import Replicate from 'replicate'
|
2 |
|
|
|
|
|
3 |
import { ResolveRequest } from "@/types"
|
4 |
-
import { ClapSegment, ClapSegmentCategory, ClapSegmentStatus, getClapAssetSourceType } from "@aitube/clap"
|
5 |
-
import { getResolveRequestPrompts } from '@/lib/utils/getResolveRequestPrompts'
|
6 |
-
import { decodeOutput } from '@/lib/utils/decodeOutput'
|
7 |
|
8 |
export async function resolveSegment(request: ResolveRequest): Promise<ClapSegment> {
|
9 |
if (!request.settings.replicateApiKey) {
|
@@ -15,26 +14,51 @@ export async function resolveSegment(request: ResolveRequest): Promise<ClapSegme
|
|
15 |
throw new Error(`Clapper doesn't support ${request.segment.category} generation for provider "Replicate". Please open a pull request with (working code) to solve this!`)
|
16 |
}
|
17 |
|
18 |
-
const segment
|
19 |
-
|
20 |
-
const prompts = getResolveRequestPrompts(request)
|
21 |
-
|
22 |
-
try {
|
23 |
|
24 |
-
|
25 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
26 |
input: {
|
27 |
-
|
|
|
28 |
}
|
29 |
-
})
|
30 |
-
|
31 |
-
|
32 |
-
|
33 |
-
|
34 |
-
|
35 |
-
|
36 |
-
|
37 |
-
|
|
|
|
|
|
|
38 |
}
|
39 |
|
40 |
return segment
|
|
|
1 |
import Replicate from 'replicate'
|
2 |
|
3 |
+
import { ClapSegment, ClapSegmentCategory } from "@aitube/clap"
|
4 |
+
|
5 |
import { ResolveRequest } from "@/types"
|
|
|
|
|
|
|
6 |
|
7 |
export async function resolveSegment(request: ResolveRequest): Promise<ClapSegment> {
|
8 |
if (!request.settings.replicateApiKey) {
|
|
|
14 |
throw new Error(`Clapper doesn't support ${request.segment.category} generation for provider "Replicate". Please open a pull request with (working code) to solve this!`)
|
15 |
}
|
16 |
|
17 |
+
const segment = request.segment
|
|
|
|
|
|
|
|
|
18 |
|
19 |
+
// this mapping isn't great, we should use something auto-adapting
|
20 |
+
// like we are doing for Hugging Face (match the fields etc)
|
21 |
+
if (request.segment.category === ClapSegmentCategory.STORYBOARD) {
|
22 |
+
let params: object = {}
|
23 |
+
if (request.settings.replicateModelForImage === "fofr/pulid-lightning") {
|
24 |
+
params = {
|
25 |
+
prompt: request.prompts.image.positive,
|
26 |
+
face_image: request.prompts.image.identity,
|
27 |
+
}
|
28 |
+
} else if (request.settings.replicateModelForImage === "zsxkib/pulid") {
|
29 |
+
params = {
|
30 |
+
prompt: request.prompts.image.positive,
|
31 |
+
main_face_image: request.prompts.image.identity,
|
32 |
+
}
|
33 |
+
} else {
|
34 |
+
params = {
|
35 |
+
prompt: request.prompts.image.positive,
|
36 |
+
}
|
37 |
+
}
|
38 |
+
const response = await replicate.run(
|
39 |
+
request.settings.replicateModelForImage as any,
|
40 |
+
{ input: params }
|
41 |
+
) as any
|
42 |
+
segment.assetUrl = `${response.output || ""}`
|
43 |
+
} else if (request.segment.category === ClapSegmentCategory.DIALOGUE) {
|
44 |
+
const response = await replicate.run(
|
45 |
+
request.settings.replicateModelForVoice as any, {
|
46 |
input: {
|
47 |
+
text: request.prompts.voice.positive,
|
48 |
+
audio: request.prompts.voice.identity,
|
49 |
}
|
50 |
+
}) as any
|
51 |
+
segment.assetUrl = `${response.output || ""}`
|
52 |
+
} else if (request.segment.category === ClapSegmentCategory.VIDEO) {
|
53 |
+
const response = await replicate.run(
|
54 |
+
request.settings.replicateModelForVideo as any, {
|
55 |
+
input: {
|
56 |
+
image: request.prompts.video.image,
|
57 |
+
}
|
58 |
+
}) as any
|
59 |
+
segment.assetUrl = `${response.output || ""}`
|
60 |
+
} else {
|
61 |
+
throw new Error(`Clapper doesn't support ${request.segment.category} generation for provider "Replicate". Please open a pull request with (working code) to solve this!`)
|
62 |
}
|
63 |
|
64 |
return segment
|
src/app/api/resolve/providers/stabilityai/generateImage.ts
ADDED
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { ClapMediaOrientation } from "@aitube/clap"
|
2 |
+
|
3 |
+
import { ResolveRequest, StabilityAiImageSize } from "@/types"
|
4 |
+
|
5 |
+
export async function generateImage(request: ResolveRequest): Promise<string> {
|
6 |
+
|
7 |
+
if (!request.settings.stabilityAiApiKey) {
|
8 |
+
throw new Error(`StabilityAI.generateImage: cannot generate without a valid stabilityAiApiKey`)
|
9 |
+
}
|
10 |
+
|
11 |
+
if (!request.settings.stabilityAiModelForImage) {
|
12 |
+
throw new Error(`StabilityAI.generateImage: cannot generate without a valid stabilityAiModelForImage`)
|
13 |
+
}
|
14 |
+
|
15 |
+
if (!request.prompts.image.positive) {
|
16 |
+
throw new Error(`StabilityAI.generateImage: cannot generate without a valid positive prompt`)
|
17 |
+
}
|
18 |
+
|
19 |
+
const aspectRatio =
|
20 |
+
request.meta.orientation === ClapMediaOrientation.SQUARE
|
21 |
+
? StabilityAiImageSize.SQUARE
|
22 |
+
: request.meta.orientation === ClapMediaOrientation.PORTRAIT
|
23 |
+
? StabilityAiImageSize.PORTRAIT_9_16
|
24 |
+
: StabilityAiImageSize.LANDSCAPE_16_9
|
25 |
+
|
26 |
+
|
27 |
+
// what's cool about the ultra model is its capacity to take in
|
28 |
+
// very large prompts, up to 10000 characters apparently?
|
29 |
+
|
30 |
+
// To control the weight of a given word use the format (word:weight),
|
31 |
+
// where word is the word you'd like to control the weight of and weight
|
32 |
+
// is a value between 0 and 1.
|
33 |
+
// For example: The sky was a crisp (blue:0.3) and (green:0.8) would
|
34 |
+
// convey a sky that was blue and green, but more green than blue.
|
35 |
+
|
36 |
+
const body = new FormData()
|
37 |
+
body.set("prompt", `${request.prompts.image.positive || ""}`)
|
38 |
+
body.set("output_format", "jpeg") // "png"
|
39 |
+
body.set("negative_prompt", `${request.prompts.image.negative || ""}`)
|
40 |
+
body.set("aspect_ratio", `${aspectRatio || ""}`)
|
41 |
+
|
42 |
+
const response = await fetch(`https://api.stability.ai/v2beta/${request.settings.stabilityAiModelForImage}`, {
|
43 |
+
method: "POST",
|
44 |
+
headers: {
|
45 |
+
Authorization: `Bearer ${request.settings.stabilityAiApiKey}`,
|
46 |
+
},
|
47 |
+
body,
|
48 |
+
cache: "no-store"
|
49 |
+
})
|
50 |
+
|
51 |
+
console.log("response:", response)
|
52 |
+
|
53 |
+
/*
|
54 |
+
if (response.status === 200) {
|
55 |
+
const buffer = Buffer.from(response.data)
|
56 |
+
const rawAssetUrl = `data:image/${payload.output_format};base64,${buffer.toString('base64')}`
|
57 |
+
return rawAssetUrl
|
58 |
+
} else {
|
59 |
+
throw new Error(`${response.status}: ${response.data.toString()}`);
|
60 |
+
}
|
61 |
+
*/
|
62 |
+
throw new Error("finish me")
|
63 |
+
}
|
src/app/api/resolve/providers/stabilityai/generateVideo.ts
ADDED
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { sleep } from "@/lib/utils/sleep"
|
2 |
+
import { ResolveRequest } from "@/types"
|
3 |
+
|
4 |
+
export async function generateVideo(request: ResolveRequest): Promise<string> {
|
5 |
+
|
6 |
+
|
7 |
+
if (!request.settings.stabilityAiApiKey) {
|
8 |
+
throw new Error(`StabilityAI.generateVideo: cannot generate without a valid stabilityAiApiKey`)
|
9 |
+
}
|
10 |
+
|
11 |
+
if (!request.settings.stabilityAiModelForVideo) {
|
12 |
+
throw new Error(`StabilityAI.generateVideo: cannot generate without a valid stabilityAiModelForVideo`)
|
13 |
+
}
|
14 |
+
|
15 |
+
|
16 |
+
if (!request.prompts.video.image) {
|
17 |
+
throw new Error(`StabilityAI.generateVideo: cannot generate without a valid image input`)
|
18 |
+
}
|
19 |
+
|
20 |
+
|
21 |
+
// what's cool about the ultra model is its capacity to take in
|
22 |
+
// very large prompts, up to 10000 characters apparently?
|
23 |
+
|
24 |
+
// To control the weight of a given word use the format (word:weight),
|
25 |
+
// where word is the word you'd like to control the weight of and weight
|
26 |
+
// is a value between 0 and 1.
|
27 |
+
// For example: The sky was a crisp (blue:0.3) and (green:0.8) would
|
28 |
+
// convey a sky that was blue and green, but more green than blue.
|
29 |
+
|
30 |
+
const body = new FormData()
|
31 |
+
|
32 |
+
// Supported Formats: jpeg, png
|
33 |
+
// Supported Dimensions: 1024x576, 576x1024, 768x768
|
34 |
+
|
35 |
+
// "Please ensure that the source image is in the correct format and dimensions"
|
36 |
+
body.set("image", `${request.prompts.video.image || ""}`)
|
37 |
+
|
38 |
+
const response = await fetch(`https://api.stability.ai/v2beta/image-to-video`, {
|
39 |
+
method: "POST",
|
40 |
+
headers: {
|
41 |
+
Authorization: `Bearer ${request.settings.stabilityAiApiKey}`,
|
42 |
+
},
|
43 |
+
body,
|
44 |
+
cache: "no-store"
|
45 |
+
}) as unknown as { data: { id: number } }
|
46 |
+
|
47 |
+
const generationId = response?.data?.id
|
48 |
+
if (!generationId) { throw new Error(`StabilityAI failed to give us a valid response.data.id`) }
|
49 |
+
|
50 |
+
console.log("Generation ID:", generationId);
|
51 |
+
|
52 |
+
|
53 |
+
let pollingCount = 0
|
54 |
+
do {
|
55 |
+
// This is normally a fast model, so let's check every 4 seconds
|
56 |
+
await sleep(10000)
|
57 |
+
|
58 |
+
const res = await fetch(`https://api.stability.ai/v2beta/image-to-video/result/${generationId}`, {
|
59 |
+
method: "GET",
|
60 |
+
headers: {
|
61 |
+
Authorization: `Bearer ${request.settings.stabilityAiApiKey}`,
|
62 |
+
Accept: "video/*", // Use 'application/json' to receive base64 encoded JSON
|
63 |
+
},
|
64 |
+
cache: "no-store"
|
65 |
+
});
|
66 |
+
|
67 |
+
if (res.status === 200) {
|
68 |
+
try {
|
69 |
+
const response = (await res.json()) as any
|
70 |
+
const errors = `${response?.errors || ""}`
|
71 |
+
if (errors) {
|
72 |
+
throw new Error(errors)
|
73 |
+
}
|
74 |
+
return response.output.pop()
|
75 |
+
} catch (err) {
|
76 |
+
console.error("res.json() error:", err)
|
77 |
+
}
|
78 |
+
}
|
79 |
+
|
80 |
+
pollingCount++
|
81 |
+
|
82 |
+
// To prevent indefinite polling, we can stop after a certain number
|
83 |
+
if (pollingCount >= 40) {
|
84 |
+
throw new Error('Request timed out.')
|
85 |
+
}
|
86 |
+
} while (true)
|
87 |
+
|
88 |
+
throw new Error("finish me")
|
89 |
+
}
|
src/app/api/resolve/providers/stabilityai/index.ts
CHANGED
@@ -1,74 +1,22 @@
|
|
|
|
1 |
|
2 |
-
import { ResolveRequest
|
3 |
-
import {
|
4 |
-
import { decodeOutput } from '@/lib/utils/decodeOutput'
|
5 |
-
import { getResolveRequestPrompts } from '@/lib/utils/getResolveRequestPrompts'
|
6 |
-
import { performRequest } from "./performRequest"
|
7 |
|
8 |
export async function resolveSegment(request: ResolveRequest): Promise<ClapSegment> {
|
9 |
if (!request.settings.stabilityAiApiKey) {
|
10 |
throw new Error(`Missing API key for "Stability.ai"`)
|
11 |
}
|
12 |
|
|
|
13 |
|
14 |
-
|
15 |
-
|
16 |
-
let content = ''
|
17 |
-
|
18 |
-
const prompts = getResolveRequestPrompts(request)
|
19 |
-
|
20 |
-
try {
|
21 |
-
|
22 |
-
// for doc see:
|
23 |
-
// https://fal.ai/models/fal-ai/fast-sdxl/api
|
24 |
|
25 |
-
|
26 |
-
|
27 |
-
|
28 |
-
|
29 |
-
console.error(`resolveSegment: cannot resolve a storyboard with an empty prompt`)
|
30 |
-
return segment
|
31 |
-
}
|
32 |
-
|
33 |
-
const imageSize: StabilityAiImageSize =
|
34 |
-
request.meta.orientation === ClapMediaOrientation.SQUARE
|
35 |
-
? StabilityAiImageSize.SQUARE
|
36 |
-
: request.meta.orientation === ClapMediaOrientation.PORTRAIT
|
37 |
-
? StabilityAiImageSize.PORTRAIT_9_16
|
38 |
-
: StabilityAiImageSize.LANDSCAPE_16_9
|
39 |
-
|
40 |
-
const assetUrl = await performRequest({
|
41 |
-
modelName: request.settings.imageGenerationModel,
|
42 |
-
|
43 |
-
// what's cool about the ultra model is its capacity to take in
|
44 |
-
// very large prompts, up to 10000 characters apparently?
|
45 |
-
|
46 |
-
// To control the weight of a given word use the format (word:weight),
|
47 |
-
// where word is the word you'd like to control the weight of and weight
|
48 |
-
// is a value between 0 and 1.
|
49 |
-
// For example: The sky was a crisp (blue:0.3) and (green:0.8) would
|
50 |
-
// convey a sky that was blue and green, but more green than blue.
|
51 |
-
positivePrompt: prompts.positivePrompt,
|
52 |
-
negativePrompt: prompts.negativePrompt,
|
53 |
-
|
54 |
-
imageSize,
|
55 |
-
|
56 |
-
apiKey: request.settings.stabilityAiApiKey
|
57 |
-
})
|
58 |
-
|
59 |
-
content = assetUrl
|
60 |
-
} else {
|
61 |
-
throw new Error(`Clapper doesn't support ${request.segment.category} generation for provider "Stability.ai". Please open a pull request with (working code) to solve this!`)
|
62 |
-
}
|
63 |
-
|
64 |
-
segment.assetUrl = await decodeOutput(content)
|
65 |
-
segment.assetSourceType = getClapAssetSourceType(segment.assetUrl)
|
66 |
-
} catch (err) {
|
67 |
-
console.error(`failed to call Stability.ai: `, err)
|
68 |
-
segment.assetUrl = ''
|
69 |
-
segment.assetSourceType = getClapAssetSourceType(segment.assetUrl)
|
70 |
-
segment.status = ClapSegmentStatus.TO_GENERATE
|
71 |
}
|
72 |
-
|
73 |
return segment
|
74 |
}
|
|
|
1 |
+
import { ClapSegment, ClapSegmentCategory } from "@aitube/clap"
|
2 |
|
3 |
+
import { ResolveRequest } from "@/types"
|
4 |
+
import { generateImage } from "./generateImage"
|
|
|
|
|
|
|
5 |
|
6 |
export async function resolveSegment(request: ResolveRequest): Promise<ClapSegment> {
|
7 |
if (!request.settings.stabilityAiApiKey) {
|
8 |
throw new Error(`Missing API key for "Stability.ai"`)
|
9 |
}
|
10 |
|
11 |
+
const segment = request.segment
|
12 |
|
13 |
+
// for doc see:
|
14 |
+
// https://fal.ai/models/fal-ai/fast-sdxl/api
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
15 |
|
16 |
+
if (request.segment.category === ClapSegmentCategory.STORYBOARD) {
|
17 |
+
segment.assetUrl = await generateImage(request)
|
18 |
+
} else {
|
19 |
+
throw new Error(`Clapper doesn't support ${request.segment.category} generation for provider "Stability.ai". Please open a pull request with (working code) to solve this!`)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
20 |
}
|
|
|
21 |
return segment
|
22 |
}
|
src/app/api/resolve/providers/stabilityai/performRequest.ts
DELETED
@@ -1,64 +0,0 @@
|
|
1 |
-
|
2 |
-
import axios from "axios"
|
3 |
-
import FormData from "form-data"
|
4 |
-
|
5 |
-
import { decodeOutput } from "@/lib/utils/decodeOutput"
|
6 |
-
import { StabilityAiImageSize } from "@/types"
|
7 |
-
|
8 |
-
export async function performRequest({
|
9 |
-
positivePrompt,
|
10 |
-
negativePrompt,
|
11 |
-
modelName,
|
12 |
-
imageSize,
|
13 |
-
apiKey
|
14 |
-
}: {
|
15 |
-
positivePrompt: string
|
16 |
-
negativePrompt: string
|
17 |
-
modelName: string
|
18 |
-
imageSize: StabilityAiImageSize
|
19 |
-
apiKey: string
|
20 |
-
}): Promise<string> {
|
21 |
-
|
22 |
-
const payload = {
|
23 |
-
prompt: positivePrompt,
|
24 |
-
output_format: "jpeg", // "webp",
|
25 |
-
negative_prompt: negativePrompt,
|
26 |
-
aspect_ratio: imageSize,
|
27 |
-
|
28 |
-
|
29 |
-
|
30 |
-
// string (GenerationMode)
|
31 |
-
// Default: text-to-image
|
32 |
-
// Enum: image-to-image text-to-image
|
33 |
-
// Controls whether this is a text-to-image or image-to-image generation, which affects which parameters are required:
|
34 |
-
|
35 |
-
// text-to-image requires only the prompt parameter
|
36 |
-
// image-to-image requires the prompt, image, and strength parameters
|
37 |
-
// mode: "text-to-image",
|
38 |
-
|
39 |
-
// "stable-image/generate/sd3" supports this option:
|
40 |
-
// model: "", // the model to use ("SD3 Medium", "SD3 Large", or "SD3 Large Turbo")
|
41 |
-
};
|
42 |
-
|
43 |
-
const response = await axios.postForm(
|
44 |
-
`https://api.stability.ai/v2beta/${modelName}`,
|
45 |
-
axios.toFormData(payload, new FormData()),
|
46 |
-
{
|
47 |
-
validateStatus: undefined,
|
48 |
-
responseType: "arraybuffer",
|
49 |
-
headers: {
|
50 |
-
Authorization: `Bearer ${apiKey}`,
|
51 |
-
Accept: "image/*"
|
52 |
-
},
|
53 |
-
},
|
54 |
-
)
|
55 |
-
|
56 |
-
if (response.status === 200) {
|
57 |
-
const buffer = Buffer.from(response.data)
|
58 |
-
const rawAssetUrl = `data:image/${payload.output_format};base64,${buffer.toString('base64')}`
|
59 |
-
const assetUrl = await decodeOutput(rawAssetUrl)
|
60 |
-
return assetUrl
|
61 |
-
} else {
|
62 |
-
throw new Error(`${response.status}: ${response.data.toString()}`);
|
63 |
-
}
|
64 |
-
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
src/app/api/resolve/route.ts
CHANGED
@@ -1,5 +1,5 @@
|
|
1 |
import { NextResponse, NextRequest } from "next/server"
|
2 |
-
import { ClapSegmentCategory } from "@aitube/clap"
|
3 |
|
4 |
import { resolveSegment as resolveSegmentUsingHuggingFace } from "./providers/huggingface"
|
5 |
import { resolveSegment as resolveSegmentUsingComfyReplicate } from "./providers/comfy-replicate"
|
@@ -10,6 +10,9 @@ import { resolveSegment as resolveSegmentUsingModelsLab } from "./providers/mode
|
|
10 |
import { resolveSegment as resolveSegmentUsingStabilityAi } from "./providers/stabilityai"
|
11 |
|
12 |
import { ComputeProvider, ResolveRequest } from "@/types"
|
|
|
|
|
|
|
13 |
|
14 |
export async function POST(req: NextRequest) {
|
15 |
// do we really need to secure it?
|
@@ -53,8 +56,53 @@ export async function POST(req: NextRequest) {
|
|
53 |
: null
|
54 |
|
55 |
if (!resolveSegment) { throw new Error(`Provider ${provider} is not supported yet`)}
|
56 |
-
|
57 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
58 |
|
59 |
return NextResponse.json(segment)
|
60 |
}
|
|
|
1 |
import { NextResponse, NextRequest } from "next/server"
|
2 |
+
import { ClapOutputType, ClapSegment, ClapSegmentCategory, ClapSegmentStatus, getClapAssetSourceType } from "@aitube/clap"
|
3 |
|
4 |
import { resolveSegment as resolveSegmentUsingHuggingFace } from "./providers/huggingface"
|
5 |
import { resolveSegment as resolveSegmentUsingComfyReplicate } from "./providers/comfy-replicate"
|
|
|
10 |
import { resolveSegment as resolveSegmentUsingStabilityAi } from "./providers/stabilityai"
|
11 |
|
12 |
import { ComputeProvider, ResolveRequest } from "@/types"
|
13 |
+
import { decodeOutput } from "@/lib/utils/decodeOutput"
|
14 |
+
import { getTypeAndExtension } from "@/lib/utils/getTypeAndExtension"
|
15 |
+
import { getMediaInfo } from "@/lib/ffmpeg/getMediaInfo"
|
16 |
|
17 |
export async function POST(req: NextRequest) {
|
18 |
// do we really need to secure it?
|
|
|
56 |
: null
|
57 |
|
58 |
if (!resolveSegment) { throw new Error(`Provider ${provider} is not supported yet`)}
|
59 |
+
|
60 |
+
let segment = request.segment
|
61 |
+
|
62 |
+
try {
|
63 |
+
segment = await resolveSegment(request)
|
64 |
+
|
65 |
+
// we clean-up and parse the output from all the resolvers:
|
66 |
+
// this will download files hosted on CDNs, convert WAV files to MP3 etc
|
67 |
+
|
68 |
+
segment.assetUrl = await decodeOutput(segment.assetUrl)
|
69 |
+
|
70 |
+
segment.assetSourceType = getClapAssetSourceType(segment.assetUrl)
|
71 |
+
|
72 |
+
segment.status = ClapSegmentStatus.COMPLETED
|
73 |
+
|
74 |
+
const { assetFileFormat, outputType } = getTypeAndExtension(segment.assetUrl)
|
75 |
+
|
76 |
+
segment.assetFileFormat = assetFileFormat
|
77 |
+
segment.outputType = outputType
|
78 |
+
|
79 |
+
if (segment.outputType === ClapOutputType.AUDIO
|
80 |
+
||
|
81 |
+
segment.outputType === ClapOutputType.VIDEO
|
82 |
+
) {
|
83 |
+
const { durationInMs, hasAudio } = await getMediaInfo(segment.assetUrl)
|
84 |
+
segment.assetDurationInMs = durationInMs
|
85 |
+
|
86 |
+
// hasAudio doesn't work properly I think, with small samples
|
87 |
+
segment.outputGain = hasAudio ? 1.0 : 0.0
|
88 |
+
|
89 |
+
/*
|
90 |
+
console.log(`DEBUG:`, {
|
91 |
+
durationInMs,
|
92 |
+
hasAudio,
|
93 |
+
"segment.assetDurationInMs": segment.assetDurationInMs,
|
94 |
+
"segment.outputGain": segment.outputGain,
|
95 |
+
})
|
96 |
+
*/
|
97 |
+
}
|
98 |
+
} catch (err) {
|
99 |
+
console.error(`failed to generate a segment: ${err}`)
|
100 |
+
segment.assetUrl = ''
|
101 |
+
segment.assetSourceType = getClapAssetSourceType(segment.assetUrl)
|
102 |
+
segment.assetDurationInMs = 0
|
103 |
+
segment.outputGain = 0
|
104 |
+
segment.status = ClapSegmentStatus.TO_GENERATE
|
105 |
+
}
|
106 |
|
107 |
return NextResponse.json(segment)
|
108 |
}
|
src/components/settings/constants.ts
CHANGED
@@ -20,12 +20,15 @@ export const computeProviderShortNames = {
|
|
20 |
[ComputeProvider.OPENAI]: "OpenAI",
|
21 |
[ComputeProvider.REPLICATE]: "Replicate",
|
22 |
[ComputeProvider.STABILITYAI]: "StabilityAI",
|
23 |
-
[ComputeProvider.MIDJOURNEY]: "Midjourney (no
|
24 |
[ComputeProvider.SUNO]: "Suno (no music API)",
|
25 |
[ComputeProvider.UDIO]: "Udio (no music API)",
|
26 |
-
[ComputeProvider.LUMALABS]: "
|
27 |
-
[ComputeProvider.KUAISHOU]: "KuaiShou (no
|
28 |
-
[ComputeProvider.RUNWAYML]: "RunwayML (no
|
|
|
|
|
|
|
29 |
}
|
30 |
|
31 |
// for developer sanity purposes, we only support LangChain for now.
|
@@ -56,6 +59,7 @@ export const availableComputeProvidersForImages = [
|
|
56 |
ComputeProvider.FALAI,
|
57 |
ComputeProvider.MODELSLAB,
|
58 |
ComputeProvider.MIDJOURNEY,
|
|
|
59 |
]
|
60 |
|
61 |
export const availableComputeProvidersForVideos = [
|
@@ -71,6 +75,8 @@ export const availableComputeProvidersForVideos = [
|
|
71 |
ComputeProvider.GOOGLE,
|
72 |
ComputeProvider.LUMALABS,
|
73 |
ComputeProvider.KUAISHOU,
|
|
|
|
|
74 |
]
|
75 |
|
76 |
export const availableComputeProvidersForMusic = [
|
@@ -208,7 +214,10 @@ export const availableModelsForImageGeneration: Partial<Record<ComputeProvider,
|
|
208 |
],
|
209 |
[ComputeProvider.MIDJOURNEY]: [
|
210 |
"(No public API)",
|
211 |
-
]
|
|
|
|
|
|
|
212 |
}
|
213 |
|
214 |
// should we call this upscaling or post-processing?
|
@@ -229,6 +238,16 @@ export const availableModelsForVideoGeneration: Partial<Record<ComputeProvider,
|
|
229 |
[ComputeProvider.STABILITYAI]: [
|
230 |
"image-to-video",
|
231 |
],
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
232 |
[ComputeProvider.OPENAI]: [
|
233 |
"Sora is unavailable (no public API)",
|
234 |
],
|
@@ -243,7 +262,13 @@ export const availableModelsForVideoGeneration: Partial<Record<ComputeProvider,
|
|
243 |
],
|
244 |
[ComputeProvider.KUAISHOU]: [
|
245 |
"Kling is unavailable (no public API)",
|
246 |
-
]
|
|
|
|
|
|
|
|
|
|
|
|
|
247 |
}
|
248 |
|
249 |
export const availableModelsForVideoUpscaling: Partial<Record<ComputeProvider, string[]>> = {
|
@@ -252,18 +277,50 @@ export const availableModelsForVideoUpscaling: Partial<Record<ComputeProvider, s
|
|
252 |
}
|
253 |
|
254 |
export const availableModelsForSoundGeneration: Partial<Record<ComputeProvider, string[]>> = {
|
|
|
|
|
|
|
|
|
|
|
255 |
[ComputeProvider.FALAI]: [
|
256 |
"fal-ai/stable-audio"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
257 |
]
|
258 |
}
|
259 |
|
260 |
export const availableModelsForVoiceGeneration: Partial<Record<ComputeProvider, string[]>> = {
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
261 |
[ComputeProvider.FALAI]: [
|
262 |
"fal-ai/metavoice-v1"
|
|
|
|
|
|
|
|
|
|
|
|
|
263 |
]
|
264 |
}
|
265 |
|
266 |
export const availableModelsForMusicGeneration: Partial<Record<ComputeProvider, string[]>> = {
|
|
|
|
|
|
|
|
|
267 |
[ComputeProvider.FALAI]: [
|
268 |
"fal-ai/stable-audio"
|
269 |
],
|
|
|
20 |
[ComputeProvider.OPENAI]: "OpenAI",
|
21 |
[ComputeProvider.REPLICATE]: "Replicate",
|
22 |
[ComputeProvider.STABILITYAI]: "StabilityAI",
|
23 |
+
[ComputeProvider.MIDJOURNEY]: "Midjourney (no API)",
|
24 |
[ComputeProvider.SUNO]: "Suno (no music API)",
|
25 |
[ComputeProvider.UDIO]: "Udio (no music API)",
|
26 |
+
[ComputeProvider.LUMALABS]: "Luma: Dream Machine (no API)",
|
27 |
+
[ComputeProvider.KUAISHOU]: "KuaiShou: Kling (no API)",
|
28 |
+
[ComputeProvider.RUNWAYML]: "RunwayML: GEN-3 (no API)",
|
29 |
+
[ComputeProvider.HEDRA]: "Hedra: Character-1 (no API)",
|
30 |
+
[ComputeProvider.LEONARDOAI]: "Leonardo.ai (no API)",
|
31 |
+
[ComputeProvider.EVERARTAI]: "Everart.ai (no API)",
|
32 |
}
|
33 |
|
34 |
// for developer sanity purposes, we only support LangChain for now.
|
|
|
59 |
ComputeProvider.FALAI,
|
60 |
ComputeProvider.MODELSLAB,
|
61 |
ComputeProvider.MIDJOURNEY,
|
62 |
+
ComputeProvider.EVERARTAI,
|
63 |
]
|
64 |
|
65 |
export const availableComputeProvidersForVideos = [
|
|
|
75 |
ComputeProvider.GOOGLE,
|
76 |
ComputeProvider.LUMALABS,
|
77 |
ComputeProvider.KUAISHOU,
|
78 |
+
ComputeProvider.HEDRA,
|
79 |
+
ComputeProvider.LEONARDOAI,
|
80 |
]
|
81 |
|
82 |
export const availableComputeProvidersForMusic = [
|
|
|
214 |
],
|
215 |
[ComputeProvider.MIDJOURNEY]: [
|
216 |
"(No public API)",
|
217 |
+
],
|
218 |
+
[ComputeProvider.EVERARTAI]: [
|
219 |
+
"(No public API)",
|
220 |
+
],
|
221 |
}
|
222 |
|
223 |
// should we call this upscaling or post-processing?
|
|
|
238 |
[ComputeProvider.STABILITYAI]: [
|
239 |
"image-to-video",
|
240 |
],
|
241 |
+
[ComputeProvider.HUGGINGFACE]: [
|
242 |
+
"spaces/hpcai-tech/open-sora",
|
243 |
+
"spaces/multimodalart/hallo" // supports audio input
|
244 |
+
],
|
245 |
+
[ComputeProvider.REPLICATE]: [
|
246 |
+
// note: we need a model that accepts cinematic ratios
|
247 |
+
// "camenduru/open-sora"
|
248 |
+
|
249 |
+
"anotherjesse/zeroscope-v2-xl",
|
250 |
+
],
|
251 |
[ComputeProvider.OPENAI]: [
|
252 |
"Sora is unavailable (no public API)",
|
253 |
],
|
|
|
262 |
],
|
263 |
[ComputeProvider.KUAISHOU]: [
|
264 |
"Kling is unavailable (no public API)",
|
265 |
+
],
|
266 |
+
[ComputeProvider.HEDRA]: [
|
267 |
+
"Hedra is unavailable (no public API)",
|
268 |
+
],
|
269 |
+
[ComputeProvider.LEONARDOAI]: [
|
270 |
+
"Leonardo.ai is unavailable (no public API)",
|
271 |
+
],
|
272 |
}
|
273 |
|
274 |
export const availableModelsForVideoUpscaling: Partial<Record<ComputeProvider, string[]>> = {
|
|
|
277 |
}
|
278 |
|
279 |
export const availableModelsForSoundGeneration: Partial<Record<ComputeProvider, string[]>> = {
|
280 |
+
[ComputeProvider.HUGGINGFACE]: [
|
281 |
+
"cvssp/audioldm2",
|
282 |
+
"cvssp/audioldm2-large",
|
283 |
+
"cvssp/audioldm"
|
284 |
+
],
|
285 |
[ComputeProvider.FALAI]: [
|
286 |
"fal-ai/stable-audio"
|
287 |
+
],
|
288 |
+
[ComputeProvider.ELEVENLABS]: [
|
289 |
+
"v1/sound-generation"
|
290 |
+
],
|
291 |
+
[ComputeProvider.REPLICATE]: [
|
292 |
+
"declare-lab/tango",
|
293 |
+
"suno-ai/bark",
|
294 |
+
"sepal/audiogen"
|
295 |
]
|
296 |
}
|
297 |
|
298 |
export const availableModelsForVoiceGeneration: Partial<Record<ComputeProvider, string[]>> = {
|
299 |
+
[ComputeProvider.HUGGINGFACE]: [
|
300 |
+
"coqui/XTTS-v2",
|
301 |
+
"myshell-ai/OpenVoiceV2",
|
302 |
+
"myshell-ai/OpenVoice",
|
303 |
+
"WhisperSpeech/WhisperSpeech",
|
304 |
+
"metavoiceio/metavoice-1B-v0.1",
|
305 |
+
"parler-tts/parler_tts_mini_v0.1",
|
306 |
+
"parler-tts/parler-tts-mini-expresso"
|
307 |
+
],
|
308 |
[ComputeProvider.FALAI]: [
|
309 |
"fal-ai/metavoice-v1"
|
310 |
+
],
|
311 |
+
[ComputeProvider.REPLICATE]: [
|
312 |
+
"chenxwh/openvoice"
|
313 |
+
],
|
314 |
+
[ComputeProvider.ELEVENLABS]: [
|
315 |
+
"v1/text-to-speech"
|
316 |
]
|
317 |
}
|
318 |
|
319 |
export const availableModelsForMusicGeneration: Partial<Record<ComputeProvider, string[]>> = {
|
320 |
+
[ComputeProvider.HUGGINGFACE]: [
|
321 |
+
"cvssp/audioldm2-music",
|
322 |
+
"facebook/musicgen-large",
|
323 |
+
],
|
324 |
[ComputeProvider.FALAI]: [
|
325 |
"fal-ai/stable-audio"
|
326 |
],
|
src/components/toolbars/top-menu/lists/hasNoPublicAPI.ts
CHANGED
@@ -16,6 +16,7 @@ export function hasNoPublicAPI(model: string) {
|
|
16 |
model === ComputeProvider.KUAISHOU ||
|
17 |
model === ComputeProvider.RUNWAYML ||
|
18 |
model === ComputeProvider.LUMALABS ||
|
|
|
19 |
model === ComputeProvider.UDIO
|
20 |
) {
|
21 |
return true
|
|
|
16 |
model === ComputeProvider.KUAISHOU ||
|
17 |
model === ComputeProvider.RUNWAYML ||
|
18 |
model === ComputeProvider.LUMALABS ||
|
19 |
+
model === ComputeProvider.HEDRA ||
|
20 |
model === ComputeProvider.UDIO
|
21 |
) {
|
22 |
return true
|
src/controllers/audio/analyzeAudio.ts
CHANGED
@@ -1,6 +1,5 @@
|
|
1 |
-
import { DEFAULT_DURATION_IN_MS_PER_STEP } from "@aitube/timeline"
|
2 |
|
3 |
-
import { getAudioBuffer } from "./getAudioBuffer"
|
4 |
import { AudioAnalysis } from "./types"
|
5 |
import { detectBPM } from "./detectBPM"
|
6 |
|
|
|
1 |
+
import { DEFAULT_DURATION_IN_MS_PER_STEP, getAudioBuffer } from "@aitube/timeline"
|
2 |
|
|
|
3 |
import { AudioAnalysis } from "./types"
|
4 |
import { detectBPM } from "./detectBPM"
|
5 |
|
src/controllers/audio/getAudioBuffer.ts
DELETED
@@ -1,15 +0,0 @@
|
|
1 |
-
import { readFileAsArrayBuffer } from "./readFileAsArrayBuffer"
|
2 |
-
|
3 |
-
export async function getAudioBuffer(file: File): Promise<AudioBuffer> {
|
4 |
-
const audioContext = new AudioContext() // initialize AudioContext
|
5 |
-
const arrayBuffer = await readFileAsArrayBuffer(file)
|
6 |
-
|
7 |
-
// decode audio data from your arrayBuffer
|
8 |
-
return new Promise((resolve, reject) => {
|
9 |
-
audioContext.decodeAudioData(arrayBuffer, (buffer) => {
|
10 |
-
resolve(buffer)
|
11 |
-
}, (err) => {
|
12 |
-
reject(err)
|
13 |
-
})
|
14 |
-
})
|
15 |
-
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
src/controllers/audio/readFileAsArrayBuffer.ts
DELETED
@@ -1,16 +0,0 @@
|
|
1 |
-
|
2 |
-
|
3 |
-
export async function readFileAsArrayBuffer(file: File): Promise<ArrayBuffer> {
|
4 |
-
return new Promise((resolve, reject) => {
|
5 |
-
let reader = new FileReader();
|
6 |
-
reader.onload = () => {
|
7 |
-
// when the reader has loaded, resolve the Promise with the result
|
8 |
-
resolve(reader.result as ArrayBuffer);
|
9 |
-
};
|
10 |
-
reader.onerror = (error) => {
|
11 |
-
// if there's an error, reject the Promise with the error
|
12 |
-
reject(error);
|
13 |
-
};
|
14 |
-
reader.readAsArrayBuffer(file);
|
15 |
-
});
|
16 |
-
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
src/controllers/audio/startAudioSourceNode.ts
CHANGED
@@ -1,6 +1,5 @@
|
|
1 |
import { UUID } from "@aitube/clap"
|
2 |
-
|
3 |
-
import { RuntimeSegment } from "@/types"
|
4 |
|
5 |
import { CurrentlyPlayingAudioSource } from "./types"
|
6 |
|
|
|
1 |
import { UUID } from "@aitube/clap"
|
2 |
+
import { RuntimeSegment } from "@aitube/timeline"
|
|
|
3 |
|
4 |
import { CurrentlyPlayingAudioSource } from "./types"
|
5 |
|
src/controllers/audio/types.ts
CHANGED
@@ -1,4 +1,4 @@
|
|
1 |
-
import { RuntimeSegment } from "
|
2 |
|
3 |
export type AudioAnalysis = {
|
4 |
audioBuffer: AudioBuffer
|
|
|
1 |
+
import { RuntimeSegment } from "@aitube/timeline"
|
2 |
|
3 |
export type AudioAnalysis = {
|
4 |
audioBuffer: AudioBuffer
|
src/controllers/audio/useAudio.ts
CHANGED
@@ -1,9 +1,7 @@
|
|
1 |
"use client"
|
2 |
|
3 |
import { create } from "zustand"
|
4 |
-
import { TimelineStore, useTimeline } from "@aitube/timeline"
|
5 |
-
|
6 |
-
import { RuntimeSegment } from "@/types"
|
7 |
|
8 |
import { AudioStore } from "./types"
|
9 |
import { getDefaultAudioState } from "./getDefaultAudioState"
|
|
|
1 |
"use client"
|
2 |
|
3 |
import { create } from "zustand"
|
4 |
+
import { TimelineStore, useTimeline, RuntimeSegment } from "@aitube/timeline"
|
|
|
|
|
5 |
|
6 |
import { AudioStore } from "./types"
|
7 |
import { getDefaultAudioState } from "./getDefaultAudioState"
|
src/controllers/io/parseFileIntoSegments.ts
CHANGED
@@ -1,13 +1,12 @@
|
|
1 |
"use client"
|
2 |
|
3 |
-
import { ClapAssetSource, ClapOutputType, ClapSegment, ClapSegmentCategory, ClapSegmentStatus,
|
4 |
-
import {
|
5 |
|
6 |
-
import {
|
7 |
|
8 |
import { analyzeAudio } from "../audio/analyzeAudio"
|
9 |
import { ResourceCategory, ResourceType } from "./types"
|
10 |
-
import { blobToBase64DataUri } from "@/lib/utils/blobToBase64DataUri"
|
11 |
|
12 |
export async function parseFileIntoSegments({ file }: {
|
13 |
/**
|
|
|
1 |
"use client"
|
2 |
|
3 |
+
import { ClapAssetSource, ClapOutputType, ClapSegment, ClapSegmentCategory, ClapSegmentStatus, newSegment, UUID } from "@aitube/clap"
|
4 |
+
import { RuntimeSegment } from "@aitube/timeline"
|
5 |
|
6 |
+
import { blobToBase64DataUri } from "@/lib/utils/blobToBase64DataUri"
|
7 |
|
8 |
import { analyzeAudio } from "../audio/analyzeAudio"
|
9 |
import { ResourceCategory, ResourceType } from "./types"
|
|
|
10 |
|
11 |
export async function parseFileIntoSegments({ file }: {
|
12 |
/**
|
src/controllers/metrics/constants.ts
CHANGED
@@ -66,6 +66,12 @@ export const estimatedMetrics: Record<ComputeProvider, Record<string, ProviderMe
|
|
66 |
// TODO list the most popular models
|
67 |
},
|
68 |
[ComputeProvider.FALAI]: {
|
|
|
|
|
|
|
|
|
|
|
|
|
69 |
"fal-ai/fast-sdxl": {
|
70 |
estimationType: ProviderMetricsEstimationType.MANUAL_MEASUREMENTS,
|
71 |
averageCostPerComputeTimeInSec: 0.001105904404873477,
|
@@ -119,4 +125,13 @@ export const estimatedMetrics: Record<ComputeProvider, Record<string, ProviderMe
|
|
119 |
[ComputeProvider.RUNWAYML]: {
|
120 |
// TODO list the most popular models
|
121 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
122 |
}
|
|
|
66 |
// TODO list the most popular models
|
67 |
},
|
68 |
[ComputeProvider.FALAI]: {
|
69 |
+
"fal-ai/metavoice-v1": {
|
70 |
+
estimationType: ProviderMetricsEstimationType.MANUAL_MEASUREMENTS,
|
71 |
+
averageCostPerComputeTimeInSec: 0.0006666666666666666,
|
72 |
+
averageDurationInSec: 15,
|
73 |
+
averageCostPerGeneration: 0.01,
|
74 |
+
},
|
75 |
"fal-ai/fast-sdxl": {
|
76 |
estimationType: ProviderMetricsEstimationType.MANUAL_MEASUREMENTS,
|
77 |
averageCostPerComputeTimeInSec: 0.001105904404873477,
|
|
|
125 |
[ComputeProvider.RUNWAYML]: {
|
126 |
// TODO list the most popular models
|
127 |
},
|
128 |
+
[ComputeProvider.HEDRA]: {
|
129 |
+
// TODO list the most popular models
|
130 |
+
},
|
131 |
+
[ComputeProvider.LEONARDOAI]: {
|
132 |
+
// TODO list the most popular models
|
133 |
+
},
|
134 |
+
[ComputeProvider.EVERARTAI]: {
|
135 |
+
// TODO list the most popular models
|
136 |
+
},
|
137 |
}
|
src/controllers/metrics/getDefaultMetricsPerProvider.ts
CHANGED
@@ -80,6 +80,15 @@ export function getDefaultMetricsPerProvider(): MetricsPerProvider {
|
|
80 |
[ComputeProvider.RUNWAYML]: {
|
81 |
...getDefaultComputeProviderMetrics(),
|
82 |
},
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
83 |
}
|
84 |
return metricsPerProvider
|
85 |
}
|
|
|
80 |
[ComputeProvider.RUNWAYML]: {
|
81 |
...getDefaultComputeProviderMetrics(),
|
82 |
},
|
83 |
+
[ComputeProvider.HEDRA]: {
|
84 |
+
...getDefaultComputeProviderMetrics(),
|
85 |
+
},
|
86 |
+
[ComputeProvider.LEONARDOAI]: {
|
87 |
+
...getDefaultComputeProviderMetrics(),
|
88 |
+
},
|
89 |
+
[ComputeProvider.EVERARTAI]: {
|
90 |
+
...getDefaultComputeProviderMetrics(),
|
91 |
+
},
|
92 |
}
|
93 |
return metricsPerProvider
|
94 |
}
|
src/controllers/renderer/useRenderer.ts
CHANGED
@@ -2,9 +2,7 @@
|
|
2 |
|
3 |
import { create } from "zustand"
|
4 |
import { ClapOutputType, ClapSegment, ClapSegmentCategory } from "@aitube/clap"
|
5 |
-
import { TimelineStore, useTimeline } from "@aitube/timeline"
|
6 |
-
|
7 |
-
import { RuntimeSegment } from "@/types"
|
8 |
|
9 |
import { BufferedSegments, RendererStore } from "./types"
|
10 |
import { getDefaultRendererState } from "./getDefaultRendererState"
|
|
|
2 |
|
3 |
import { create } from "zustand"
|
4 |
import { ClapOutputType, ClapSegment, ClapSegmentCategory } from "@aitube/clap"
|
5 |
+
import { TimelineStore, useTimeline, RuntimeSegment } from "@aitube/timeline"
|
|
|
|
|
6 |
|
7 |
import { BufferedSegments, RendererStore } from "./types"
|
8 |
import { getDefaultRendererState } from "./getDefaultRendererState"
|
src/controllers/resolver/useResolver.ts
CHANGED
@@ -1,17 +1,17 @@
|
|
1 |
"use client"
|
2 |
|
3 |
import { create } from "zustand"
|
4 |
-
import { ClapEntity, ClapSegment, ClapSegmentCategory, ClapSegmentFilteringMode, ClapSegmentStatus, filterSegments } from "@aitube/clap"
|
5 |
-
import { RenderingStrategy, TimelineStore, useTimeline } from "@aitube/timeline"
|
|
|
6 |
|
7 |
-
import { ResolveRequest,
|
8 |
|
9 |
import { getDefaultResolverState } from "./getDefaultResolverState"
|
10 |
import { useSettings } from "../settings"
|
11 |
import { DEFAULT_WAIT_TIME_IF_NOTHING_TO_DO_IN_MS } from "./constants"
|
12 |
import { ResolverStore } from "./types"
|
13 |
|
14 |
-
|
15 |
export const useResolver = create<ResolverStore>((set, get) => ({
|
16 |
...getDefaultResolverState(),
|
17 |
|
@@ -75,8 +75,8 @@ export const useResolver = create<ResolverStore>((set, get) => ({
|
|
75 |
// segments visible on screen are show first,
|
76 |
// then those nearby, then the hidden ones
|
77 |
const segments: RuntimeSegment[] = ([...allSegments] as RuntimeSegment[]).sort((segment1, segment2) => {
|
78 |
-
const priority1 = (
|
79 |
-
const priority2 = (
|
80 |
|
81 |
return priority2 - priority1
|
82 |
})
|
@@ -385,7 +385,7 @@ export const useResolver = create<ResolverStore>((set, get) => ({
|
|
385 |
// throw new Error(`please call setSegmentRender(...) first`)
|
386 |
}
|
387 |
|
388 |
-
const
|
389 |
ClapSegmentFilteringMode.ANY,
|
390 |
segment,
|
391 |
allSegments
|
@@ -397,34 +397,78 @@ export const useResolver = create<ResolverStore>((set, get) => ({
|
|
397 |
}
|
398 |
|
399 |
segment.status = ClapSegmentStatus.IN_PROGRESS
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
400 |
|
401 |
-
|
402 |
-
|
403 |
-
|
404 |
-
|
405 |
-
|
406 |
-
|
407 |
-
|
408 |
-
|
409 |
-
|
410 |
-
|
411 |
-
|
412 |
-
|
413 |
-
|
414 |
-
|
415 |
-
|
416 |
-
|
417 |
-
|
418 |
-
|
419 |
-
|
420 |
-
|
421 |
-
|
422 |
-
|
423 |
-
|
424 |
-
mainCharacterEntity
|
425 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
426 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
427 |
|
|
|
428 |
const res = await fetch("/api/resolve", {
|
429 |
method: "POST",
|
430 |
headers: {
|
@@ -437,26 +481,39 @@ export const useResolver = create<ResolverStore>((set, get) => ({
|
|
437 |
const newSegmentData = (await res.json()) as ClapSegment
|
438 |
// console.log(`useResolver.resolveSegment(): newSegmentData`, newSegmentData)
|
439 |
|
440 |
-
const {
|
441 |
-
id,
|
442 |
-
assetUrl,
|
443 |
-
assetDurationInMs,
|
444 |
-
assetFileFormat,
|
445 |
-
assetSourceType,
|
446 |
-
status
|
447 |
-
} = newSegmentData
|
448 |
-
|
449 |
// note: this modifies the old object in-place
|
450 |
-
|
451 |
-
|
452 |
-
|
453 |
-
|
454 |
-
|
455 |
-
|
456 |
-
|
457 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
458 |
|
459 |
newSegment.status = ClapSegmentStatus.COMPLETED
|
|
|
460 |
trackSilentChangeInSegment(newSegment.id)
|
461 |
return newSegment
|
462 |
} catch (err) {
|
|
|
1 |
"use client"
|
2 |
|
3 |
import { create } from "zustand"
|
4 |
+
import { ClapEntity, ClapOutputType, ClapSegment, ClapSegmentCategory, ClapSegmentFilteringMode, ClapSegmentStatus, filterSegments } from "@aitube/clap"
|
5 |
+
import { RenderingStrategy, TimelineStore, useTimeline, getAudioBuffer, RuntimeSegment, SegmentVisibility, segmentVisibilityPriority } from "@aitube/timeline"
|
6 |
+
import { getVideoPrompt } from "@aitube/engine"
|
7 |
|
8 |
+
import { ResolveRequest, ResolveRequestPrompts } from "@/types"
|
9 |
|
10 |
import { getDefaultResolverState } from "./getDefaultResolverState"
|
11 |
import { useSettings } from "../settings"
|
12 |
import { DEFAULT_WAIT_TIME_IF_NOTHING_TO_DO_IN_MS } from "./constants"
|
13 |
import { ResolverStore } from "./types"
|
14 |
|
|
|
15 |
export const useResolver = create<ResolverStore>((set, get) => ({
|
16 |
...getDefaultResolverState(),
|
17 |
|
|
|
75 |
// segments visible on screen are show first,
|
76 |
// then those nearby, then the hidden ones
|
77 |
const segments: RuntimeSegment[] = ([...allSegments] as RuntimeSegment[]).sort((segment1, segment2) => {
|
78 |
+
const priority1 = (segmentVisibilityPriority as any)[segment1.visibility || SegmentVisibility.HIDDEN] || 0
|
79 |
+
const priority2 = (segmentVisibilityPriority as any)[segment2.visibility || SegmentVisibility.HIDDEN] || 0
|
80 |
|
81 |
return priority2 - priority1
|
82 |
})
|
|
|
385 |
// throw new Error(`please call setSegmentRender(...) first`)
|
386 |
}
|
387 |
|
388 |
+
const segments: ClapSegment[] = filterSegments(
|
389 |
ClapSegmentFilteringMode.ANY,
|
390 |
segment,
|
391 |
allSegments
|
|
|
397 |
}
|
398 |
|
399 |
segment.status = ClapSegmentStatus.IN_PROGRESS
|
400 |
+
|
401 |
+
const entities = clap.entityIndex || {}
|
402 |
+
|
403 |
+
const speakingCharactersIds = segments.map(s =>
|
404 |
+
s.category === ClapSegmentCategory.DIALOGUE ? s.entityId : null
|
405 |
+
).filter(id => id) as string[]
|
406 |
+
|
407 |
+
const generalCharactersIds = segments.map(s =>
|
408 |
+
s.category === ClapSegmentCategory.CHARACTER ? s.entityId : null
|
409 |
+
).filter(id => id) as string[]
|
410 |
+
|
411 |
+
const mainCharacterId: string | undefined = speakingCharactersIds.at(0) || generalCharactersIds.at(0) || undefined
|
412 |
+
|
413 |
+
const mainCharacterEntity: ClapEntity | undefined = mainCharacterId ? (entities[mainCharacterId] || undefined) : undefined
|
414 |
+
|
415 |
+
const storyboard = segments.find(s => s.category === ClapSegmentCategory.STORYBOARD)
|
416 |
|
417 |
+
const dialogue = segments.find(s => s.category === ClapSegmentCategory.DIALOGUE)
|
418 |
+
|
419 |
+
const imagePrompt = getVideoPrompt(
|
420 |
+
segments,
|
421 |
+
entities
|
422 |
+
)
|
423 |
+
|
424 |
+
const positiveImagePrompt = [
|
425 |
+
settings.imagePromptPrefix,
|
426 |
+
imagePrompt,
|
427 |
+
settings.imagePromptSuffix,
|
428 |
+
].map(x => x.trim()).filter(x => x).join(", ")
|
429 |
+
|
430 |
+
const negativeImagePrompt = [
|
431 |
+
settings.imageNegativePrompt
|
432 |
+
].map(x => x.trim()).filter(x => x).join(", ")
|
433 |
+
|
434 |
+
// note: not all AI models will support those parameters.
|
435 |
+
// in 2024, even the "best" proprietary video models like Sora, Veo, Kling, Gen-3, Dream Machine etc..
|
436 |
+
// don't support voice input for lip syncing, for instance.
|
437 |
+
const prompts: ResolveRequestPrompts = {
|
438 |
+
image: {
|
439 |
+
// the "identification picture" of the character, if available
|
440 |
+
identity: `${mainCharacterEntity?.imageId || ""}`,
|
441 |
+
positive: positiveImagePrompt,
|
442 |
+
negative: negativeImagePrompt
|
443 |
+
},
|
444 |
+
video: {
|
445 |
+
// image to animate
|
446 |
+
image: `${storyboard?.assetUrl || ""}`,
|
447 |
+
|
448 |
+
// dialogue line to lip-sync
|
449 |
+
voice: `${dialogue?.assetUrl || ""}`,
|
450 |
+
},
|
451 |
+
voice: {
|
452 |
+
identity: `${mainCharacterEntity?.audioId || ""}`,
|
453 |
+
positive: "",
|
454 |
+
negative: ""
|
455 |
}
|
456 |
+
}
|
457 |
+
|
458 |
+
const request: ResolveRequest = {
|
459 |
+
settings,
|
460 |
+
segment,
|
461 |
+
segments,
|
462 |
+
entities,
|
463 |
+
speakingCharactersIds,
|
464 |
+
generalCharactersIds,
|
465 |
+
mainCharacterId,
|
466 |
+
mainCharacterEntity,
|
467 |
+
meta: clap.meta,
|
468 |
+
prompts,
|
469 |
+
}
|
470 |
|
471 |
+
try {
|
472 |
const res = await fetch("/api/resolve", {
|
473 |
method: "POST",
|
474 |
headers: {
|
|
|
481 |
const newSegmentData = (await res.json()) as ClapSegment
|
482 |
// console.log(`useResolver.resolveSegment(): newSegmentData`, newSegmentData)
|
483 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
484 |
// note: this modifies the old object in-place
|
485 |
+
// it is super important as this helps preserving the reference
|
486 |
+
const newSegment = Object.assign(segment, newSegmentData) as RuntimeSegment
|
487 |
+
|
488 |
+
if (newSegment.outputType === ClapOutputType.AUDIO) {
|
489 |
+
try {
|
490 |
+
newSegment.audioBuffer = await getAudioBuffer(newSegment.assetUrl)
|
491 |
+
} catch (err) {
|
492 |
+
console.error(`failed to load the audio file: ${err}`)
|
493 |
+
}
|
494 |
+
}
|
495 |
+
|
496 |
+
// after a segment has ben resolved, it is possible that the size
|
497 |
+
// of its asset changed (eg. a dialogue line longer than the segment's length)
|
498 |
+
//
|
499 |
+
// there are multiple ways to solve this, one approach could be to
|
500 |
+
// just add some more B-roll (more shots)
|
501 |
+
//
|
502 |
+
// or we can also extend it, which is the current simple solution
|
503 |
+
//
|
504 |
+
// for the other categories, such as MUSIC or SOUND,
|
505 |
+
// we assume it is okay if they are too short or too long,
|
506 |
+
// and that we can crop them etc
|
507 |
+
//
|
508 |
+
// note that video clips are also concerned: we want them to perfectly fit
|
509 |
+
if (newSegment.category === ClapSegmentCategory.DIALOGUE) {
|
510 |
+
await timeline.fitSegmentToAssetDuration(newSegment)
|
511 |
+
} else if (newSegment.category === ClapSegmentCategory.VIDEO) {
|
512 |
+
await timeline.fitSegmentToAssetDuration(newSegment)
|
513 |
+
}
|
514 |
|
515 |
newSegment.status = ClapSegmentStatus.COMPLETED
|
516 |
+
|
517 |
trackSilentChangeInSegment(newSegment.id)
|
518 |
return newSegment
|
519 |
} catch (err) {
|
src/controllers/settings/getDefaultSettingsState.ts
CHANGED
@@ -68,19 +68,28 @@ export function getDefaultSettingsState(): SettingsState {
|
|
68 |
comfyWorkflowForSound: "{}",
|
69 |
comfyWorkflowForMusic: "{}",
|
70 |
|
71 |
-
// "
|
72 |
-
//
|
73 |
-
|
74 |
-
|
75 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
76 |
huggingFaceModelForVoice: "",
|
77 |
huggingFaceModelForSound: "",
|
78 |
huggingFaceModelForMusic: "",
|
79 |
|
|
|
|
|
|
|
80 |
gradioApiUrlForAssistant: "",
|
81 |
gradioApiUrlForImage: "",
|
82 |
gradioApiUrlForVideo: "",
|
83 |
-
|
84 |
gradioApiUrlForSound: "",
|
85 |
gradioApiUrlForMusic: "",
|
86 |
|
|
|
68 |
comfyWorkflowForSound: "{}",
|
69 |
comfyWorkflowForMusic: "{}",
|
70 |
|
71 |
+
// note how we prefix everything with "models"
|
72 |
+
// that's because it will be possible at some point to also
|
73 |
+
// call a space eg. spaces/openai/sora (this one is just a silly example, of course)
|
74 |
+
// "models/HuggingFaceH4/zephyr-7b-beta"
|
75 |
+
// "models/mistralai/Mixtral-8x7B-Instruct-v0.1",
|
76 |
+
huggingFaceModelForAssistant: "models/mistralai/Mixtral-8x7B-Instruct-v0.1",
|
77 |
+
huggingFaceModelForImage: "models/sd-community/sdxl-flash",
|
78 |
+
|
79 |
+
// huggingFaceModelForVideo: "spaces/multimodalart/hallo",
|
80 |
+
huggingFaceModelForVideo: "spaces/hpcai-tech/open-sora",
|
81 |
+
|
82 |
huggingFaceModelForVoice: "",
|
83 |
huggingFaceModelForSound: "",
|
84 |
huggingFaceModelForMusic: "",
|
85 |
|
86 |
+
// those are not designed for Hugging Face specifically,
|
87 |
+
// but to be compatible with any Gradio API URL that the
|
88 |
+
// user would set manually (eg. running on localhost)
|
89 |
gradioApiUrlForAssistant: "",
|
90 |
gradioApiUrlForImage: "",
|
91 |
gradioApiUrlForVideo: "",
|
92 |
+
gradioApiUrlForVoice: "",
|
93 |
gradioApiUrlForSound: "",
|
94 |
gradioApiUrlForMusic: "",
|
95 |
|
src/controllers/settings/types.ts
CHANGED
@@ -81,7 +81,7 @@ export type SettingsState = {
|
|
81 |
gradioApiUrlForAssistant: string
|
82 |
gradioApiUrlForImage: string
|
83 |
gradioApiUrlForVideo: string
|
84 |
-
|
85 |
gradioApiUrlForSound: string
|
86 |
gradioApiUrlForMusic: string
|
87 |
|
@@ -210,7 +210,7 @@ export type SettingsControls = {
|
|
210 |
setGradioApiUrlForAssistant: (gradioApiUrlForAssistant?: string) => void
|
211 |
setGradioApiUrlForImage: (gradioApiUrlForImage?: string) => void
|
212 |
setGradioApiUrlForVideo: (gradioApiUrlForVideo?: string) => void
|
213 |
-
|
214 |
setGradioApiUrlForSound: (gradioApiUrlForSound?: string) => void
|
215 |
setGradioApiUrlForMusic: (gradioApiUrlForMusic?: string) => void
|
216 |
|
|
|
81 |
gradioApiUrlForAssistant: string
|
82 |
gradioApiUrlForImage: string
|
83 |
gradioApiUrlForVideo: string
|
84 |
+
gradioApiUrlForVoice: string
|
85 |
gradioApiUrlForSound: string
|
86 |
gradioApiUrlForMusic: string
|
87 |
|
|
|
210 |
setGradioApiUrlForAssistant: (gradioApiUrlForAssistant?: string) => void
|
211 |
setGradioApiUrlForImage: (gradioApiUrlForImage?: string) => void
|
212 |
setGradioApiUrlForVideo: (gradioApiUrlForVideo?: string) => void
|
213 |
+
setGradioApiUrlForVoice: (gradioApiUrlForVoice?: string) => void
|
214 |
setGradioApiUrlForSound: (gradioApiUrlForSound?: string) => void
|
215 |
setGradioApiUrlForMusic: (gradioApiUrlForMusic?: string) => void
|
216 |
|
src/controllers/settings/useSettings.ts
CHANGED
@@ -245,8 +245,8 @@ export const useSettings = create<SettingsStore>()(
|
|
245 |
setGradioApiUrlForVideo: (gradioApiUrlForVideo?: string) => {
|
246 |
set({ gradioApiUrlForVideo: getValidString(gradioApiUrlForVideo, getDefaultSettingsState().gradioApiUrlForVideo) })
|
247 |
},
|
248 |
-
|
249 |
-
set({
|
250 |
},
|
251 |
setGradioApiUrlForSound: (gradioApiUrlForSound?: string) => {
|
252 |
set({ gradioApiUrlForSound: getValidString(gradioApiUrlForSound, getDefaultSettingsState().gradioApiUrlForSound) })
|
@@ -449,7 +449,7 @@ export const useSettings = create<SettingsStore>()(
|
|
449 |
gradioApiUrlForAssistant: state.gradioApiUrlForAssistant || defaultSettings.gradioApiUrlForAssistant,
|
450 |
gradioApiUrlForImage: state.gradioApiUrlForImage || defaultSettings.gradioApiUrlForImage,
|
451 |
gradioApiUrlForVideo: state.gradioApiUrlForVideo || defaultSettings.gradioApiUrlForVideo,
|
452 |
-
|
453 |
gradioApiUrlForSound: state.gradioApiUrlForSound || defaultSettings.gradioApiUrlForSound,
|
454 |
gradioApiUrlForMusic: state.gradioApiUrlForMusic || defaultSettings.gradioApiUrlForMusic,
|
455 |
replicateModelForImage: state.replicateModelForImage || defaultSettings.replicateModelForImage,
|
|
|
245 |
setGradioApiUrlForVideo: (gradioApiUrlForVideo?: string) => {
|
246 |
set({ gradioApiUrlForVideo: getValidString(gradioApiUrlForVideo, getDefaultSettingsState().gradioApiUrlForVideo) })
|
247 |
},
|
248 |
+
setGradioApiUrlForVoice: (gradioApiUrlForVoice?: string) => {
|
249 |
+
set({ gradioApiUrlForVoice: getValidString(gradioApiUrlForVoice, getDefaultSettingsState().gradioApiUrlForVoice) })
|
250 |
},
|
251 |
setGradioApiUrlForSound: (gradioApiUrlForSound?: string) => {
|
252 |
set({ gradioApiUrlForSound: getValidString(gradioApiUrlForSound, getDefaultSettingsState().gradioApiUrlForSound) })
|
|
|
449 |
gradioApiUrlForAssistant: state.gradioApiUrlForAssistant || defaultSettings.gradioApiUrlForAssistant,
|
450 |
gradioApiUrlForImage: state.gradioApiUrlForImage || defaultSettings.gradioApiUrlForImage,
|
451 |
gradioApiUrlForVideo: state.gradioApiUrlForVideo || defaultSettings.gradioApiUrlForVideo,
|
452 |
+
gradioApiUrlForVoice: state.gradioApiUrlForVoice || defaultSettings.gradioApiUrlForVoice,
|
453 |
gradioApiUrlForSound: state.gradioApiUrlForSound || defaultSettings.gradioApiUrlForSound,
|
454 |
gradioApiUrlForMusic: state.gradioApiUrlForMusic || defaultSettings.gradioApiUrlForMusic,
|
455 |
replicateModelForImage: state.replicateModelForImage || defaultSettings.replicateModelForImage,
|
src/lib/ffmpeg/getMediaInfo.ts
ADDED
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { tmpdir } from "node:os"
|
2 |
+
import { writeFile, rm } from "node:fs/promises"
|
3 |
+
import { join } from "node:path"
|
4 |
+
|
5 |
+
import ffmpeg from "fluent-ffmpeg"
|
6 |
+
|
7 |
+
// Metadata extracted from a media file (video or audio) via ffprobe.
export type MediaMetadata = {
  // total duration of the media, in seconds (0 when the file cannot be analyzed)
  durationInSec: number;
  // the same duration expressed in milliseconds
  durationInMs: number;
  // true when the media contains at least one audio stream
  hasAudio: boolean;
};
|
12 |
+
|
13 |
+
/**
|
14 |
+
* Get the media info of a base64 or file path
|
15 |
+
* @param input
|
16 |
+
* @returns
|
17 |
+
*/
|
18 |
+
export async function getMediaInfo(input: string): Promise<MediaMetadata> {
|
19 |
+
// If the input is a base64 string
|
20 |
+
if (input.startsWith("data:")) {
|
21 |
+
// Extract the base64 content
|
22 |
+
// Extract the base64 content
|
23 |
+
const [head, tail] = input.split(";base64,")
|
24 |
+
if (!tail) {
|
25 |
+
throw new Error("Invalid base64 data");
|
26 |
+
}
|
27 |
+
|
28 |
+
const extension = head.split("/").pop() || ""
|
29 |
+
const base64Content = tail || ""
|
30 |
+
|
31 |
+
// Decode the base64 content to a buffer
|
32 |
+
const buffer = Buffer.from(base64Content, 'base64')
|
33 |
+
|
34 |
+
// Generate a temporary file name
|
35 |
+
const tempFileName = join(tmpdir(), `temp-media-${Date.now()}.${extension}`);
|
36 |
+
|
37 |
+
// Write the buffer to a temporary file
|
38 |
+
await writeFile(tempFileName, buffer);
|
39 |
+
|
40 |
+
// Get metadata from the temporary file then delete the file
|
41 |
+
try {
|
42 |
+
return await getMetaDataFromPath(tempFileName);
|
43 |
+
} finally {
|
44 |
+
await rm(tempFileName);
|
45 |
+
}
|
46 |
+
}
|
47 |
+
|
48 |
+
// If the input is a path to the file
|
49 |
+
return await getMetaDataFromPath(input);
|
50 |
+
}
|
51 |
+
|
52 |
+
async function getMetaDataFromPath(filePath: string): Promise<MediaMetadata> {
|
53 |
+
return new Promise((resolve, reject) => {
|
54 |
+
ffmpeg.ffprobe(filePath, (err, metadata) => {
|
55 |
+
|
56 |
+
let results = {
|
57 |
+
durationInSec: 0,
|
58 |
+
durationInMs: 0,
|
59 |
+
hasAudio: false,
|
60 |
+
}
|
61 |
+
|
62 |
+
if (err) {
|
63 |
+
console.error("getMediaInfo(): failed to analyze the source (might happen with empty files)", err)
|
64 |
+
// reject(err);
|
65 |
+
resolve(results);
|
66 |
+
return;
|
67 |
+
}
|
68 |
+
|
69 |
+
try {
|
70 |
+
results.durationInSec = metadata?.format?.duration || 0;
|
71 |
+
results.durationInMs = results.durationInSec * 1000;
|
72 |
+
results.hasAudio = (metadata?.streams || []).some((stream) => stream.codec_type === 'audio');
|
73 |
+
|
74 |
+
} catch (err) {
|
75 |
+
console.error(`getMediaInfo(): failed to analyze the source (might happen with empty files)`, err)
|
76 |
+
results.durationInSec = 0
|
77 |
+
results.durationInMs = 0
|
78 |
+
results.hasAudio = false
|
79 |
+
}
|
80 |
+
resolve(results);
|
81 |
+
});
|
82 |
+
});
|
83 |
+
}
|
src/lib/hf/adapter/README.md
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
Important: if you add a new field,
|
2 |
+
please make sure you check all the functions inside adapter/*.ts files
|
3 |
+
to support it
|
src/lib/hf/adapter/adaptAnyInputsToGradioInputs.ts
ADDED
@@ -0,0 +1,65 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { GradioApiInfo, SupportedFields } from "../types"
|
2 |
+
import { identifyField } from "./identifyField"
|
3 |
+
import { getDefaultFields } from "./getDefaultFields"
|
4 |
+
import { findMainGradioEndpoint } from "./findMainGradioEndpoint"
|
5 |
+
|
6 |
+
/**
|
7 |
+
* This function try to adapt arbitrary inputs to strict gradio inputs
|
8 |
+
*
|
9 |
+
* @param param0
|
10 |
+
* @returns
|
11 |
+
*/
|
12 |
+
export function adaptAnyInputsToGradioInputs({
|
13 |
+
inputs,
|
14 |
+
gradioApiInfo,
|
15 |
+
}: {
|
16 |
+
inputs: Record<string, any>,
|
17 |
+
gradioApiInfo: GradioApiInfo
|
18 |
+
}): {
|
19 |
+
endpoint: string
|
20 |
+
inputs: Array<string | number | boolean | undefined | null>
|
21 |
+
} {
|
22 |
+
|
23 |
+
const mainGradioEndpoint = findMainGradioEndpoint({ gradioApiInfo })
|
24 |
+
|
25 |
+
if (!mainGradioEndpoint) {
|
26 |
+
throw new Error(`couldn't find a suitable endpoint`)
|
27 |
+
}
|
28 |
+
|
29 |
+
// input fields passed by the parent calling function
|
30 |
+
let inputFields: Record<string, Partial<SupportedFields>> = {}
|
31 |
+
let allInputFields = getDefaultFields()
|
32 |
+
for (const [key, value] of Object.entries(inputs)) {
|
33 |
+
const inputField = identifyField(key, value)
|
34 |
+
inputFields[key] = inputField
|
35 |
+
allInputFields = {...allInputFields, ...inputField}
|
36 |
+
}
|
37 |
+
|
38 |
+
// the gradio input array
|
39 |
+
const gradioInputs: any[] = []
|
40 |
+
|
41 |
+
for (const parameter of mainGradioEndpoint.endpoint.parameters) {
|
42 |
+
let gradioInputValue: any = undefined
|
43 |
+
|
44 |
+
const fields = mainGradioEndpoint.fields[parameter.parameter_name]
|
45 |
+
|
46 |
+
// TODO: rewrite this in a better way maybe
|
47 |
+
// until then, please don't blame me if you forget to update those!
|
48 |
+
if (fields.hasPositiveTextPrompt) { gradioInputValue = allInputFields.inputPositiveTextPrompt }
|
49 |
+
if (fields.hasNegativeTextPrompt) { gradioInputValue = allInputFields.inputNegativeTextPrompt }
|
50 |
+
if (fields.hasInputImage) { gradioInputValue = allInputFields.inputImage }
|
51 |
+
if (fields.hasInputAudio) { gradioInputValue = allInputFields.inputAudio }
|
52 |
+
if (fields.hasInputWidth) { gradioInputValue = allInputFields.inputWidth }
|
53 |
+
if (fields.hasInputHeight) { gradioInputValue = allInputFields.inputHeight }
|
54 |
+
if (fields.hasInputSteps) { gradioInputValue = allInputFields.inputSteps }
|
55 |
+
if (fields.hasInputGuidance) { gradioInputValue = allInputFields.inputGuidance }
|
56 |
+
if (fields.hasInputSeed) { gradioInputValue = allInputFields.inputSeed }
|
57 |
+
|
58 |
+
gradioInputs.push(gradioInputValue)
|
59 |
+
}
|
60 |
+
|
61 |
+
return {
|
62 |
+
endpoint: mainGradioEndpoint.name,
|
63 |
+
inputs: gradioInputs
|
64 |
+
}
|
65 |
+
}
|
src/lib/hf/adapter/findMainGradioEndpoint.ts
ADDED
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { GradioApiInfo, GradioEndpoint, SupportedFields } from "../types"
|
2 |
+
import { identifyField } from "./identifyField"
|
3 |
+
import { getDefaultFields } from "./getDefaultFields"
|
4 |
+
import { getAdaptationScore } from "./getAdaptationScore"
|
5 |
+
|
6 |
+
/**
|
7 |
+
* Find the main entrypoint (main entry endpoint) of a Gradio API
|
8 |
+
*/
|
9 |
+
export function findMainGradioEndpoint({
|
10 |
+
gradioApiInfo,
|
11 |
+
}: {
|
12 |
+
gradioApiInfo: GradioApiInfo
|
13 |
+
}): GradioEndpoint | undefined {
|
14 |
+
const endpoints: GradioEndpoint[] = [
|
15 |
+
...Object.entries(gradioApiInfo.named_endpoints)
|
16 |
+
.map(([name, endpoint]) => ({ isNamed: true, name, endpoint, fields: {}, score: 0 })),
|
17 |
+
...Object.entries(gradioApiInfo.unnamed_endpoints)
|
18 |
+
.map(([name, endpoint]) => ({ isNamed: true, name, endpoint, fields: {}, score: 0 })),
|
19 |
+
]
|
20 |
+
|
21 |
+
// generally the main entry point will be called "/run", "/call", "/predict" etc
|
22 |
+
// and contain stuff we usually expect: a text prompt, or image etc
|
23 |
+
const sortableEndpoints = endpoints.map(({ isNamed, name, endpoint, score }) => {
|
24 |
+
console.log(`found endpoint: ${name}`)
|
25 |
+
|
26 |
+
const isContinuous = !!endpoint.type?.continuous
|
27 |
+
const isGenerator = !!endpoint.type?.generator
|
28 |
+
const canCancel = !!endpoint.type?.cancel
|
29 |
+
|
30 |
+
let gradioFields: Record<string, Partial<SupportedFields>> = {}
|
31 |
+
let allGradioFields = getDefaultFields()
|
32 |
+
for (const gradioParameter of endpoint.parameters) {
|
33 |
+
const gradioParameterField = identifyField(
|
34 |
+
gradioParameter.parameter_name,
|
35 |
+
gradioParameter.parameter_default
|
36 |
+
)
|
37 |
+
gradioFields[gradioParameter.parameter_name] = gradioParameterField
|
38 |
+
allGradioFields = { ...allGradioFields, ...gradioParameterField }
|
39 |
+
}
|
40 |
+
|
41 |
+
score = getAdaptationScore(allGradioFields)
|
42 |
+
console.log(`allGradioFields:`, allGradioFields)
|
43 |
+
console.log(`score:`, score)
|
44 |
+
|
45 |
+
return {
|
46 |
+
isNamed,
|
47 |
+
name,
|
48 |
+
endpoint,
|
49 |
+
fields: gradioFields,
|
50 |
+
score,
|
51 |
+
}
|
52 |
+
})
|
53 |
+
|
54 |
+
return sortableEndpoints.sort((a, b) => {
|
55 |
+
return b.score - a.score
|
56 |
+
}).at(0)
|
57 |
+
}
|
src/lib/hf/adapter/getAdaptationScore.ts
ADDED
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { SupportedFields } from "../types"
|
2 |
+
|
3 |
+
export function getAdaptationScore(supportedFields: SupportedFields) {
|
4 |
+
let score = 0
|
5 |
+
|
6 |
+
// TODO: rewrite this in a better way maybe
|
7 |
+
// until then, please don't blame me if you forget to update those!
|
8 |
+
score += supportedFields.hasPositiveTextPrompt ? 1 : 0
|
9 |
+
score += supportedFields.hasNegativeTextPrompt ? 1 : 0
|
10 |
+
score += supportedFields.hasInputImage ? 1 : 0
|
11 |
+
score += supportedFields.hasInputAudio ? 1 : 0
|
12 |
+
score += supportedFields.hasInputWidth ? 1 : 0
|
13 |
+
score += supportedFields.hasInputHeight ? 1 : 0
|
14 |
+
score += supportedFields.hasInputSteps ? 1 : 0
|
15 |
+
score += supportedFields.hasInputGuidance ? 1 : 0
|
16 |
+
score += supportedFields.inputSeed ? 1 : 0
|
17 |
+
return score
|
18 |
+
}
|
src/lib/hf/adapter/getDefaultFields.ts
ADDED
@@ -0,0 +1,24 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { SupportedFields } from "../types"
|
2 |
+
|
3 |
+
export function getDefaultFields(): SupportedFields {
|
4 |
+
return {
|
5 |
+
inputPositiveTextPrompt: "",
|
6 |
+
hasPositiveTextPrompt: false,
|
7 |
+
inputNegativeTextPrompt: "",
|
8 |
+
hasNegativeTextPrompt: false,
|
9 |
+
inputImage: "",
|
10 |
+
hasInputImage: false,
|
11 |
+
inputAudio: "",
|
12 |
+
hasInputAudio: false,
|
13 |
+
inputWidth: 1024,
|
14 |
+
hasInputWidth: false,
|
15 |
+
inputHeight: 574,
|
16 |
+
hasInputHeight: false,
|
17 |
+
inputSteps: 8,
|
18 |
+
hasInputSteps: false,
|
19 |
+
inputGuidance: 7,
|
20 |
+
hasInputGuidance: false,
|
21 |
+
inputSeed: 0,
|
22 |
+
hasInputSeed: false
|
23 |
+
}
|
24 |
+
}
|
src/lib/hf/adapter/identifyField.ts
ADDED
@@ -0,0 +1,218 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { SupportedFields } from "../types"
|
2 |
+
|
3 |
+
export function identifyField(key: string, value?: any, index?: number): Partial<SupportedFields> {
|
4 |
+
const normalizedKey = key.toLowerCase().trim()
|
5 |
+
switch (normalizedKey) {
|
6 |
+
|
7 |
+
case "width":
|
8 |
+
let strWidth = ""
|
9 |
+
let numWidth = 0
|
10 |
+
if (typeof value === "string" && value.length) {
|
11 |
+
strWidth = value
|
12 |
+
}
|
13 |
+
let maybeNumWidth = Number(strWidth)
|
14 |
+
if (typeof maybeNumWidth === "number" && isFinite(maybeNumWidth) && !isNaN(maybeNumWidth) && maybeNumWidth) {
|
15 |
+
numWidth = maybeNumWidth
|
16 |
+
return {
|
17 |
+
hasInputWidth: true,
|
18 |
+
inputWidth: numWidth
|
19 |
+
}
|
20 |
+
} else if (strWidth) {
|
21 |
+
return {
|
22 |
+
hasInputWidth: true,
|
23 |
+
inputWidth: strWidth
|
24 |
+
}
|
25 |
+
} else {
|
26 |
+
return {
|
27 |
+
hasInputWidth: true,
|
28 |
+
// indexInputWidth: index,
|
29 |
+
}
|
30 |
+
}
|
31 |
+
|
32 |
+
case "height":
|
33 |
+
let strHeight = ""
|
34 |
+
let numHeight = 0
|
35 |
+
if (typeof value === "string" && value.length) {
|
36 |
+
strHeight = value
|
37 |
+
}
|
38 |
+
let maybeNumHeight = Number(strHeight)
|
39 |
+
if (typeof maybeNumHeight === "number" && isFinite(maybeNumHeight) && !isNaN(maybeNumHeight) && maybeNumHeight) {
|
40 |
+
numHeight = maybeNumHeight
|
41 |
+
return {
|
42 |
+
hasInputHeight: true,
|
43 |
+
inputHeight: numHeight
|
44 |
+
}
|
45 |
+
} else if (strHeight) {
|
46 |
+
return {
|
47 |
+
hasInputHeight: true,
|
48 |
+
inputHeight: strHeight
|
49 |
+
}
|
50 |
+
} else {
|
51 |
+
return {
|
52 |
+
hasInputHeight: true,
|
53 |
+
// indexInputHeight: index,
|
54 |
+
}
|
55 |
+
}
|
56 |
+
|
57 |
+
case "seed":
|
58 |
+
let strSeed = ""
|
59 |
+
let numSeed = 0
|
60 |
+
if (typeof value === "string" && value.length) {
|
61 |
+
strSeed = value
|
62 |
+
}
|
63 |
+
let maybeNumSeed = Number(strSeed)
|
64 |
+
if (typeof maybeNumSeed === "number" && isFinite(maybeNumSeed) && !isNaN(maybeNumSeed) && maybeNumSeed) {
|
65 |
+
numSeed = maybeNumSeed
|
66 |
+
return {
|
67 |
+
hasInputSeed: true,
|
68 |
+
inputSeed: numSeed
|
69 |
+
}
|
70 |
+
} else if (strSeed) {
|
71 |
+
return {
|
72 |
+
hasInputSeed: true,
|
73 |
+
inputSeed: strSeed
|
74 |
+
}
|
75 |
+
} else {
|
76 |
+
return {
|
77 |
+
hasInputSeed: true,
|
78 |
+
// indexInputSeed: index,
|
79 |
+
}
|
80 |
+
}
|
81 |
+
|
82 |
+
case "steps":
|
83 |
+
case "n_steps":
|
84 |
+
case "nb_steps":
|
85 |
+
case "num_steps":
|
86 |
+
case "step_count":
|
87 |
+
case "inference_steps":
|
88 |
+
case "n_inference_steps":
|
89 |
+
case "nb_inference_steps":
|
90 |
+
case "num_inference_steps":
|
91 |
+
let strSteps = ""
|
92 |
+
let numSteps = 0
|
93 |
+
if (typeof value === "string" && value.length) {
|
94 |
+
strSteps = value
|
95 |
+
}
|
96 |
+
let maybeNumSteps = Number(strSteps)
|
97 |
+
if (typeof maybeNumSteps === "number" && isFinite(maybeNumSteps) && !isNaN(maybeNumSteps) && maybeNumSteps) {
|
98 |
+
numSteps = maybeNumSteps
|
99 |
+
return {
|
100 |
+
hasInputSteps: true,
|
101 |
+
inputSteps: numSteps
|
102 |
+
}
|
103 |
+
} else if (strSteps) {
|
104 |
+
return {
|
105 |
+
hasInputSteps: true,
|
106 |
+
inputSteps: strSteps
|
107 |
+
}
|
108 |
+
} else {
|
109 |
+
return {
|
110 |
+
hasInputSteps: true,
|
111 |
+
// indexInputSteps: index,
|
112 |
+
}
|
113 |
+
}
|
114 |
+
|
115 |
+
// note: what we have to choose depends on what Gradio expects
|
116 |
+
// steps = numSteps
|
117 |
+
break;
|
118 |
+
|
119 |
+
case "guidance":
|
120 |
+
case "guidance_scale":
|
121 |
+
case "guidancescale":
|
122 |
+
let strGuidanceScale = ""
|
123 |
+
let numGuidanceScale = 0
|
124 |
+
if (typeof value === "string" && value.length) {
|
125 |
+
strGuidanceScale = value
|
126 |
+
}
|
127 |
+
let maybeNumGuidanceScale = Number(strGuidanceScale)
|
128 |
+
if (typeof maybeNumGuidanceScale === "number" && isFinite(maybeNumGuidanceScale) && !isNaN(maybeNumGuidanceScale) && maybeNumGuidanceScale) {
|
129 |
+
numGuidanceScale = maybeNumGuidanceScale
|
130 |
+
return {
|
131 |
+
hasInputGuidance: true,
|
132 |
+
inputGuidance: numGuidanceScale
|
133 |
+
}
|
134 |
+
} else if (strGuidanceScale) {
|
135 |
+
return {
|
136 |
+
hasInputGuidance: true,
|
137 |
+
inputGuidance: strGuidanceScale
|
138 |
+
}
|
139 |
+
} else {
|
140 |
+
return {
|
141 |
+
hasInputGuidance: true,
|
142 |
+
// indexInputGuidance: index,
|
143 |
+
}
|
144 |
+
}
|
145 |
+
|
146 |
+
case "negative":
|
147 |
+
case "negativeprompt":
|
148 |
+
case "negative_prompt":
|
149 |
+
if (typeof value === "string" && value.length) {
|
150 |
+
return {
|
151 |
+
hasNegativeTextPrompt: true,
|
152 |
+
inputNegativeTextPrompt: value,
|
153 |
+
}
|
154 |
+
} else {
|
155 |
+
return {
|
156 |
+
hasNegativeTextPrompt: true,
|
157 |
+
// indexNegativeTextPrompt: index,
|
158 |
+
}
|
159 |
+
}
|
160 |
+
|
161 |
+
case "source_image":
|
162 |
+
case "input_image":
|
163 |
+
case "image_input":
|
164 |
+
case "image":
|
165 |
+
case "image":
|
166 |
+
if (typeof value === "string" && value.length) {
|
167 |
+
return {
|
168 |
+
hasInputImage: true,
|
169 |
+
inputImage: value
|
170 |
+
}
|
171 |
+
} else {
|
172 |
+
return {
|
173 |
+
hasInputImage: true,
|
174 |
+
// indexPositiveTextPrompt: index,
|
175 |
+
}
|
176 |
+
}
|
177 |
+
|
178 |
+
case "source_audio":
|
179 |
+
case "input_audio":
|
180 |
+
case "audio_input":
|
181 |
+
case "driving_audio":
|
182 |
+
case "voice":
|
183 |
+
case "audio":
|
184 |
+
if (typeof value === "string" && value.length) {
|
185 |
+
return {
|
186 |
+
hasInputAudio: true,
|
187 |
+
inputAudio: value
|
188 |
+
}
|
189 |
+
} else {
|
190 |
+
return {
|
191 |
+
hasInputAudio: true,
|
192 |
+
// indexPositiveTextPrompt: index,
|
193 |
+
}
|
194 |
+
}
|
195 |
+
|
196 |
+
case "prompt":
|
197 |
+
case "positive":
|
198 |
+
case "positiveprompt":
|
199 |
+
case "positive_prompt":
|
200 |
+
case "input_prompt":
|
201 |
+
case "input_text":
|
202 |
+
case "prompt_text":
|
203 |
+
case "text_prompt":
|
204 |
+
case "text":
|
205 |
+
if (typeof value === "string" && value.length) {
|
206 |
+
return {
|
207 |
+
hasPositiveTextPrompt: true,
|
208 |
+
inputPositiveTextPrompt: value
|
209 |
+
}
|
210 |
+
} else {
|
211 |
+
return {
|
212 |
+
hasPositiveTextPrompt: true,
|
213 |
+
// indexPositiveTextPrompt: index,
|
214 |
+
}
|
215 |
+
}
|
216 |
+
}
|
217 |
+
return {}
|
218 |
+
}
|
src/lib/hf/callGradioApi.ts
ADDED
@@ -0,0 +1,77 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { Client } from "@gradio/client"
|
2 |
+
|
3 |
+
import { getGradioApiInfo } from "./getGradioApiInfo"
|
4 |
+
import { parseHuggingFaceHubId } from "./parseHuggingFaceHubId"
|
5 |
+
import { adaptAnyInputsToGradioInputs } from "./adapter/adaptAnyInputsToGradioInputs"
|
6 |
+
import { getCurrentOwner } from "./getCurrentOwner"
|
7 |
+
|
8 |
+
/**
|
9 |
+
*
|
10 |
+
* @param param0
|
11 |
+
* @returns
|
12 |
+
*/
|
13 |
+
export async function callGradioApi<T>({
|
14 |
+
url,
|
15 |
+
inputs,
|
16 |
+
apiKey
|
17 |
+
}: {
|
18 |
+
url: string
|
19 |
+
inputs: Record<string, string | number | boolean | undefined | null>
|
20 |
+
apiKey?: string
|
21 |
+
}): Promise<T> {
|
22 |
+
|
23 |
+
// console.log(`callGradioApi called on: `, { url, apiKey })
|
24 |
+
// we can support either a call to the original space, or to the current user space
|
25 |
+
|
26 |
+
const { owner: previousOwner, id } = parseHuggingFaceHubId(url, "spaces")
|
27 |
+
|
28 |
+
// console.log(`then: `, { previousOwner, id })
|
29 |
+
|
30 |
+
const owner = apiKey ? (await getCurrentOwner(apiKey)) : previousOwner
|
31 |
+
|
32 |
+
const ownerAndId = `${owner}/${id}`
|
33 |
+
// console.log(`then: `, { owner, ownerAndId })
|
34 |
+
// TODO: if the user doesn't have forked the space yet we should ask the user to do sp
|
35 |
+
|
36 |
+
/*
|
37 |
+
// first step is to check if the user already has this space
|
38 |
+
const gradioSpaces = await getSpaces({ apiKey, sdk: "gradio" })
|
39 |
+
const gradioSpace = gradioSpaces.find(s => s.name === id)
|
40 |
+
|
41 |
+
if (gradioSpace) {
|
42 |
+
// good, there is already a space for that
|
43 |
+
console.log(`good, user did the homework and forked the space to their own account`)
|
44 |
+
} else {
|
45 |
+
// bad, there is no space for that
|
46 |
+
console.log(`bad, user should fork the space`)
|
47 |
+
throw new Error(`Couldn't find "${ownerAndId}". Please make sure you created or duplicated the space correctly.`)
|
48 |
+
}
|
49 |
+
*/
|
50 |
+
|
51 |
+
const gradioApiInfo = await getGradioApiInfo({
|
52 |
+
url: ownerAndId,
|
53 |
+
apiKey
|
54 |
+
})
|
55 |
+
|
56 |
+
// console.log(`gradioApiInfo: `, gradioApiInfo)
|
57 |
+
|
58 |
+
const gradioEndpointInputs = adaptAnyInputsToGradioInputs({
|
59 |
+
inputs,
|
60 |
+
gradioApiInfo
|
61 |
+
})
|
62 |
+
|
63 |
+
// console.log(`gradioEndpointInputs: `, gradioEndpointInputs)
|
64 |
+
|
65 |
+
const app = await Client.connect(ownerAndId, {
|
66 |
+
hf_token: apiKey as any
|
67 |
+
})
|
68 |
+
// console.log(`app: `, app)
|
69 |
+
|
70 |
+
const output = await app.predict(
|
71 |
+
gradioEndpointInputs.endpoint,
|
72 |
+
gradioEndpointInputs.inputs
|
73 |
+
)
|
74 |
+
console.log(`output: `, output)
|
75 |
+
|
76 |
+
return output.data as unknown as T
|
77 |
+
}
|
src/lib/hf/cloneSpace.ts
ADDED
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { listSpaces, Credentials, whoAmI, SpaceSdk } from "@huggingface/hub"
|
2 |
+
|
3 |
+
// TODO: not implemented yet (the body is intentionally empty).
// Intended to duplicate (fork) the given space into the user's own account,
// so that callGradioApi() can target a copy the user controls.
export async function cloneSpace({
  id,
  apiKey
}: {
  id: string
  apiKey: string
}) {

}
|
src/lib/hf/getCurrentOwner.ts
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { Credentials, whoAmI } from "@huggingface/hub"
|
2 |
+
|
3 |
+
export async function getCurrentOwner(apiKey: string): Promise<string> {
|
4 |
+
|
5 |
+
const accessToken = apiKey || ""
|
6 |
+
|
7 |
+
if (!accessToken) {
|
8 |
+
throw new Error(`cannot list spaces without a Hugging Face access token`)
|
9 |
+
}
|
10 |
+
|
11 |
+
const credentials: Credentials = { accessToken }
|
12 |
+
|
13 |
+
let username = ""
|
14 |
+
try {
|
15 |
+
const { name } = await whoAmI({ credentials })
|
16 |
+
username = name
|
17 |
+
if (!username) {
|
18 |
+
throw new Error(`returned username is empty`)
|
19 |
+
}
|
20 |
+
} catch (err) {
|
21 |
+
throw new Error(`cannot list spaces: ${err}`)
|
22 |
+
}
|
23 |
+
|
24 |
+
return username
|
25 |
+
}
|
src/lib/hf/getGradioApiInfo.ts
ADDED
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { Client } from "@gradio/client"
|
2 |
+
|
3 |
+
import { GradioApiInfo } from "./types"
|
4 |
+
import { parseHuggingFaceHubId } from "./parseHuggingFaceHubId"
|
5 |
+
|
6 |
+
export async function getGradioApiInfo({
|
7 |
+
url,
|
8 |
+
apiKey,
|
9 |
+
}: {
|
10 |
+
url: string
|
11 |
+
apiKey?: string
|
12 |
+
}): Promise<GradioApiInfo> {
|
13 |
+
const { ownerAndId } = parseHuggingFaceHubId(url, "spaces")
|
14 |
+
|
15 |
+
const app = await Client.connect(ownerAndId, {
|
16 |
+
hf_token: apiKey as any
|
17 |
+
})
|
18 |
+
const apiInfo: GradioApiInfo = await app.view_api()
|
19 |
+
return apiInfo
|
20 |
+
}
|
src/lib/hf/getSpaceStatus.ts
ADDED
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { HFSpaceStatus } from "./types"
|
2 |
+
import { parseHuggingFaceHubId } from "./parseHuggingFaceHubId"
|
3 |
+
|
4 |
+
|
5 |
+
export async function getSpaceStatus({
|
6 |
+
id,
|
7 |
+
apiKey,
|
8 |
+
}: {
|
9 |
+
id: String
|
10 |
+
apiKey: string
|
11 |
+
}): Promise<HFSpaceStatus> {
|
12 |
+
|
13 |
+
const { category, ownerAndId } = parseHuggingFaceHubId(id)
|
14 |
+
if (category !== "spaces") {
|
15 |
+
throw new Error(`cannot get the running status of ${category} "${ownerAndId}": this is not a space!`)
|
16 |
+
}
|
17 |
+
const res = await fetch(`https://huggingface.co/api/spaces/${ownerAndId}`, {
|
18 |
+
method: "GET",
|
19 |
+
headers: {
|
20 |
+
Authorization: `Bearer ${apiKey}`
|
21 |
+
}
|
22 |
+
})
|
23 |
+
|
24 |
+
if (res.status !== 200) {
|
25 |
+
throw new Error("failed to get the space data")
|
26 |
+
}
|
27 |
+
|
28 |
+
try {
|
29 |
+
const data = await res.json() as HFSpaceStatus
|
30 |
+
return data
|
31 |
+
} catch (err) {
|
32 |
+
throw new Error(`failed to parse space data: ${err}`)
|
33 |
+
}
|
34 |
+
}
|
src/lib/hf/{getMyGradioSpaces.ts → getSpaces.ts}
RENAMED
@@ -1,13 +1,15 @@
|
|
1 |
-
import { listSpaces, Credentials, whoAmI,
|
2 |
-
import {
|
3 |
|
4 |
-
export async function
|
5 |
-
|
|
|
6 |
}: {
|
7 |
-
|
8 |
-
|
|
|
9 |
|
10 |
-
const accessToken =
|
11 |
|
12 |
if (!accessToken) {
|
13 |
throw new Error(`cannot list spaces without a Hugging Face access token`)
|
@@ -26,9 +28,7 @@ export async function getMyGradioSpaces({
|
|
26 |
throw new Error(`cannot list spaces: ${err}`)
|
27 |
}
|
28 |
|
29 |
-
|
30 |
-
let maxNbSpaces = 10
|
31 |
-
let gradioSpaces: GradioSpace[] = []
|
32 |
|
33 |
for await (const space of listSpaces({
|
34 |
search: {
|
@@ -42,14 +42,10 @@ export async function getMyGradioSpaces({
|
|
42 |
],
|
43 |
credentials
|
44 |
})) {
|
45 |
-
if (
|
46 |
-
space.sdk !== "gradio"
|
47 |
-
) { continue }
|
48 |
-
|
49 |
-
console.log("MySpace:", gradioSpaces)
|
50 |
|
51 |
-
|
|
|
52 |
}
|
53 |
|
54 |
-
return
|
55 |
}
|
|
|
1 |
+
import { listSpaces, Credentials, whoAmI, SpaceSdk } from "@huggingface/hub"
|
2 |
+
import { HFSpace } from "./types"
|
3 |
|
4 |
+
export async function getSpaces({
|
5 |
+
apiKey,
|
6 |
+
sdk = "gradio"
|
7 |
}: {
|
8 |
+
apiKey: string
|
9 |
+
sdk?: SpaceSdk
|
10 |
+
}): Promise<HFSpace[]> {
|
11 |
|
12 |
+
const accessToken = apiKey || ""
|
13 |
|
14 |
if (!accessToken) {
|
15 |
throw new Error(`cannot list spaces without a Hugging Face access token`)
|
|
|
28 |
throw new Error(`cannot list spaces: ${err}`)
|
29 |
}
|
30 |
|
31 |
+
let results: HFSpace[] = []
|
|
|
|
|
32 |
|
33 |
for await (const space of listSpaces({
|
34 |
search: {
|
|
|
42 |
],
|
43 |
credentials
|
44 |
})) {
|
|
|
|
|
|
|
|
|
|
|
45 |
|
46 |
+
if (sdk && space.sdk != sdk) { continue }
|
47 |
+
results.push(space)
|
48 |
}
|
49 |
|
50 |
+
return results
|
51 |
}
|
src/lib/hf/parseHuggingFaceHubId.ts
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { HFHubCategory } from "./types"
|
2 |
+
|
3 |
+
export function parseHuggingFaceHubId(input?: any, defaultCategory: HFHubCategory = "models"): {
|
4 |
+
category: HFHubCategory
|
5 |
+
owner: string
|
6 |
+
id: string
|
7 |
+
categoryAndOwnerAndId: string
|
8 |
+
ownerAndId: string
|
9 |
+
} {
|
10 |
+
let inputStr = `${input || ""}`
|
11 |
+
|
12 |
+
if (inputStr.includes(".co/")) {
|
13 |
+
inputStr = inputStr.split(".co/").pop() || ""
|
14 |
+
} else if (inputStr.includes(".com/")) {
|
15 |
+
inputStr = inputStr.split(".com/").pop() || ""
|
16 |
+
}
|
17 |
+
|
18 |
+
let parts = inputStr.split("/")
|
19 |
+
if (parts.length < 2 || parts.length > 3) { throw new Error(`input seems invalid, cannot extract chunks`) }
|
20 |
+
|
21 |
+
if (parts.length === 2) {
|
22 |
+
parts = [defaultCategory, parts[0], parts[1]]
|
23 |
+
}
|
24 |
+
|
25 |
+
const [category, owner, id] = parts
|
26 |
+
|
27 |
+
|
28 |
+
return {
|
29 |
+
category: category as HFHubCategory,
|
30 |
+
owner,
|
31 |
+
id,
|
32 |
+
categoryAndOwnerAndId: `${category}/${owner}/${id}`,
|
33 |
+
ownerAndId: `${owner}/${id}`,
|
34 |
+
}
|
35 |
+
}
|
src/lib/hf/runSpace.ts
ADDED
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import { getSpaces } from "./getSpaces"
import { parseHuggingFaceHubId } from "./parseHuggingFaceHubId"

/**
 * Run a Gradio space on behalf of the user.
 *
 * NOTE(review): this is a work-in-progress stub. It only checks whether the
 * user already owns a space whose name matches the target id, logs the result,
 * and returns an empty object cast to O. `params` and `sleepAfter` are
 * accepted but not used yet.
 *
 * @param url - Hugging Face space id or URL (parsed with parseHuggingFaceHubId)
 * @param params - input payload for the space (unused for now)
 * @param apiKey - Hugging Face access token (forwarded to getSpaces)
 * @param sleepAfter - intended sleep policy for the cloned space (unused for now)
 * @returns an empty object for now — TODO: actually invoke the space
 */
export async function runGradioSpace<I, O>({
  url,
  params,
  apiKey,
  sleepAfter = "hour"
}: {
  url: string
  params?: I
  apiKey: string
  sleepAfter?: "hour" | "day"
}): Promise<O> {
  const { id } = parseHuggingFaceHubId(url)

  // list the Gradio spaces owned by the user, to see if a clone already exists
  let gradioSpaces = await getSpaces({ apiKey, sdk: "gradio" })

  if (gradioSpaces.find(s => s.name === id)) {
    console.log("runGradioSpace: good, we already have cloned the space")
  } else {
    console.log("runGradioSpace: hm, we need to clone the space")
    console.log("runGradioSpace: we might want to ask the user for confirmation here")

  }
  // TODO: call the (cloned) space and return its real output
  return {} as O
}
|
src/lib/hf/types.ts
CHANGED
@@ -1,5 +1,65 @@
|
|
1 |
-
import { SpaceEntry} from "@huggingface/hub"
|
2 |
import { ApiSpaceInfo } from "@huggingface/hub/dist/src/types/api/api-space"
|
|
|
3 |
|
4 |
-
export type
|
5 |
-
SpaceEntry & Pick<ApiSpaceInfo, "cardData" | "runtime" | "tags" | "models">
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import { SpaceEntry, SpaceRuntime } from "@huggingface/hub"
import { ApiSpaceInfo } from "@huggingface/hub/dist/src/types/api/api-space"
import { ApiInfo, EndpointInfo, JsApiData } from "@gradio/client/dist/types"

// a Hub space entry enriched with the extra fields we read from the API payload
export type HFSpace =
  SpaceEntry & Pick<ApiSpaceInfo, "cardData" | "runtime" | "tags" | "models">

// shape of the JSON returned by https://huggingface.co/api/spaces/<owner>/<id>
// — presumably matches the Hub API response; TODO confirm against the API docs
export interface HFSpaceStatus {
  _id: string
  id: string
  author: string
  sha: string
  lastModified: string
  private: boolean
  gated: boolean
  disabled: boolean
  host: string
  subdomain: string
  tags: string[]
  likes: number
  sdk: string
  runtime: SpaceRuntime
  createdAt: string
}

// the two kinds of Hub repositories handled by parseHuggingFaceHubId
export type HFHubCategory = "spaces" | "models"

// alias for the Gradio client's API description, specialized to the JS data shape
export type GradioApiInfo = ApiInfo<JsApiData>

// normalized description of the inputs a Gradio endpoint supports;
// each capability keeps the input's field name plus a has* presence flag
export type SupportedFields = {
  inputPositiveTextPrompt: string
  hasPositiveTextPrompt: boolean

  inputNegativeTextPrompt: string
  hasNegativeTextPrompt: boolean

  inputImage: string
  hasInputImage: boolean

  inputAudio: string
  hasInputAudio: boolean

  inputWidth: string | number
  hasInputWidth: boolean

  inputHeight: string | number
  hasInputHeight: boolean

  inputSteps: string | number
  hasInputSteps: boolean

  inputGuidance: string | number
  hasInputGuidance: boolean

  inputSeed: string | number
  hasInputSeed: boolean
}

// a scored Gradio endpoint candidate (see findMainGradioEndpoint)
export type GradioEndpoint = {
  isNamed: boolean
  name: string
  endpoint: EndpointInfo<JsApiData>
  fields: Record<string, Partial<SupportedFields>>
  score: number
}
|
src/lib/hf/useMyGradioSpaces.ts
CHANGED
@@ -2,18 +2,18 @@ import { useEffect, useState } from "react"
|
|
2 |
|
3 |
import { useSettings } from "@/controllers/settings"
|
4 |
|
5 |
-
import {
|
6 |
-
import {
|
7 |
|
8 |
-
export function useMyGradioSpaces() {
|
9 |
-
const [gradioSpaces, setGradioSpaces] = useState<
|
10 |
|
11 |
const huggingFaceApiKey = useSettings(s => s.huggingFaceApiKey)
|
12 |
|
13 |
useEffect(() => {
|
14 |
const fn = async () => {
|
15 |
try {
|
16 |
-
const newSpaces = await
|
17 |
setGradioSpaces(newSpaces)
|
18 |
} catch (err) {
|
19 |
console.error(`failed to load the Gradio spaces (most likely your HF token is invalid)`, err)
|
|
|
2 |
|
3 |
import { useSettings } from "@/controllers/settings"
|
4 |
|
5 |
+
import { getSpaces } from "./getSpaces"
|
6 |
+
import { HFSpace } from "./types"
|
7 |
|
8 |
+
export function useMyGradioSpaces(): HFSpace[] {
|
9 |
+
const [gradioSpaces, setGradioSpaces] = useState<HFSpace[]>([])
|
10 |
|
11 |
const huggingFaceApiKey = useSettings(s => s.huggingFaceApiKey)
|
12 |
|
13 |
useEffect(() => {
|
14 |
const fn = async () => {
|
15 |
try {
|
16 |
+
const newSpaces = await getSpaces({ apiKey: huggingFaceApiKey, sdk: "gradio" })
|
17 |
setGradioSpaces(newSpaces)
|
18 |
} catch (err) {
|
19 |
console.error(`failed to load the Gradio spaces (most likely your HF token is invalid)`, err)
|
src/lib/utils/getResolveRequestPrompts.ts
DELETED
@@ -1,67 +0,0 @@
|
|
import { ClapEntity, ClapSegment, ClapSegmentCategory } from "@aitube/clap"
import { getVideoPrompt } from "@aitube/engine"

import { SettingsState } from "@/controllers/settings"

/**
 * Build the positive and negative prompts for a resolve request.
 *
 * The positive prompt wraps the computed video prompt with the
 * category-specific prefix/suffix from the settings (video vs storyboard);
 * the negative prompt comes straight from the settings. Empty chunks are
 * dropped and the remainder is joined with ", ".
 *
 * @param settings - user settings holding the prompt prefixes/suffixes
 * @param segment - the segment being resolved (its category picks the affixes)
 * @param segments - all segments used to derive the base video prompt
 * @param entities - entity map passed through to getVideoPrompt
 * @returns the assembled positive and negative prompt strings
 */
export function getResolveRequestPrompts({
  settings,
  segment,
  segments,
  entities,
}: {
  settings: SettingsState
  segment: ClapSegment
  segments: ClapSegment[]
  entities: Record<string, ClapEntity>
}): {
  positivePrompt: string
  negativePrompt: string
} {

  // base prompt derived from all segments + entities (opaque helper)
  const videoPrompt = getVideoPrompt(
    segments,
    entities
  )

  // prefix + base prompt + suffix; affixes depend on the segment category,
  // non-video/non-storyboard categories get no affixes at all
  const positivePrompt = [
    segment.category === ClapSegmentCategory.VIDEO
    ? settings.videoPromptPrefix
    : segment.category === ClapSegmentCategory.STORYBOARD
    ? settings.imagePromptPrefix
    : "",
    videoPrompt,
    segment.category === ClapSegmentCategory.VIDEO
    ? settings.videoPromptSuffix
    : segment.category === ClapSegmentCategory.STORYBOARD
    ? settings.imagePromptSuffix
    : ""
  ].map(x => x.trim()).filter(x => x).join(", ")

  const negativePrompt = [
    segment.category === ClapSegmentCategory.VIDEO
    ? settings.videoNegativePrompt
    : segment.category === ClapSegmentCategory.STORYBOARD
    ? settings.imageNegativePrompt
    : ""
  ].map(x => x.trim()).filter(x => x).join(", ")

  /*
  console.log(`getResolveRequestPrompts:`, {
    segments: segments.map(s => ({
      ...s,
      assetUrl: "<HIDDEN>"
    })),
    videoPrompt,
    positivePrompt,
    negativePrompt,
    // entity: entities['6c1e99b5-02af-47fc-8e6a-0a5bcb9ee8b1'],
  })
  throw new Error("uh uh")
  */


  return {
    positivePrompt,
    negativePrompt
  }
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
src/lib/utils/getTypeAndExtension.ts
ADDED
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import { ClapOutputType } from "@aitube/clap"
|
2 |
+
|
3 |
+
/**
|
4 |
+
* break a base64 string into sub-components
|
5 |
+
*/
|
6 |
+
export function getTypeAndExtension(base64: string = ""): {
|
7 |
+
|
8 |
+
// category eg. video, audio, text
|
9 |
+
category: string
|
10 |
+
|
11 |
+
// file format eg. video/mp4 text/html audio/wave
|
12 |
+
assetFileFormat: string
|
13 |
+
|
14 |
+
// file extension eg. .mp4 .html .wav
|
15 |
+
extension: string
|
16 |
+
|
17 |
+
outputType: ClapOutputType
|
18 |
+
} {
|
19 |
+
// Regular expression to extract the MIME type and the base64 data
|
20 |
+
const matches = base64.match(/^data:([A-Za-z-+0-9/]+);base64,(.+)$/)
|
21 |
+
|
22 |
+
if (!matches || matches.length !== 3) {
|
23 |
+
throw new Error("Invalid base64 string")
|
24 |
+
}
|
25 |
+
|
26 |
+
const assetFileFormat = matches[1] || ""
|
27 |
+
|
28 |
+
// this should be enough for most media formats (jpeg, png, webp, mp4)
|
29 |
+
const [category, extension] = assetFileFormat.split("/")
|
30 |
+
|
31 |
+
let outputType = ClapOutputType.TEXT
|
32 |
+
|
33 |
+
if (category === "audio") {
|
34 |
+
outputType = ClapOutputType.AUDIO
|
35 |
+
} else if (category === "image") {
|
36 |
+
outputType = ClapOutputType.IMAGE
|
37 |
+
} else if (category === "video") {
|
38 |
+
outputType = ClapOutputType.VIDEO
|
39 |
+
}
|
40 |
+
|
41 |
+
return {
|
42 |
+
category,
|
43 |
+
assetFileFormat,
|
44 |
+
extension,
|
45 |
+
outputType,
|
46 |
+
}
|
47 |
+
}
|