sidthegirlkid committed on
Commit 3464d09
1 Parent(s): 259a258

Upload folder using huggingface_hub

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. LLaMa_2_Prompting_Guide_with_Gradio.ipynb +0 -0
  2. node_modules/.bin/node-gyp-build +3 -0
  3. node_modules/.bin/node-gyp-build-optional +3 -0
  4. node_modules/.bin/node-gyp-build-test +3 -0
  5. node_modules/.package-lock.json +75 -0
  6. node_modules/@gradio/client/CHANGELOG.md +251 -0
  7. node_modules/@gradio/client/LICENSE +201 -0
  8. node_modules/@gradio/client/README.md +339 -0
  9. node_modules/@gradio/client/dist/client.d.ts +74 -0
  10. node_modules/@gradio/client/dist/client.d.ts.map +1 -0
  11. node_modules/@gradio/client/dist/index.d.ts +4 -0
  12. node_modules/@gradio/client/dist/index.d.ts.map +1 -0
  13. node_modules/@gradio/client/dist/index.js +1598 -0
  14. node_modules/@gradio/client/dist/types.d.ts +105 -0
  15. node_modules/@gradio/client/dist/types.d.ts.map +1 -0
  16. node_modules/@gradio/client/dist/upload.d.ts +25 -0
  17. node_modules/@gradio/client/dist/upload.d.ts.map +1 -0
  18. node_modules/@gradio/client/dist/utils.d.ts +33 -0
  19. node_modules/@gradio/client/dist/utils.d.ts.map +1 -0
  20. node_modules/@gradio/client/dist/wrapper-6f348d45.js +0 -0
  21. node_modules/@gradio/client/package.json +33 -0
  22. node_modules/@gradio/client/src/client.node-test.ts +172 -0
  23. node_modules/@gradio/client/src/client.ts +1702 -0
  24. node_modules/@gradio/client/src/globals.d.ts +29 -0
  25. node_modules/@gradio/client/src/index.ts +14 -0
  26. node_modules/@gradio/client/src/types.ts +119 -0
  27. node_modules/@gradio/client/src/upload.ts +117 -0
  28. node_modules/@gradio/client/src/utils.ts +300 -0
  29. node_modules/@gradio/client/tsconfig.json +14 -0
  30. node_modules/@gradio/client/vite.config.js +38 -0
  31. node_modules/bufferutil/LICENSE +20 -0
  32. node_modules/bufferutil/README.md +78 -0
  33. node_modules/bufferutil/binding.gyp +29 -0
  34. node_modules/bufferutil/fallback.js +34 -0
  35. node_modules/bufferutil/index.js +7 -0
  36. node_modules/bufferutil/package.json +36 -0
  37. node_modules/bufferutil/prebuilds/darwin-x64+arm64/node.napi.node +0 -0
  38. node_modules/bufferutil/prebuilds/linux-x64/node.napi.node +0 -0
  39. node_modules/bufferutil/prebuilds/win32-ia32/node.napi.node +0 -0
  40. node_modules/bufferutil/prebuilds/win32-x64/node.napi.node +0 -0
  41. node_modules/bufferutil/src/bufferutil.c +171 -0
  42. node_modules/node-gyp-build/LICENSE +21 -0
  43. node_modules/node-gyp-build/README.md +58 -0
  44. node_modules/node-gyp-build/bin.js +78 -0
  45. node_modules/node-gyp-build/build-test.js +19 -0
  46. node_modules/node-gyp-build/index.js +6 -0
  47. node_modules/node-gyp-build/node-gyp-build.js +207 -0
  48. node_modules/node-gyp-build/optional.js +7 -0
  49. node_modules/node-gyp-build/package.json +29 -0
  50. node_modules/semiver/dist/semiver.js +12 -0
LLaMa_2_Prompting_Guide_with_Gradio.ipynb CHANGED
The diff for this file is too large to render. See raw diff
node_modules/.bin/node-gyp-build ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4a5b3c80f8428029f0d705edaa981ead4a0d00fde4f8562f2115cd025744db2e
+ size 2035
node_modules/.bin/node-gyp-build-optional ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e0b3a3a04166e6ecf1020cb31c0c4a54432c16d6d88714bd4de2214cf67dec81
+ size 143
node_modules/.bin/node-gyp-build-test ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:34c19ff8b6675d6d27c63a7df44d77a442805eeea8756d1c89e0264f4a3028f6
+ size 398
node_modules/.package-lock.json ADDED
@@ -0,0 +1,75 @@
+ {
+ "name": "ngi-webathon",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "node_modules/@gradio/client": {
+ "version": "0.12.0",
+ "resolved": "https://registry.npmjs.org/@gradio/client/-/client-0.12.0.tgz",
+ "integrity": "sha512-GcwedS4JPu6W/8d8tnH8bS3wZAcLFKXqXh1ziCChx0w/o/cgS0+iuA4gLy7FltiZ1Ufi9+OVp1z/hcXaTnNwBQ==",
+ "dev": true,
+ "dependencies": {
+ "bufferutil": "^4.0.7",
+ "semiver": "^1.1.0",
+ "ws": "^8.13.0"
+ },
+ "engines": {
+ "node": ">=18.0.0"
+ }
+ },
+ "node_modules/bufferutil": {
+ "version": "4.0.8",
+ "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.8.tgz",
+ "integrity": "sha512-4T53u4PdgsXqKaIctwF8ifXlRTTmEPJ8iEPWFdGZvcf7sbwYo6FKFEX9eNNAnzFZ7EzJAQ3CJeOtCRA4rDp7Pw==",
+ "dev": true,
+ "hasInstallScript": true,
+ "dependencies": {
+ "node-gyp-build": "^4.3.0"
+ },
+ "engines": {
+ "node": ">=6.14.2"
+ }
+ },
+ "node_modules/node-gyp-build": {
+ "version": "4.8.0",
+ "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.8.0.tgz",
+ "integrity": "sha512-u6fs2AEUljNho3EYTJNBfImO5QTo/J/1Etd+NVdCj7qWKUSN/bSLkZwhDv7I+w/MSC6qJ4cknepkAYykDdK8og==",
+ "dev": true,
+ "bin": {
+ "node-gyp-build": "bin.js",
+ "node-gyp-build-optional": "optional.js",
+ "node-gyp-build-test": "build-test.js"
+ }
+ },
+ "node_modules/semiver": {
+ "version": "1.1.0",
+ "resolved": "https://registry.npmjs.org/semiver/-/semiver-1.1.0.tgz",
+ "integrity": "sha512-QNI2ChmuioGC1/xjyYwyZYADILWyW6AmS1UH6gDj/SFUUUS4MBAWs/7mxnkRPc/F4iHezDP+O8t0dO8WHiEOdg==",
+ "dev": true,
+ "engines": {
+ "node": ">=6"
+ }
+ },
+ "node_modules/ws": {
+ "version": "8.16.0",
+ "resolved": "https://registry.npmjs.org/ws/-/ws-8.16.0.tgz",
+ "integrity": "sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ==",
+ "dev": true,
+ "engines": {
+ "node": ">=10.0.0"
+ },
+ "peerDependencies": {
+ "bufferutil": "^4.0.1",
+ "utf-8-validate": ">=5.0.2"
+ },
+ "peerDependenciesMeta": {
+ "bufferutil": {
+ "optional": true
+ },
+ "utf-8-validate": {
+ "optional": true
+ }
+ }
+ }
+ }
+ }
node_modules/@gradio/client/CHANGELOG.md ADDED
@@ -0,0 +1,251 @@
1
+ # @gradio/client
2
+
3
+ ## 0.12.0
4
+
5
+ ### Features
6
+
7
+ - [#7183](https://github.com/gradio-app/gradio/pull/7183) [`49d9c48`](https://github.com/gradio-app/gradio/commit/49d9c48537aa706bf72628e3640389470138bdc6) - [WIP] Refactor file normalization to be in the backend and remove it from the frontend of each component. Thanks [@abidlabs](https://github.com/abidlabs)!
8
+
9
+ ## 0.11.0
10
+
11
+ ### Features
12
+
13
+ - [#7102](https://github.com/gradio-app/gradio/pull/7102) [`68a54a7`](https://github.com/gradio-app/gradio/commit/68a54a7a310d8d7072fdae930bf1cfdf12c45a7f) - Improve chatbot streaming performance with diffs. Thanks [@aliabid94](https://github.com/aliabid94)!/n Note that this PR changes the API format for generator functions, which would be a breaking change for any clients reading the EventStream directly
14
+
15
+ ## 0.10.1
16
+
17
+ ### Fixes
18
+
19
+ - [#7055](https://github.com/gradio-app/gradio/pull/7055) [`3c3cf86`](https://github.com/gradio-app/gradio/commit/3c3cf8618a8cad1ef66a7f96664923d2c9f5e0e2) - Fix UI freeze on rapid generators. Thanks [@aliabid94](https://github.com/aliabid94)!
20
+
21
+ ## 0.10.0
22
+
23
+ ### Features
24
+
25
+ - [#6931](https://github.com/gradio-app/gradio/pull/6931) [`6c863af`](https://github.com/gradio-app/gradio/commit/6c863af92fa9ceb5c638857eb22cc5ddb718d549) - Fix functional tests. Thanks [@aliabid94](https://github.com/aliabid94)!
26
+ - [#6820](https://github.com/gradio-app/gradio/pull/6820) [`649cd4d`](https://github.com/gradio-app/gradio/commit/649cd4d68041d11fcbe31f8efa455345ac49fc74) - Use `EventSource_factory` in `open_stream()` for Wasm. Thanks [@whitphx](https://github.com/whitphx)!
27
+
28
+ ## 0.9.4
29
+
30
+ ### Fixes
31
+
32
+ - [#6863](https://github.com/gradio-app/gradio/pull/6863) [`d406855`](https://github.com/gradio-app/gradio/commit/d4068557953746662235d595ec435c42ceb24414) - Fix JS Client when app is running behind a proxy. Thanks [@freddyaboulton](https://github.com/freddyaboulton)!
33
+
34
+ ## 0.9.3
35
+
36
+ ### Features
37
+
38
+ - [#6814](https://github.com/gradio-app/gradio/pull/6814) [`828fb9e`](https://github.com/gradio-app/gradio/commit/828fb9e6ce15b6ea08318675a2361117596a1b5d) - Refactor queue so that there are separate queues for each concurrency id. Thanks [@aliabid94](https://github.com/aliabid94)!
39
+
40
+ ## 0.9.2
41
+
42
+ ### Features
43
+
44
+ - [#6798](https://github.com/gradio-app/gradio/pull/6798) [`245d58e`](https://github.com/gradio-app/gradio/commit/245d58eff788e8d44a59d37a2d9b26d0f08a62b4) - Improve how server/js client handle unexpected errors. Thanks [@freddyaboulton](https://github.com/freddyaboulton)!
45
+
46
+ ## 0.9.1
47
+
48
+ ### Fixes
49
+
50
+ - [#6693](https://github.com/gradio-app/gradio/pull/6693) [`34f9431`](https://github.com/gradio-app/gradio/commit/34f943101bf7dd6b8a8974a6131c1ed7c4a0dac0) - Python client properly handles hearbeat and log messages. Also handles responses longer than 65k. Thanks [@freddyaboulton](https://github.com/freddyaboulton)!
51
+
52
+ ## 0.9.0
53
+
54
+ ### Features
55
+
56
+ - [#6398](https://github.com/gradio-app/gradio/pull/6398) [`67ddd40`](https://github.com/gradio-app/gradio/commit/67ddd40b4b70d3a37cb1637c33620f8d197dbee0) - Lite v4. Thanks [@whitphx](https://github.com/whitphx)!
57
+
58
+ ### Fixes
59
+
60
+ - [#6556](https://github.com/gradio-app/gradio/pull/6556) [`d76bcaa`](https://github.com/gradio-app/gradio/commit/d76bcaaaf0734aaf49a680f94ea9d4d22a602e70) - Fix api event drops. Thanks [@aliabid94](https://github.com/aliabid94)!
61
+
62
+ ## 0.8.2
63
+
64
+ ### Features
65
+
66
+ - [#6511](https://github.com/gradio-app/gradio/pull/6511) [`71f1a1f99`](https://github.com/gradio-app/gradio/commit/71f1a1f9931489d465c2c1302a5c8d768a3cd23a) - Mark `FileData.orig_name` optional on the frontend aligning the type definition on the Python side. Thanks [@whitphx](https://github.com/whitphx)!
67
+
68
+ ## 0.8.1
69
+
70
+ ### Fixes
71
+
72
+ - [#6383](https://github.com/gradio-app/gradio/pull/6383) [`324867f63`](https://github.com/gradio-app/gradio/commit/324867f63c920113d89a565892aa596cf8b1e486) - Fix event target. Thanks [@aliabid94](https://github.com/aliabid94)!
73
+
74
+ ## 0.8.0
75
+
76
+ ### Features
77
+
78
+ - [#6307](https://github.com/gradio-app/gradio/pull/6307) [`f1409f95e`](https://github.com/gradio-app/gradio/commit/f1409f95ed39c5565bed6a601e41f94e30196a57) - Provide status updates on file uploads. Thanks [@freddyaboulton](https://github.com/freddyaboulton)!
79
+
80
+ ## 0.7.2
81
+
82
+ ### Fixes
83
+
84
+ - [#6327](https://github.com/gradio-app/gradio/pull/6327) [`bca6c2c80`](https://github.com/gradio-app/gradio/commit/bca6c2c80f7e5062427019de45c282238388af95) - Restore query parameters in request. Thanks [@aliabid94](https://github.com/aliabid94)!
85
+
86
+ ## 0.7.1
87
+
88
+ ### Features
89
+
90
+ - [#6137](https://github.com/gradio-app/gradio/pull/6137) [`2ba14b284`](https://github.com/gradio-app/gradio/commit/2ba14b284f908aa13859f4337167a157075a68eb) - JS Param. Thanks [@dawoodkhan82](https://github.com/dawoodkhan82)!
91
+
92
+ ## 0.7.0
93
+
94
+ ### Features
95
+
96
+ - [#5498](https://github.com/gradio-app/gradio/pull/5498) [`287fe6782`](https://github.com/gradio-app/gradio/commit/287fe6782825479513e79a5cf0ba0fbfe51443d7) - fix circular dependency with client + upload. Thanks [@pngwn](https://github.com/pngwn)!
97
+ - [#5498](https://github.com/gradio-app/gradio/pull/5498) [`287fe6782`](https://github.com/gradio-app/gradio/commit/287fe6782825479513e79a5cf0ba0fbfe51443d7) - Image v4. Thanks [@pngwn](https://github.com/pngwn)!
98
+ - [#5498](https://github.com/gradio-app/gradio/pull/5498) [`287fe6782`](https://github.com/gradio-app/gradio/commit/287fe6782825479513e79a5cf0ba0fbfe51443d7) - Swap websockets for SSE. Thanks [@pngwn](https://github.com/pngwn)!
99
+
100
+ ## 0.7.0-beta.1
101
+
102
+ ### Features
103
+
104
+ - [#6143](https://github.com/gradio-app/gradio/pull/6143) [`e4f7b4b40`](https://github.com/gradio-app/gradio/commit/e4f7b4b409323b01aa01b39e15ce6139e29aa073) - fix circular dependency with client + upload. Thanks [@pngwn](https://github.com/pngwn)!
105
+ - [#6094](https://github.com/gradio-app/gradio/pull/6094) [`c476bd5a5`](https://github.com/gradio-app/gradio/commit/c476bd5a5b70836163b9c69bf4bfe068b17fbe13) - Image v4. Thanks [@pngwn](https://github.com/pngwn)!
106
+ - [#6069](https://github.com/gradio-app/gradio/pull/6069) [`bf127e124`](https://github.com/gradio-app/gradio/commit/bf127e1241a41401e144874ea468dff8474eb505) - Swap websockets for SSE. Thanks [@aliabid94](https://github.com/aliabid94)!
107
+
108
+ ## 0.7.0-beta.0
109
+
110
+ ### Features
111
+
112
+ - [#6016](https://github.com/gradio-app/gradio/pull/6016) [`83e947676`](https://github.com/gradio-app/gradio/commit/83e947676d327ca2ab6ae2a2d710c78961c771a0) - Format js in v4 branch. Thanks [@freddyaboulton](https://github.com/freddyaboulton)!
113
+
114
+ ### Fixes
115
+
116
+ - [#6046](https://github.com/gradio-app/gradio/pull/6046) [`dbb7de5e0`](https://github.com/gradio-app/gradio/commit/dbb7de5e02c53fee05889d696d764d212cb96c74) - fix tests. Thanks [@pngwn](https://github.com/pngwn)!
117
+
118
+ ## 0.6.0
119
+
120
+ ### Features
121
+
122
+ - [#5972](https://github.com/gradio-app/gradio/pull/5972) [`11a300791`](https://github.com/gradio-app/gradio/commit/11a3007916071f0791844b0a37f0fb4cec69cea3) - Lite: Support opening the entrypoint HTML page directly in browser via the `file:` protocol. Thanks [@whitphx](https://github.com/whitphx)!
123
+
124
+ ## 0.5.2
125
+
126
+ ### Fixes
127
+
128
+ - [#5840](https://github.com/gradio-app/gradio/pull/5840) [`4e62b8493`](https://github.com/gradio-app/gradio/commit/4e62b8493dfce50bafafe49f1a5deb929d822103) - Ensure websocket polyfill doesn't load if there is already a `global.Webocket` property set. Thanks [@Jay2theWhy](https://github.com/Jay2theWhy)!
129
+
130
+ ## 0.5.1
131
+
132
+ ### Fixes
133
+
134
+ - [#5816](https://github.com/gradio-app/gradio/pull/5816) [`796145e2c`](https://github.com/gradio-app/gradio/commit/796145e2c48c4087bec17f8ec0be4ceee47170cb) - Fix calls to the component server so that `gr.FileExplorer` works on Spaces. Thanks [@abidlabs](https://github.com/abidlabs)!
135
+
136
+ ## 0.5.0
137
+
138
+ ### Highlights
139
+
140
+ #### new `FileExplorer` component ([#5672](https://github.com/gradio-app/gradio/pull/5672) [`e4a307ed6`](https://github.com/gradio-app/gradio/commit/e4a307ed6cde3bbdf4ff2f17655739addeec941e))
141
+
142
+ Thanks to a new capability that allows components to communicate directly with the server _without_ passing data via the value, we have created a new `FileExplorer` component.
143
+
144
+ This component allows you to populate the explorer by passing a glob, but only provides the selected file(s) in your prediction function.
145
+
146
+ Users can then navigate the virtual filesystem and select files which will be accessible in your predict function. This component will allow developers to build more complex spaces, with more flexible input options.
147
+
148
+ ![output](https://github.com/pngwn/MDsveX/assets/12937446/ef108f0b-0e84-4292-9984-9dc66b3e144d)
149
+
150
+ For more information check the [`FileExplorer` documentation](https://gradio.app/docs/fileexplorer).
151
+
152
+ Thanks [@aliabid94](https://github.com/aliabid94)!
153
+
154
+ ### Features
155
+
156
+ - [#5787](https://github.com/gradio-app/gradio/pull/5787) [`caeee8bf7`](https://github.com/gradio-app/gradio/commit/caeee8bf7821fd5fe2f936ed82483bed00f613ec) - ensure the client does not depend on `window` when running in a node environment. Thanks [@gibiee](https://github.com/gibiee)!
157
+
158
+ ### Fixes
159
+
160
+ - [#5776](https://github.com/gradio-app/gradio/pull/5776) [`c0fef4454`](https://github.com/gradio-app/gradio/commit/c0fef44541bfa61568bdcfcdfc7d7d79869ab1df) - Revert replica proxy logic and instead implement using the `root` variable. Thanks [@freddyaboulton](https://github.com/freddyaboulton)!
161
+
162
+ ## 0.4.2
163
+
164
+ ### Features
165
+
166
+ - [#5124](https://github.com/gradio-app/gradio/pull/5124) [`6e56a0d9b`](https://github.com/gradio-app/gradio/commit/6e56a0d9b0c863e76c69e1183d9d40196922b4cd) - Lite: Websocket queueing. Thanks [@whitphx](https://github.com/whitphx)!
167
+
168
+ ## 0.4.1
169
+
170
+ ### Fixes
171
+
172
+ - [#5705](https://github.com/gradio-app/gradio/pull/5705) [`78e7cf516`](https://github.com/gradio-app/gradio/commit/78e7cf5163e8d205e8999428fce4c02dbdece25f) - ensure internal data has updated before dispatching `success` or `then` events. Thanks [@pngwn](https://github.com/pngwn)!
173
+
174
+ ## 0.4.0
175
+
176
+ ### Features
177
+
178
+ - [#5682](https://github.com/gradio-app/gradio/pull/5682) [`c57f1b75e`](https://github.com/gradio-app/gradio/commit/c57f1b75e272c76b0af4d6bd0c7f44743ff34f26) - Fix functional tests. Thanks [@abidlabs](https://github.com/abidlabs)!
179
+ - [#5681](https://github.com/gradio-app/gradio/pull/5681) [`40de3d217`](https://github.com/gradio-app/gradio/commit/40de3d2178b61ebe424b6f6228f94c0c6f679bea) - add query parameters to the `gr.Request` object through the `query_params` attribute. Thanks [@DarhkVoyd](https://github.com/DarhkVoyd)!
180
+ - [#5653](https://github.com/gradio-app/gradio/pull/5653) [`ea0e00b20`](https://github.com/gradio-app/gradio/commit/ea0e00b207b4b90a10e9d054c4202d4e705a29ba) - Prevent Clients from accessing API endpoints that set `api_name=False`. Thanks [@abidlabs](https://github.com/abidlabs)!
181
+
182
+ ## 0.3.1
183
+
184
+ ### Fixes
185
+
186
+ - [#5412](https://github.com/gradio-app/gradio/pull/5412) [`26fef8c7`](https://github.com/gradio-app/gradio/commit/26fef8c7f85a006c7e25cdbed1792df19c512d02) - Skip view_api request in js client when auth enabled. Thanks [@freddyaboulton](https://github.com/freddyaboulton)!
187
+
188
+ ## 0.3.0
189
+
190
+ ### Features
191
+
192
+ - [#5267](https://github.com/gradio-app/gradio/pull/5267) [`119c8343`](https://github.com/gradio-app/gradio/commit/119c834331bfae60d4742c8f20e9cdecdd67e8c2) - Faster reload mode. Thanks [@freddyaboulton](https://github.com/freddyaboulton)!
193
+
194
+ ## 0.2.1
195
+
196
+ ### Features
197
+
198
+ - [#5173](https://github.com/gradio-app/gradio/pull/5173) [`730f0c1d`](https://github.com/gradio-app/gradio/commit/730f0c1d54792eb11359e40c9f2326e8a6e39203) - Ensure gradio client works as expected for functions that return nothing. Thanks [@raymondtri](https://github.com/raymondtri)!
199
+
200
+ ## 0.2.0
201
+
202
+ ### Features
203
+
204
+ - [#5133](https://github.com/gradio-app/gradio/pull/5133) [`61129052`](https://github.com/gradio-app/gradio/commit/61129052ed1391a75c825c891d57fa0ad6c09fc8) - Update dependency esbuild to ^0.19.0. Thanks [@renovate](https://github.com/apps/renovate)!
205
+ - [#5035](https://github.com/gradio-app/gradio/pull/5035) [`8b4eb8ca`](https://github.com/gradio-app/gradio/commit/8b4eb8cac9ea07bde31b44e2006ca2b7b5f4de36) - JS Client: Fixes cannot read properties of null (reading 'is_file'). Thanks [@raymondtri](https://github.com/raymondtri)!
206
+
207
+ ### Fixes
208
+
209
+ - [#5075](https://github.com/gradio-app/gradio/pull/5075) [`67265a58`](https://github.com/gradio-app/gradio/commit/67265a58027ef1f9e4c0eb849a532f72eaebde48) - Allow supporting >1000 files in `gr.File()` and `gr.UploadButton()`. Thanks [@abidlabs](https://github.com/abidlabs)!
210
+
211
+ ## 0.1.4
212
+
213
+ ### Patch Changes
214
+
215
+ - [#4717](https://github.com/gradio-app/gradio/pull/4717) [`ab5d1ea0`](https://github.com/gradio-app/gradio/commit/ab5d1ea0de87ed888779b66fd2a705583bd29e02) Thanks [@whitphx](https://github.com/whitphx)! - Fix the package description
216
+
217
+ ## 0.1.3
218
+
219
+ ### Patch Changes
220
+
221
+ - [#4357](https://github.com/gradio-app/gradio/pull/4357) [`0dbd8f7f`](https://github.com/gradio-app/gradio/commit/0dbd8f7fee4b4877f783fa7bc493f98bbfc3d01d) Thanks [@pngwn](https://github.com/pngwn)! - Various internal refactors and cleanups.
222
+
223
+ ## 0.1.2
224
+
225
+ ### Patch Changes
226
+
227
+ - [#4273](https://github.com/gradio-app/gradio/pull/4273) [`1d0f0a9d`](https://github.com/gradio-app/gradio/commit/1d0f0a9db096552e67eb2197c932342587e9e61e) Thanks [@pngwn](https://github.com/pngwn)! - Ensure websocket error messages are correctly handled.
228
+
229
+ - [#4315](https://github.com/gradio-app/gradio/pull/4315) [`b525b122`](https://github.com/gradio-app/gradio/commit/b525b122dd8569bbaf7e06db5b90d622d2e9073d) Thanks [@whitphx](https://github.com/whitphx)! - Refacor types.
230
+
231
+ - [#4271](https://github.com/gradio-app/gradio/pull/4271) [`1151c525`](https://github.com/gradio-app/gradio/commit/1151c5253554cb87ebd4a44a8a470ac215ff782b) Thanks [@pngwn](https://github.com/pngwn)! - Ensure the full root path is always respected when making requests to a gradio app server.
232
+
233
+ ## 0.1.1
234
+
235
+ ### Patch Changes
236
+
237
+ - [#4201](https://github.com/gradio-app/gradio/pull/4201) [`da5b4ee1`](https://github.com/gradio-app/gradio/commit/da5b4ee11721175858ded96e5710225369097f74) Thanks [@pngwn](https://github.com/pngwn)! - Ensure semiver is bundled so CDN links work correctly.
238
+
239
+ - [#4202](https://github.com/gradio-app/gradio/pull/4202) [`a26e9afd`](https://github.com/gradio-app/gradio/commit/a26e9afde319382993e6ddc77cc4e56337a31248) Thanks [@pngwn](https://github.com/pngwn)! - Ensure all URLs returned by the client are complete URLs with the correct host instead of an absolute path relative to a server.
240
+
241
+ ## 0.1.0
242
+
243
+ ### Minor Changes
244
+
245
+ - [#4185](https://github.com/gradio-app/gradio/pull/4185) [`67239ca9`](https://github.com/gradio-app/gradio/commit/67239ca9b2fe3796853fbf7bf865c9e4b383200d) Thanks [@pngwn](https://github.com/pngwn)! - Update client for initial release
246
+
247
+ ### Patch Changes
248
+
249
+ - [#3692](https://github.com/gradio-app/gradio/pull/3692) [`48e8b113`](https://github.com/gradio-app/gradio/commit/48e8b113f4b55e461d9da4f153bf72aeb4adf0f1) Thanks [@pngwn](https://github.com/pngwn)! - Ensure client works in node, create ESM bundle and generate typescript declaration files.
250
+
251
+ - [#3605](https://github.com/gradio-app/gradio/pull/3605) [`ae4277a9`](https://github.com/gradio-app/gradio/commit/ae4277a9a83d49bdadfe523b0739ba988128e73b) Thanks [@pngwn](https://github.com/pngwn)! - Update readme.
node_modules/@gradio/client/LICENSE ADDED
@@ -0,0 +1,201 @@
1
+ Apache License
2
+ Version 2.0, January 2004
3
+ http://www.apache.org/licenses/
4
+
5
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6
+
7
+ 1. Definitions.
8
+
9
+ "License" shall mean the terms and conditions for use, reproduction,
10
+ and distribution as defined by Sections 1 through 9 of this document.
11
+
12
+ "Licensor" shall mean the copyright owner or entity authorized by
13
+ the copyright owner that is granting the License.
14
+
15
+ "Legal Entity" shall mean the union of the acting entity and all
16
+ other entities that control, are controlled by, or are under common
17
+ control with that entity. For the purposes of this definition,
18
+ "control" means (i) the power, direct or indirect, to cause the
19
+ direction or management of such entity, whether by contract or
20
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
21
+ outstanding shares, or (iii) beneficial ownership of such entity.
22
+
23
+ "You" (or "Your") shall mean an individual or Legal Entity
24
+ exercising permissions granted by this License.
25
+
26
+ "Source" form shall mean the preferred form for making modifications,
27
+ including but not limited to software source code, documentation
28
+ source, and configuration files.
29
+
30
+ "Object" form shall mean any form resulting from mechanical
31
+ transformation or translation of a Source form, including but
32
+ not limited to compiled object code, generated documentation,
33
+ and conversions to other media types.
34
+
35
+ "Work" shall mean the work of authorship, whether in Source or
36
+ Object form, made available under the License, as indicated by a
37
+ copyright notice that is included in or attached to the work
38
+ (an example is provided in the Appendix below).
39
+
40
+ "Derivative Works" shall mean any work, whether in Source or Object
41
+ form, that is based on (or derived from) the Work and for which the
42
+ editorial revisions, annotations, elaborations, or other modifications
43
+ represent, as a whole, an original work of authorship. For the purposes
44
+ of this License, Derivative Works shall not include works that remain
45
+ separable from, or merely link (or bind by name) to the interfaces of,
46
+ the Work and Derivative Works thereof.
47
+
48
+ "Contribution" shall mean any work of authorship, including
49
+ the original version of the Work and any modifications or additions
50
+ to that Work or Derivative Works thereof, that is intentionally
51
+ submitted to Licensor for inclusion in the Work by the copyright owner
52
+ or by an individual or Legal Entity authorized to submit on behalf of
53
+ the copyright owner. For the purposes of this definition, "submitted"
54
+ means any form of electronic, verbal, or written communication sent
55
+ to the Licensor or its representatives, including but not limited to
56
+ communication on electronic mailing lists, source code control systems,
57
+ and issue tracking systems that are managed by, or on behalf of, the
58
+ Licensor for the purpose of discussing and improving the Work, but
59
+ excluding communication that is conspicuously marked or otherwise
60
+ designated in writing by the copyright owner as "Not a Contribution."
61
+
62
+ "Contributor" shall mean Licensor and any individual or Legal Entity
63
+ on behalf of whom a Contribution has been received by Licensor and
64
+ subsequently incorporated within the Work.
65
+
66
+ 2. Grant of Copyright License. Subject to the terms and conditions of
67
+ this License, each Contributor hereby grants to You a perpetual,
68
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69
+ copyright license to reproduce, prepare Derivative Works of,
70
+ publicly display, publicly perform, sublicense, and distribute the
71
+ Work and such Derivative Works in Source or Object form.
72
+
73
+ 3. Grant of Patent License. Subject to the terms and conditions of
74
+ this License, each Contributor hereby grants to You a perpetual,
75
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76
+ (except as stated in this section) patent license to make, have made,
77
+ use, offer to sell, sell, import, and otherwise transfer the Work,
78
+ where such license applies only to those patent claims licensable
79
+ by such Contributor that are necessarily infringed by their
80
+ Contribution(s) alone or by combination of their Contribution(s)
81
+ with the Work to which such Contribution(s) was submitted. If You
82
+ institute patent litigation against any entity (including a
83
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
84
+ or a Contribution incorporated within the Work constitutes direct
85
+ or contributory patent infringement, then any patent licenses
86
+ granted to You under this License for that Work shall terminate
87
+ as of the date such litigation is filed.
88
+
89
+ 4. Redistribution. You may reproduce and distribute copies of the
90
+ Work or Derivative Works thereof in any medium, with or without
91
+ modifications, and in Source or Object form, provided that You
92
+ meet the following conditions:
93
+
94
+ (a) You must give any other recipients of the Work or
95
+ Derivative Works a copy of this License; and
96
+
97
+ (b) You must cause any modified files to carry prominent notices
98
+ stating that You changed the files; and
99
+
100
+ (c) You must retain, in the Source form of any Derivative Works
101
+ that You distribute, all copyright, patent, trademark, and
102
+ attribution notices from the Source form of the Work,
103
+ excluding those notices that do not pertain to any part of
104
+ the Derivative Works; and
105
+
106
+ (d) If the Work includes a "NOTICE" text file as part of its
107
+ distribution, then any Derivative Works that You distribute must
108
+ include a readable copy of the attribution notices contained
109
+ within such NOTICE file, excluding those notices that do not
110
+ pertain to any part of the Derivative Works, in at least one
111
+ of the following places: within a NOTICE text file distributed
112
+ as part of the Derivative Works; within the Source form or
113
+ documentation, if provided along with the Derivative Works; or,
114
+ within a display generated by the Derivative Works, if and
115
+ wherever such third-party notices normally appear. The contents
116
+ of the NOTICE file are for informational purposes only and
117
+ do not modify the License. You may add Your own attribution
118
+ notices within Derivative Works that You distribute, alongside
119
+ or as an addendum to the NOTICE text from the Work, provided
120
+ that such additional attribution notices cannot be construed
121
+ as modifying the License.
122
+
123
+ You may add Your own copyright statement to Your modifications and
124
+ may provide additional or different license terms and conditions
125
+ for use, reproduction, or distribution of Your modifications, or
126
+ for any such Derivative Works as a whole, provided Your use,
127
+ reproduction, and distribution of the Work otherwise complies with
128
+ the conditions stated in this License.
129
+
130
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
131
+ any Contribution intentionally submitted for inclusion in the Work
132
+ by You to the Licensor shall be under the terms and conditions of
133
+ this License, without any additional terms or conditions.
134
+ Notwithstanding the above, nothing herein shall supersede or modify
135
+ the terms of any separate license agreement you may have executed
136
+ with Licensor regarding such Contributions.
137
+
138
+ 6. Trademarks. This License does not grant permission to use the trade
139
+ names, trademarks, service marks, or product names of the Licensor,
140
+ except as required for reasonable and customary use in describing the
141
+ origin of the Work and reproducing the content of the NOTICE file.
142
+
143
+ 7. Disclaimer of Warranty. Unless required by applicable law or
144
+ agreed to in writing, Licensor provides the Work (and each
145
+ Contributor provides its Contributions) on an "AS IS" BASIS,
146
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147
+ implied, including, without limitation, any warranties or conditions
148
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149
+ PARTICULAR PURPOSE. You are solely responsible for determining the
150
+ appropriateness of using or redistributing the Work and assume any
151
+ risks associated with Your exercise of permissions under this License.
152
+
153
+ 8. Limitation of Liability. In no event and under no legal theory,
154
+ whether in tort (including negligence), contract, or otherwise,
155
+ unless required by applicable law (such as deliberate and grossly
156
+ negligent acts) or agreed to in writing, shall any Contributor be
157
+ liable to You for damages, including any direct, indirect, special,
158
+ incidental, or consequential damages of any character arising as a
159
+ result of this License or out of the use or inability to use the
160
+ Work (including but not limited to damages for loss of goodwill,
161
+ work stoppage, computer failure or malfunction, or any and all
162
+ other commercial damages or losses), even if such Contributor
163
+ has been advised of the possibility of such damages.
164
+
165
+ 9. Accepting Warranty or Additional Liability. While redistributing
166
+ the Work or Derivative Works thereof, You may choose to offer,
167
+ and charge a fee for, acceptance of support, warranty, indemnity,
168
+ or other liability obligations and/or rights consistent with this
169
+ License. However, in accepting such obligations, You may act only
170
+ on Your own behalf and on Your sole responsibility, not on behalf
171
+ of any other Contributor, and only if You agree to indemnify,
172
+ defend, and hold each Contributor harmless for any liability
173
+ incurred by, or claims asserted against, such Contributor by reason
174
+ of your accepting any such warranty or additional liability.
175
+
176
+ END OF TERMS AND CONDITIONS
177
+
178
+ APPENDIX: How to apply the Apache License to your work.
179
+
180
+ To apply the Apache License to your work, attach the following
181
+ boilerplate notice, with the fields enclosed by brackets "[]"
182
+ replaced with your own identifying information. (Don't include
183
+ the brackets!) The text should be enclosed in the appropriate
184
+ comment syntax for the file format. We also recommend that a
185
+ file or class name and description of purpose be included on the
186
+ same "printed page" as the copyright notice for easier
187
+ identification within third-party archives.
188
+
189
+ Copyright [yyyy] [name of copyright owner]
190
+
191
+ Licensed under the Apache License, Version 2.0 (the "License");
192
+ you may not use this file except in compliance with the License.
193
+ You may obtain a copy of the License at
194
+
195
+ http://www.apache.org/licenses/LICENSE-2.0
196
+
197
+ Unless required by applicable law or agreed to in writing, software
198
+ distributed under the License is distributed on an "AS IS" BASIS,
199
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200
+ See the License for the specific language governing permissions and
201
+ limitations under the License.
node_modules/@gradio/client/README.md ADDED
@@ -0,0 +1,339 @@
1
+ ## JavaScript Client Library
2
+
3
+ A javascript (and typescript) client to call Gradio APIs.
4
+
5
+ ## Installation
6
+
7
+ The Gradio JavaScript client is available on npm as `@gradio/client`. You can install it as below:
8
+
9
+ ```sh
10
+ npm i @gradio/client
11
+ ```
12
+
13
+ ## Usage
14
+
15
+ The JavaScript Gradio Client exposes two named imports, `client` and `duplicate`.
16
+
17
+ ### `client`
18
+
19
+ The client function connects to the API of a hosted Gradio space and returns an object that allows you to make calls to that API.
20
+
21
+ The simplest example looks like this:
22
+
23
+ ```ts
24
+ import { client } from "@gradio/client";
25
+
26
+ const app = await client("user/space-name");
27
+ const result = await app.predict("/predict");
28
+ ```
29
+
30
+ This function accepts two arguments: `source` and `options`:
31
+
32
+ #### `source`
33
+
34
+ This is the url or name of the gradio app whose API you wish to connect to. This parameter is required and should always be a string. For example:
35
+
36
+ ```ts
37
+ client("user/space-name");
38
+ ```
39
+
40
+ #### `options`
41
+
42
+ The options object can optionally be passed a second parameter. This object has two properties, `hf_token` and `status_callback`.
43
+
44
+ ##### `hf_token`
45
+
46
+ This should be a Hugging Face personal access token and is required if you wish to make calls to a private gradio api. This option is optional and should be a string starting with `"hf_"`.
47
+
48
+ Example:
49
+
50
+ ```ts
51
+ import { client } from "@gradio/client";
52
+
53
+ const app = await client("user/space-name", { hf_token: "hf_..." });
54
+ ```
55
+
56
+ ##### `status_callback`
57
+
58
+ This should be a function which will notify your of the status of a space if it is not running. If the gradio API you are connecting to is awake and running or is not hosted on Hugging Face space then this function will do nothing.
59
+
60
+ **Additional context**
61
+
62
+ Applications hosted on Hugging Face spaces can be in a number of different states. As spaces are a GitOps tool and will rebuild when new changes are pushed to the repository, they have various building, running and error states. If a space is not 'running' then the function passed as the `status_callback` will notify you of the current state of the space and the status of the space as it changes. Spaces that are building or sleeping can take longer than usual to respond, so you can use this information to give users feedback about the progress of their action.
63
+
64
+ ```ts
65
+ import { client, type SpaceStatus } from "@gradio/client";
66
+
67
+ const app = await client("user/space-name", {
68
+ // The space_status parameter does not need to be manually annotated, this is just for illustration.
69
+ space_status: (space_status: SpaceStatus) => console.log(space_status)
70
+ });
71
+ ```
72
+
73
+ ```ts
74
+ interface SpaceStatusNormal {
75
+ status: "sleeping" | "running" | "building" | "error" | "stopped";
76
+ detail:
77
+ | "SLEEPING"
78
+ | "RUNNING"
79
+ | "RUNNING_BUILDING"
80
+ | "BUILDING"
81
+ | "NOT_FOUND";
82
+ load_status: "pending" | "error" | "complete" | "generating";
83
+ message: string;
84
+ }
85
+
86
+ interface SpaceStatusError {
87
+ status: "space_error";
88
+ detail: "NO_APP_FILE" | "CONFIG_ERROR" | "BUILD_ERROR" | "RUNTIME_ERROR";
89
+ load_status: "error";
90
+ message: string;
91
+ discussions_enabled: boolean;
92
+
93
+ type SpaceStatus = SpaceStatusNormal | SpaceStatusError;
94
+ ```
95
+
96
+ The gradio client returns an object with a number of methods and properties:
97
+
98
+ #### `predict`
99
+
100
+ The `predict` method allows you to call an api endpoint and get a prediction result:
101
+
102
+ ```ts
103
+ import { client } from "@gradio/client";
104
+
105
+ const app = await client("user/space-name");
106
+ const result = await app.predict("/predict");
107
+ ```
108
+
109
+ `predict` accepts two parameters, `endpoint` and `payload`. It returns a promise that resolves to the prediction result.
110
+
111
+ ##### `endpoint`
112
+
113
+ This is the endpoint for an api request and is required. The default endpoint for a `gradio.Interface` is `"/predict"`. Explicitly named endpoints have a custom name. The endpoint names can be found on the "View API" page of a space.
114
+
115
+ ```ts
116
+ import { client } from "@gradio/client";
117
+
118
+ const app = await client("user/space-name");
119
+ const result = await app.predict("/predict");
120
+ ```
121
+
122
+ ##### `payload`
123
+
124
+ The `payload` argument is generally optional but this depends on the API itself. If the API endpoint depends on values being passed in then it is required for the API request to succeed. The data that should be passed in is detailed on the "View API" page of a space, or accessible via the `view_api()` method of the client.
125
+
126
+ ```ts
127
+ import { client } from "@gradio/client";
128
+
129
+ const app = await client("user/space-name");
130
+ const result = await app.predict("/predict", [1, "Hello", "friends"]);
131
+ ```
132
+
133
+ #### `submit`
134
+
135
+ The `submit` method provides a more flexible way to call an API endpoint, providing you with status updates about the current progress of the prediction as well as supporting more complex endpoint types.
136
+
137
+ ```ts
138
+ import { client } from "@gradio/client";
139
+
140
+ const app = await client("user/space-name");
141
+ const submission = app.submit("/predict", payload);
142
+ ```
143
+
144
+ The `submit` method accepts the same [`endpoint`](#endpoint) and [`payload`](#payload) arguments as `predict`.
145
+
146
+ The `submit` method does not return a promise and should not be awaited, instead it returns an object with a `on`, `off`, and `cancel` methods.
147
+
148
+ ##### `on`
149
+
150
+ The `on` method allows you to subscribe to events related to the submitted API request. There are two types of event that can be subscribed to: `"data"` updates and `"status"` updates.
151
+
152
+ `"data"` updates are issued when the API computes a value, the callback provided as the second argument will be called when such a value is sent to the client. The shape of the data depends on the way the API itself is constructed. This event may fire more than once if that endpoint supports emmitting new values over time.
153
+
154
+ `"status` updates are issued when the status of a request changes. This information allows you to offer feedback to users when the queue position of the request changes, or when the request changes from queued to processing.
155
+
156
+ The status payload look like this:
157
+
158
+ ```ts
159
+ interface Status {
160
+ queue: boolean;
161
+ code?: string;
162
+ success?: boolean;
163
+ stage: "pending" | "error" | "complete" | "generating";
164
+ size?: number;
165
+ position?: number;
166
+ eta?: number;
167
+ message?: string;
168
+ progress_data?: Array<{
169
+ progress: number | null;
170
+ index: number | null;
171
+ length: number | null;
172
+ unit: string | null;
173
+ desc: string | null;
174
+ }>;
175
+ time?: Date;
176
+ }
177
+ ```
178
+
179
+ Usage of these subscribe callback looks like this:
180
+
181
+ ```ts
182
+ import { client } from "@gradio/client";
183
+
184
+ const app = await client("user/space-name");
185
+ const submission = app
186
+ .submit("/predict", payload)
187
+ .on("data", (data) => console.log(data))
188
+ .on("status", (status: Status) => console.log(status));
189
+ ```
190
+
191
+ ##### `off`
192
+
193
+ The `off` method unsubscribes from a specific event of the submitted job and works similarly to `document.removeEventListener`; both the event name and the original callback must be passed in to successfully unsubscribe:
194
+
195
+ ```ts
196
+ import { client } from "@gradio/client";
197
+
198
+ const app = await client("user/space-name");
199
+ const handle_data = (data) => console.log(data);
200
+
201
+ const submission = app.submit("/predict", payload).on("data", handle_data);
202
+
203
+ // later
204
+ submission.off("/predict", handle_data);
205
+ ```
206
+
207
+ ##### `destroy`
208
+
209
+ The `destroy` method will remove all subscriptions to a job, regardless of whether or not they are `"data"` or `"status"` events. This is a convenience method for when you do not want to unsubscribe use the `off` method.
210
+
211
+ ```js
212
+ import { client } from "@gradio/client";
213
+
214
+ const app = await client("user/space-name");
215
+ const handle_data = (data) => console.log(data);
216
+
217
+ const submission = app.submit("/predict", payload).on("data", handle_data);
218
+
219
+ // later
220
+ submission.destroy();
221
+ ```
222
+
223
+ ##### `cancel`
224
+
225
+ Certain types of gradio function can run repeatedly and in some cases indefinitely. the `cancel` method will stop such an endpoints and prevent the API from issuing additional updates.
226
+
227
+ ```ts
228
+ import { client } from "@gradio/client";
229
+
230
+ const app = await client("user/space-name");
231
+ const submission = app
232
+ .submit("/predict", payload)
233
+ .on("data", (data) => console.log(data));
234
+
235
+ // later
236
+
237
+ submission.cancel();
238
+ ```
239
+
240
+ #### `view_api`
241
+
242
+ The `view_api` method provides details about the API you are connected to. It returns a JavaScript object of all named endpoints, unnamed endpoints and what values they accept and return. This method does not accept arguments.
243
+
244
+ ```ts
245
+ import { client } from "@gradio/client";
246
+
247
+ const app = await client("user/space-name");
248
+ const api_info = await app.view_api();
249
+
250
+ console.log(api_info);
251
+ ```
252
+
253
+ #### `config`
254
+
255
+ The `config` property contains the configuration for the gradio application you are connected to. This object may contain useful meta information about the application.
256
+
257
+ ```ts
258
+ import { client } from "@gradio/client";
259
+
260
+ const app = await client("user/space-name");
261
+ console.log(app.config);
262
+ ```
263
+
264
+ ### `duplicate`
265
+
266
+ The duplicate function will attempt to duplicate the space that is referenced and return an instance of `client` connected to that space. If the space has already been duplicated then it will not create a new duplicate and will instead connect to the existing duplicated space. The huggingface token that is passed in will dictate the user under which the space is created.
267
+
268
+ `duplicate` accepts the same arguments as `client` with the addition of a `private` options property dictating whether the duplicated space should be private or public. A huggingface token is required for duplication to work.
269
+
270
+ ```ts
271
+ import { duplicate } from "@gradio/client";
272
+
273
+ const app = await duplicate("user/space-name", {
274
+ hf_token: "hf_..."
275
+ });
276
+ ```
277
+
278
+ This function accepts two arguments: `source` and `options`:
279
+
280
+ #### `source`
281
+
282
+ The space to duplicate and connect to. [See `client`'s `source` parameter](#source).
283
+
284
+ #### `options`
285
+
286
+ Accepts all options that `client` accepts, except `hf_token` is required. [See `client`'s `options` parameter](#source).
287
+
288
+ `duplicate` also accepts one additional `options` property.
289
+
290
+ ##### `private`
291
+
292
+ This is an optional property specific to `duplicate`'s options object and will determine whether the space should be public or private. Spaces duplicated via the `duplicate` method are public by default.
293
+
294
+ ```ts
295
+ import { duplicate } from "@gradio/client";
296
+
297
+ const app = await duplicate("user/space-name", {
298
+ hf_token: "hf_...",
299
+ private: true
300
+ });
301
+ ```
302
+
303
+ ##### `timeout`
304
+
305
+ This is an optional property specific to `duplicate`'s options object and will set the timeout in minutes before the duplicated space will go to sleep.
306
+
307
+ ```ts
308
+ import { duplicate } from "@gradio/client";
309
+
310
+ const app = await duplicate("user/space-name", {
311
+ hf_token: "hf_...",
312
+ private: true,
313
+ timeout: 5
314
+ });
315
+ ```
316
+
317
+ ##### `hardware`
318
+
319
+ This is an optional property specific to `duplicate`'s options object and will set the hardware for the duplicated space. By default the hardware used will match that of the original space. If this cannot be obtained it will default to `"cpu-basic"`. For hardware upgrades (beyond the basic CPU tier), you may be required to provide [billing information on Hugging Face](https://huggingface.co/settings/billing).
320
+
321
+ Possible hardware options are:
322
+
323
+ - `"cpu-basic"`
324
+ - `"cpu-upgrade"`
325
+ - `"t4-small"`
326
+ - `"t4-medium"`
327
+ - `"a10g-small"`
328
+ - `"a10g-large"`
329
+ - `"a100-large"`
330
+
331
+ ```ts
332
+ import { duplicate } from "@gradio/client";
333
+
334
+ const app = await duplicate("user/space-name", {
335
+ hf_token: "hf_...",
336
+ private: true,
337
+ hardware: "a10g-small"
338
+ });
339
+ ```
node_modules/@gradio/client/dist/client.d.ts ADDED
@@ -0,0 +1,74 @@
+ import { hardware_types } from "./utils.js";
+ import type { EventType, EventListener, PostResponse, UploadResponse, SpaceStatusCallback } from "./types.js";
+ import type { Config } from "./types.js";
+ type event = <K extends EventType>(eventType: K, listener: EventListener<K>) => SubmitReturn;
+ type predict = (endpoint: string | number, data?: unknown[], event_data?: unknown) => Promise<unknown>;
+ type client_return = {
+ predict: predict;
+ config: Config;
+ submit: (endpoint: string | number, data?: unknown[], event_data?: unknown, trigger_id?: number | null) => SubmitReturn;
+ component_server: (component_id: number, fn_name: string, data: unknown[]) => any;
+ view_api: (c?: Config) => Promise<ApiInfo<JsApiData>>;
+ };
+ type SubmitReturn = {
+ on: event;
+ off: event;
+ cancel: () => Promise<void>;
+ destroy: () => void;
+ };
+ export declare let NodeBlob: any;
+ export declare function duplicate(app_reference: string, options: {
+ hf_token: `hf_${string}`;
+ private?: boolean;
+ status_callback: SpaceStatusCallback;
+ hardware?: (typeof hardware_types)[number];
+ timeout?: number;
+ }): Promise<client_return>;
+ interface Client {
+ post_data: (url: string, body: unknown, token?: `hf_${string}`) => Promise<[PostResponse, number]>;
+ upload_files: (root: string, files: File[], token?: `hf_${string}`, upload_id?: string) => Promise<UploadResponse>;
+ client: (app_reference: string, options: {
+ hf_token?: `hf_${string}`;
+ status_callback?: SpaceStatusCallback;
+ }) => Promise<client_return>;
+ handle_blob: (endpoint: string, data: unknown[], api_info: ApiInfo<JsApiData>, token?: `hf_${string}`) => Promise<unknown[]>;
+ }
+ export declare function api_factory(fetch_implementation: typeof fetch, EventSource_factory: (url: URL) => EventSource): Client;
+ export declare const post_data: (url: string, body: unknown, token?: `hf_${string}`) => Promise<[PostResponse, number]>, upload_files: (root: string, files: File[], token?: `hf_${string}`, upload_id?: string) => Promise<UploadResponse>, client: (app_reference: string, options: {
+ hf_token?: `hf_${string}`;
+ status_callback?: SpaceStatusCallback;
+ }) => Promise<client_return>, handle_blob: (endpoint: string, data: unknown[], api_info: ApiInfo<JsApiData>, token?: `hf_${string}`) => Promise<unknown[]>;
+ interface ApiData {
+ label: string;
+ type: {
+ type: any;
+ description: string;
+ };
+ component: string;
+ example_input?: any;
+ }
+ interface JsApiData {
+ label: string;
+ type: string;
+ component: string;
+ example_input: any;
+ }
+ interface EndpointInfo<T extends ApiData | JsApiData> {
+ parameters: T[];
+ returns: T[];
+ }
+ interface ApiInfo<T extends ApiData | JsApiData> {
+ named_endpoints: {
+ [key: string]: EndpointInfo<T>;
+ };
+ unnamed_endpoints: {
+ [key: string]: EndpointInfo<T>;
+ };
+ }
+ export declare function walk_and_store_blobs(param: any, type?: any, path?: any[], root?: boolean, api_info?: any): Promise<{
+ path: string[];
+ type: string;
+ blob: Blob | false;
+ }[]>;
+ export {};
+ //# sourceMappingURL=client.d.ts.map
node_modules/@gradio/client/dist/client.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"client.d.ts","sourceRoot":"","sources":["../src/client.ts"],"names":[],"mappings":"AAIA,OAAO,EAQN,cAAc,EAGd,MAAM,YAAY,CAAC;AAEpB,OAAO,KAAK,EACX,SAAS,EACT,aAAa,EAIb,YAAY,EACZ,cAAc,EAGd,mBAAmB,EACnB,MAAM,YAAY,CAAC;AAIpB,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,YAAY,CAAC;AAEzC,KAAK,KAAK,GAAG,CAAC,CAAC,SAAS,SAAS,EAChC,SAAS,EAAE,CAAC,EACZ,QAAQ,EAAE,aAAa,CAAC,CAAC,CAAC,KACtB,YAAY,CAAC;AAClB,KAAK,OAAO,GAAG,CACd,QAAQ,EAAE,MAAM,GAAG,MAAM,EACzB,IAAI,CAAC,EAAE,OAAO,EAAE,EAChB,UAAU,CAAC,EAAE,OAAO,KAChB,OAAO,CAAC,OAAO,CAAC,CAAC;AAEtB,KAAK,aAAa,GAAG;IACpB,OAAO,EAAE,OAAO,CAAC;IACjB,MAAM,EAAE,MAAM,CAAC;IACf,MAAM,EAAE,CACP,QAAQ,EAAE,MAAM,GAAG,MAAM,EACzB,IAAI,CAAC,EAAE,OAAO,EAAE,EAChB,UAAU,CAAC,EAAE,OAAO,EACpB,UAAU,CAAC,EAAE,MAAM,GAAG,IAAI,KACtB,YAAY,CAAC;IAClB,gBAAgB,EAAE,CACjB,YAAY,EAAE,MAAM,EACpB,OAAO,EAAE,MAAM,EACf,IAAI,EAAE,OAAO,EAAE,KACX,GAAG,CAAC;IACT,QAAQ,EAAE,CAAC,CAAC,CAAC,EAAE,MAAM,KAAK,OAAO,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC;CACtD,CAAC;AAEF,KAAK,YAAY,GAAG;IACnB,EAAE,EAAE,KAAK,CAAC;IACV,GAAG,EAAE,KAAK,CAAC;IACX,MAAM,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;IAC5B,OAAO,EAAE,MAAM,IAAI,CAAC;CACpB,CAAC;AAKF,eAAO,IAAI,QAAQ,KAAA,CAAC;AAEpB,wBAAsB,SAAS,CAC9B,aAAa,EAAE,MAAM,EACrB,OAAO,EAAE;IACR,QAAQ,EAAE,MAAM,MAAM,EAAE,CAAC;IACzB,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,eAAe,EAAE,mBAAmB,CAAC;IACrC,QAAQ,CAAC,EAAE,CAAC,OAAO,cAAc,CAAC,CAAC,MAAM,CAAC,CAAC;IAC3C,OAAO,CAAC,EAAE,MAAM,CAAC;CACjB,GACC,OAAO,CAAC,aAAa,CAAC,CAmExB;AAED,UAAU,MAAM;IACf,SAAS,EAAE,CACV,GAAG,EAAE,MAAM,EACX,IAAI,EAAE,OAAO,EACb,KAAK,CAAC,EAAE,MAAM,MAAM,EAAE,KAClB,OAAO,CAAC,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,CAAC;IACrC,YAAY,EAAE,CACb,IAAI,EAAE,MAAM,EACZ,KAAK,EAAE,IAAI,EAAE,EACb,KAAK,CAAC,EAAE,MAAM,MAAM,EAAE,EACtB,SAAS,CAAC,EAAE,MAAM,KACd,OAAO,CAAC,cAAc,CAAC,CAAC;IAC7B,MAAM,EAAE,CACP,aAAa,EAAE,MAAM,EACrB,OAAO,EAAE;QACR,QAAQ,CAAC,EAAE,MAAM,MAAM,EAAE,CAAC;QAC1B,eAAe,CAAC,EAAE,mBAAmB,CAAC;KACtC,KACG,OAAO,CAAC,aAAa,CAAC,CAAC;IAC5B,WAAW,EAAE,CACZ,QAAQ,EAAE,MAAM,EAChB,IAAI,EAAE,OAAO,EAAE,EACf,QAAQ,EAAE,OAAO,CAAC,SAAS,CAAC,EAC5B,KAAK,CAAC,EAAE,MAAM,MAAM,EAAE,KAClB,OAAO,CAAC,OAAO,EAAE,CAAC,CAAC;CACxB;AAED,wBAAgB,WAAW,CAC1B,oBAAoB,EAAE,OAAO,KAAK,EAClC,mBAAmB,EAAE,CAAC,GAAG,EAAE,GAAG,KAAK,WAAW,GAC5C,MAAM,CAmgCR;AAED,eAAO,MAAQ,SAAS,QAjiCjB,MAAM,QACL,OAAO,UACL,MAAM,MAAM,EAAE,KAClB,QAAQ,CAAC,YAAY,EAAE,MAAM,CAAC,CAAC,EA8hCX,YAAY,SA5hC9B,MAAM,SACL,IAAI,EAAE,UACL,MAAM,MAAM,EAAE,cACV,MAAM,KACd,QAAQ,cAAc,CAAC,EAwhCW,MAAM,kBAthC7B,MAAM,WACZ;IACR,QAAQ,CAAC,EAAE,MAAM,MAAM,EAAE,CAAC;IAC1B,eAAe,CAAC,EAAE,mBAAmB,CAAC;CACtC,KACG,QAAQ,aAAa,CAAC,EAihCoB,WAAW,aA/gC/C,MAAM,QACV,OAAO,EAAE,YACL,QAAQ,SAAS,CAAC,UACpB,MAAM,MAAM,EAAE,KAClB,QAAQ,OAAO,EAAE,CA8gCtB,CAAC;AAEF,UAAU,OAAO;IAChB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE;QACL,IAAI,EAAE,GAAG,CAAC;QACV,WAAW,EAAE,MAAM,CAAC;KACpB,CAAC;IACF,SAAS,EAAE,MAAM,CAAC;IAClB,aAAa,CAAC,EAAE,GAAG,CAAC;CACpB;AAED,UAAU,SAAS;IAClB,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,EAAE,MAAM,CAAC;IAClB,aAAa,EAAE,GAAG,CAAC;CACnB;AAED,UAAU,YAAY,CAAC,CAAC,SAAS,OAAO,GAAG,SAAS;IACnD,UAAU,EAAE,CAAC,EAAE,CAAC;IAChB,OAAO,EAAE,CAAC,EAAE,CAAC;CACb;AACD,UAAU,OAAO,CAAC,CAAC,SAAS,OAAO,GAAG,SAAS;IAC9C,eAAe,EAAE;QAChB,CAAC,GAAG,EAAE,MAAM,GAAG,YAAY,CAAC,CAAC,CAAC,CAAC;KAC/B,CAAC;IACF,iBAAiB,EAAE;QAClB,CAAC,GAAG,EAAE,MAAM,GAAG,YAAY,CAAC,CAAC,CAAC,CAAC;KAC/B,CAAC;CACF;AAiID,wBAAsB,oBAAoB,CACzC,KAAK,KAAA,EACL,IAAI,MAAY,EAChB,IAAI,QAAK,EACT,IAAI,UAAQ,EACZ,QAAQ,MAAY,GAClB,OAAO,CACT;IACC,IAAI,EAAE,MAAM,EAAE,CAAC;IACf,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,IAAI,GAAG,KAAK,CAAC;CACnB,EAAE,CACH,CAmDA"}
node_modules/@gradio/client/dist/index.d.ts ADDED
@@ -0,0 +1,4 @@
+ export { client, post_data, upload_files, duplicate, api_factory } from "./client.js";
+ export type { SpaceStatus } from "./types.js";
+ export { FileData, upload, get_fetchable_url_or_file, prepare_files } from "./upload.js";
+ //# sourceMappingURL=index.d.ts.map
node_modules/@gradio/client/dist/index.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EACN,MAAM,EACN,SAAS,EACT,YAAY,EACZ,SAAS,EACT,WAAW,EACX,MAAM,aAAa,CAAC;AACrB,YAAY,EAAE,WAAW,EAAE,MAAM,YAAY,CAAC;AAC9C,OAAO,EACN,QAAQ,EACR,MAAM,EACN,yBAAyB,EACzB,aAAa,EACb,MAAM,aAAa,CAAC"}
node_modules/@gradio/client/dist/index.js ADDED
@@ -0,0 +1,1598 @@
1
+ var fn = new Intl.Collator(0, { numeric: 1 }).compare;
2
+ function semiver(a, b, bool) {
3
+ a = a.split(".");
4
+ b = b.split(".");
5
+ return fn(a[0], b[0]) || fn(a[1], b[1]) || (b[2] = b.slice(2).join("."), bool = /[.-]/.test(a[2] = a.slice(2).join(".")), bool == /[.-]/.test(b[2]) ? fn(a[2], b[2]) : bool ? -1 : 1);
6
+ }
7
+ function resolve_root(base_url, root_path, prioritize_base) {
8
+ if (root_path.startsWith("http://") || root_path.startsWith("https://")) {
9
+ return prioritize_base ? base_url : root_path;
10
+ }
11
+ return base_url + root_path;
12
+ }
13
+ function determine_protocol(endpoint) {
14
+ if (endpoint.startsWith("http")) {
15
+ const { protocol, host } = new URL(endpoint);
16
+ if (host.endsWith("hf.space")) {
17
+ return {
18
+ ws_protocol: "wss",
19
+ host,
20
+ http_protocol: protocol
21
+ };
22
+ }
23
+ return {
24
+ ws_protocol: protocol === "https:" ? "wss" : "ws",
25
+ http_protocol: protocol,
26
+ host
27
+ };
28
+ } else if (endpoint.startsWith("file:")) {
29
+ return {
30
+ ws_protocol: "ws",
31
+ http_protocol: "http:",
32
+ host: "lite.local"
33
+ // Special fake hostname only used for this case. This matches the hostname allowed in `is_self_host()` in `js/wasm/network/host.ts`.
34
+ };
35
+ }
36
+ return {
37
+ ws_protocol: "wss",
38
+ http_protocol: "https:",
39
+ host: endpoint
40
+ };
41
+ }
42
+ const RE_SPACE_NAME = /^[^\/]*\/[^\/]*$/;
43
+ const RE_SPACE_DOMAIN = /.*hf\.space\/{0,1}$/;
44
+ async function process_endpoint(app_reference, token) {
45
+ const headers = {};
46
+ if (token) {
47
+ headers.Authorization = `Bearer ${token}`;
48
+ }
49
+ const _app_reference = app_reference.trim();
50
+ if (RE_SPACE_NAME.test(_app_reference)) {
51
+ try {
52
+ const res = await fetch(
53
+ `https://huggingface.co/api/spaces/${_app_reference}/host`,
54
+ { headers }
55
+ );
56
+ if (res.status !== 200)
57
+ throw new Error("Space metadata could not be loaded.");
58
+ const _host = (await res.json()).host;
59
+ return {
60
+ space_id: app_reference,
61
+ ...determine_protocol(_host)
62
+ };
63
+ } catch (e) {
64
+ throw new Error("Space metadata could not be loaded." + e.message);
65
+ }
66
+ }
67
+ if (RE_SPACE_DOMAIN.test(_app_reference)) {
68
+ const { ws_protocol, http_protocol, host } = determine_protocol(_app_reference);
69
+ return {
70
+ space_id: host.replace(".hf.space", ""),
71
+ ws_protocol,
72
+ http_protocol,
73
+ host
74
+ };
75
+ }
76
+ return {
77
+ space_id: false,
78
+ ...determine_protocol(_app_reference)
79
+ };
80
+ }
81
+ function map_names_to_ids(fns) {
82
+ let apis = {};
83
+ fns.forEach(({ api_name }, i) => {
84
+ if (api_name)
85
+ apis[api_name] = i;
86
+ });
87
+ return apis;
88
+ }
89
+ const RE_DISABLED_DISCUSSION = /^(?=[^]*\b[dD]iscussions{0,1}\b)(?=[^]*\b[dD]isabled\b)[^]*$/;
90
+ async function discussions_enabled(space_id) {
91
+ try {
92
+ const r = await fetch(
93
+ `https://huggingface.co/api/spaces/${space_id}/discussions`,
94
+ {
95
+ method: "HEAD"
96
+ }
97
+ );
98
+ const error = r.headers.get("x-error-message");
99
+ if (error && RE_DISABLED_DISCUSSION.test(error))
100
+ return false;
101
+ return true;
102
+ } catch (e) {
103
+ return false;
104
+ }
105
+ }
106
+ async function get_space_hardware(space_id, token) {
107
+ const headers = {};
108
+ if (token) {
109
+ headers.Authorization = `Bearer ${token}`;
110
+ }
111
+ try {
112
+ const res = await fetch(
113
+ `https://huggingface.co/api/spaces/${space_id}/runtime`,
114
+ { headers }
115
+ );
116
+ if (res.status !== 200)
117
+ throw new Error("Space hardware could not be obtained.");
118
+ const { hardware } = await res.json();
119
+ return hardware;
120
+ } catch (e) {
121
+ throw new Error(e.message);
122
+ }
123
+ }
124
+ async function set_space_hardware(space_id, new_hardware, token) {
125
+ const headers = {};
126
+ if (token) {
127
+ headers.Authorization = `Bearer ${token}`;
128
+ }
129
+ try {
130
+ const res = await fetch(
131
+ `https://huggingface.co/api/spaces/${space_id}/hardware`,
132
+ { headers, body: JSON.stringify(new_hardware) }
133
+ );
134
+ if (res.status !== 200)
135
+ throw new Error(
136
+ "Space hardware could not be set. Please ensure the space hardware provided is valid and that a Hugging Face token is passed in."
137
+ );
138
+ const { hardware } = await res.json();
139
+ return hardware;
140
+ } catch (e) {
141
+ throw new Error(e.message);
142
+ }
143
+ }
144
+ async function set_space_timeout(space_id, timeout, token) {
145
+ const headers = {};
146
+ if (token) {
147
+ headers.Authorization = `Bearer ${token}`;
148
+ }
149
+ try {
150
+ const res = await fetch(
151
+ `https://huggingface.co/api/spaces/${space_id}/hardware`,
152
+ { headers, body: JSON.stringify({ seconds: timeout }) }
153
+ );
154
+ if (res.status !== 200)
155
+ throw new Error(
156
+ "Space hardware could not be set. Please ensure the space hardware provided is valid and that a Hugging Face token is passed in."
157
+ );
158
+ const { hardware } = await res.json();
159
+ return hardware;
160
+ } catch (e) {
161
+ throw new Error(e.message);
162
+ }
163
+ }
164
+ const hardware_types = [
165
+ "cpu-basic",
166
+ "cpu-upgrade",
167
+ "t4-small",
168
+ "t4-medium",
169
+ "a10g-small",
170
+ "a10g-large",
171
+ "a100-large"
172
+ ];
173
+ function apply_edit(target, path, action, value) {
174
+ if (path.length === 0) {
175
+ if (action === "replace") {
176
+ return value;
177
+ } else if (action === "append") {
178
+ return target + value;
179
+ }
180
+ throw new Error(`Unsupported action: ${action}`);
181
+ }
182
+ let current = target;
183
+ for (let i = 0; i < path.length - 1; i++) {
184
+ current = current[path[i]];
185
+ }
186
+ const last_path = path[path.length - 1];
187
+ switch (action) {
188
+ case "replace":
189
+ current[last_path] = value;
190
+ break;
191
+ case "append":
192
+ current[last_path] += value;
193
+ break;
194
+ case "add":
195
+ if (Array.isArray(current)) {
196
+ current.splice(Number(last_path), 0, value);
197
+ } else {
198
+ current[last_path] = value;
199
+ }
200
+ break;
201
+ case "delete":
202
+ if (Array.isArray(current)) {
203
+ current.splice(Number(last_path), 1);
204
+ } else {
205
+ delete current[last_path];
206
+ }
207
+ break;
208
+ default:
209
+ throw new Error(`Unknown action: ${action}`);
210
+ }
211
+ return target;
212
+ }
213
+ function apply_diff(obj, diff) {
214
+ diff.forEach(([action, path, value]) => {
215
+ obj = apply_edit(obj, path, action, value);
216
+ });
217
+ return obj;
218
+ }
219
+ function is_url(str) {
220
+ try {
221
+ const url = new URL(str);
222
+ return url.protocol === "http:" || url.protocol === "https:";
223
+ } catch {
224
+ return false;
225
+ }
226
+ }
227
+ function get_fetchable_url_or_file(path, server_url, proxy_url) {
228
+ if (path == null) {
229
+ return proxy_url ? `/proxy=${proxy_url}file=` : `${server_url}/file=`;
230
+ }
231
+ if (is_url(path)) {
232
+ return path;
233
+ }
234
+ return proxy_url ? `/proxy=${proxy_url}file=${path}` : `${server_url}/file=${path}`;
235
+ }
236
+ async function upload(file_data, root, upload_id, upload_fn = upload_files) {
237
+ let files = (Array.isArray(file_data) ? file_data : [file_data]).map(
238
+ (file_data2) => file_data2.blob
239
+ );
240
+ return await Promise.all(
241
+ await upload_fn(root, files, void 0, upload_id).then(
242
+ async (response) => {
243
+ if (response.error) {
244
+ throw new Error(response.error);
245
+ } else {
246
+ if (response.files) {
247
+ return response.files.map((f, i) => {
248
+ const file = new FileData({
249
+ ...file_data[i],
250
+ path: f,
251
+ url: root + "/file=" + f
252
+ });
253
+ return file;
254
+ });
255
+ }
256
+ return [];
257
+ }
258
+ }
259
+ )
260
+ );
261
+ }
262
+ async function prepare_files(files, is_stream) {
263
+ return files.map(
264
+ (f, i) => new FileData({
265
+ path: f.name,
266
+ orig_name: f.name,
267
+ blob: f,
268
+ size: f.size,
269
+ mime_type: f.type,
270
+ is_stream
271
+ })
272
+ );
273
+ }
274
+ class FileData {
275
+ constructor({
276
+ path,
277
+ url,
278
+ orig_name,
279
+ size,
280
+ blob,
281
+ is_stream,
282
+ mime_type,
283
+ alt_text
284
+ }) {
285
+ this.path = path;
286
+ this.url = url;
287
+ this.orig_name = orig_name;
288
+ this.size = size;
289
+ this.blob = url ? void 0 : blob;
290
+ this.is_stream = is_stream;
291
+ this.mime_type = mime_type;
292
+ this.alt_text = alt_text;
293
+ }
294
+ }
295
+ const QUEUE_FULL_MSG = "This application is too busy. Keep trying!";
296
+ const BROKEN_CONNECTION_MSG = "Connection errored out.";
297
+ let NodeBlob;
298
+ async function duplicate(app_reference, options) {
299
+ const { hf_token, private: _private, hardware, timeout } = options;
300
+ if (hardware && !hardware_types.includes(hardware)) {
301
+ throw new Error(
302
+ `Invalid hardware type provided. Valid types are: ${hardware_types.map((v) => `"${v}"`).join(",")}.`
303
+ );
304
+ }
305
+ const headers = {
306
+ Authorization: `Bearer ${hf_token}`
307
+ };
308
+ const user = (await (await fetch(`https://huggingface.co/api/whoami-v2`, {
309
+ headers
310
+ })).json()).name;
311
+ const space_name = app_reference.split("/")[1];
312
+ const body = {
313
+ repository: `${user}/${space_name}`
314
+ };
315
+ if (_private) {
316
+ body.private = true;
317
+ }
318
+ try {
319
+ const response = await fetch(
320
+ `https://huggingface.co/api/spaces/${app_reference}/duplicate`,
321
+ {
322
+ method: "POST",
323
+ headers: { "Content-Type": "application/json", ...headers },
324
+ body: JSON.stringify(body)
325
+ }
326
+ );
327
+ if (response.status === 409) {
328
+ return client(`${user}/${space_name}`, options);
329
+ }
330
+ const duplicated_space = await response.json();
331
+ let original_hardware;
332
+ if (!hardware) {
333
+ original_hardware = await get_space_hardware(app_reference, hf_token);
334
+ }
335
+ const requested_hardware = hardware || original_hardware || "cpu-basic";
336
+ await set_space_hardware(
337
+ `${user}/${space_name}`,
338
+ requested_hardware,
339
+ hf_token
340
+ );
341
+ await set_space_timeout(`${user}/${space_name}`, timeout || 300, hf_token);
342
+ return client(duplicated_space.url, options);
343
+ } catch (e) {
344
+ throw new Error(e);
345
+ }
346
+ }
347
+ function api_factory(fetch_implementation, EventSource_factory) {
348
+ return { post_data: post_data2, upload_files: upload_files2, client: client2, handle_blob: handle_blob2 };
349
+ async function post_data2(url, body, token) {
350
+ const headers = { "Content-Type": "application/json" };
351
+ if (token) {
352
+ headers.Authorization = `Bearer ${token}`;
353
+ }
354
+ try {
355
+ var response = await fetch_implementation(url, {
356
+ method: "POST",
357
+ body: JSON.stringify(body),
358
+ headers
359
+ });
360
+ } catch (e) {
361
+ return [{ error: BROKEN_CONNECTION_MSG }, 500];
362
+ }
363
+ let output;
364
+ let status;
365
+ try {
366
+ output = await response.json();
367
+ status = response.status;
368
+ } catch (e) {
369
+ output = { error: `Could not parse server response: ${e}` };
370
+ status = 500;
371
+ }
372
+ return [output, status];
373
+ }
374
+ async function upload_files2(root, files, token, upload_id) {
375
+ const headers = {};
376
+ if (token) {
377
+ headers.Authorization = `Bearer ${token}`;
378
+ }
379
+ const chunkSize = 1e3;
380
+ const uploadResponses = [];
381
+ for (let i = 0; i < files.length; i += chunkSize) {
382
+ const chunk = files.slice(i, i + chunkSize);
383
+ const formData = new FormData();
384
+ chunk.forEach((file) => {
385
+ formData.append("files", file);
386
+ });
387
+ try {
388
+ const upload_url = upload_id ? `${root}/upload?upload_id=${upload_id}` : `${root}/upload`;
389
+ var response = await fetch_implementation(upload_url, {
390
+ method: "POST",
391
+ body: formData,
392
+ headers
393
+ });
394
+ } catch (e) {
395
+ return { error: BROKEN_CONNECTION_MSG };
396
+ }
397
+ const output = await response.json();
398
+ uploadResponses.push(...output);
399
+ }
400
+ return { files: uploadResponses };
401
+ }
402
+ async function client2(app_reference, options = {}) {
403
+ return new Promise(async (res) => {
404
+ const { status_callback, hf_token } = options;
405
+ const return_obj = {
406
+ predict,
407
+ submit,
408
+ view_api,
409
+ component_server
410
+ };
411
+ if ((typeof window === "undefined" || !("WebSocket" in window)) && !global.WebSocket) {
412
+ const ws = await import("./wrapper-6f348d45.js");
413
+ NodeBlob = (await import("node:buffer")).Blob;
414
+ global.WebSocket = ws.WebSocket;
415
+ }
416
+ const { ws_protocol, http_protocol, host, space_id } = await process_endpoint(app_reference, hf_token);
417
+ const session_hash = Math.random().toString(36).substring(2);
418
+ const last_status = {};
419
+ let stream_open = false;
420
+ let pending_stream_messages = {};
421
+ let pending_diff_streams = {};
422
+ let event_stream = null;
423
+ const event_callbacks = {};
424
+ const unclosed_events = /* @__PURE__ */ new Set();
425
+ let config;
426
+ let api_map = {};
427
+ let jwt = false;
428
+ if (hf_token && space_id) {
429
+ jwt = await get_jwt(space_id, hf_token);
430
+ }
431
+ async function config_success(_config) {
432
+ config = _config;
433
+ api_map = map_names_to_ids((_config == null ? void 0 : _config.dependencies) || []);
434
+ if (config.auth_required) {
435
+ return {
436
+ config,
437
+ ...return_obj
438
+ };
439
+ }
440
+ try {
441
+ api = await view_api(config);
442
+ } catch (e) {
443
+ console.error(`Could not get api details: ${e.message}`);
444
+ }
445
+ return {
446
+ config,
447
+ ...return_obj
448
+ };
449
+ }
450
+ let api;
451
+ async function handle_space_success(status) {
452
+ if (status_callback)
453
+ status_callback(status);
454
+ if (status.status === "running")
455
+ try {
456
+ config = await resolve_config(
457
+ fetch_implementation,
458
+ `${http_protocol}//${host}`,
459
+ hf_token
460
+ );
461
+ const _config = await config_success(config);
462
+ res(_config);
463
+ } catch (e) {
464
+ console.error(e);
465
+ if (status_callback) {
466
+ status_callback({
467
+ status: "error",
468
+ message: "Could not load this space.",
469
+ load_status: "error",
470
+ detail: "NOT_FOUND"
471
+ });
472
+ }
473
+ }
474
+ }
475
+ try {
476
+ config = await resolve_config(
477
+ fetch_implementation,
478
+ `${http_protocol}//${host}`,
479
+ hf_token
480
+ );
481
+ const _config = await config_success(config);
482
+ res(_config);
483
+ } catch (e) {
484
+ console.error(e);
485
+ if (space_id) {
486
+ check_space_status(
487
+ space_id,
488
+ RE_SPACE_NAME.test(space_id) ? "space_name" : "subdomain",
489
+ handle_space_success
490
+ );
491
+ } else {
492
+ if (status_callback)
493
+ status_callback({
494
+ status: "error",
495
+ message: "Could not load this space.",
496
+ load_status: "error",
497
+ detail: "NOT_FOUND"
498
+ });
499
+ }
500
+ }
501
+ function predict(endpoint, data, event_data) {
502
+ let data_returned = false;
503
+ let status_complete = false;
504
+ let dependency;
505
+ if (typeof endpoint === "number") {
506
+ dependency = config.dependencies[endpoint];
507
+ } else {
508
+ const trimmed_endpoint = endpoint.replace(/^\//, "");
509
+ dependency = config.dependencies[api_map[trimmed_endpoint]];
510
+ }
511
+ if (dependency.types.continuous) {
512
+ throw new Error(
513
+ "Cannot call predict on this function as it may run forever. Use submit instead"
514
+ );
515
+ }
516
+ return new Promise((res2, rej) => {
517
+ const app = submit(endpoint, data, event_data);
518
+ let result;
519
+ app.on("data", (d) => {
520
+ if (status_complete) {
521
+ app.destroy();
522
+ res2(d);
523
+ }
524
+ data_returned = true;
525
+ result = d;
526
+ }).on("status", (status) => {
527
+ if (status.stage === "error")
528
+ rej(status);
529
+ if (status.stage === "complete") {
530
+ status_complete = true;
531
+ if (data_returned) {
532
+ app.destroy();
533
+ res2(result);
534
+ }
535
+ }
536
+ });
537
+ });
538
+ }
539
+ function submit(endpoint, data, event_data, trigger_id = null) {
540
+ let fn_index;
541
+ let api_info;
542
+ if (typeof endpoint === "number") {
543
+ fn_index = endpoint;
544
+ api_info = api.unnamed_endpoints[fn_index];
545
+ } else {
546
+ const trimmed_endpoint = endpoint.replace(/^\//, "");
547
+ fn_index = api_map[trimmed_endpoint];
548
+ api_info = api.named_endpoints[endpoint.trim()];
549
+ }
550
+ if (typeof fn_index !== "number") {
551
+ throw new Error(
552
+ "There is no endpoint matching that name or fn_index matching that number."
553
+ );
554
+ }
555
+ let websocket;
556
+ let eventSource;
557
+ let protocol = config.protocol ?? "ws";
558
+ const _endpoint = typeof endpoint === "number" ? "/predict" : endpoint;
559
+ let payload;
560
+ let event_id = null;
561
+ let complete = false;
562
+ const listener_map = {};
563
+ let url_params = "";
564
+ if (typeof window !== "undefined") {
565
+ url_params = new URLSearchParams(window.location.search).toString();
566
+ }
567
+ handle_blob2(`${config.root}`, data, api_info, hf_token).then(
568
+ (_payload) => {
569
+ payload = {
570
+ data: _payload || [],
571
+ event_data,
572
+ fn_index,
573
+ trigger_id
574
+ };
575
+ if (skip_queue(fn_index, config)) {
576
+ fire_event({
577
+ type: "status",
578
+ endpoint: _endpoint,
579
+ stage: "pending",
580
+ queue: false,
581
+ fn_index,
582
+ time: /* @__PURE__ */ new Date()
583
+ });
584
+ post_data2(
585
+ `${config.root}/run${_endpoint.startsWith("/") ? _endpoint : `/${_endpoint}`}${url_params ? "?" + url_params : ""}`,
586
+ {
587
+ ...payload,
588
+ session_hash
589
+ },
590
+ hf_token
591
+ ).then(([output, status_code]) => {
592
+ const data2 = output.data;
593
+ if (status_code == 200) {
594
+ fire_event({
595
+ type: "data",
596
+ endpoint: _endpoint,
597
+ fn_index,
598
+ data: data2,
599
+ time: /* @__PURE__ */ new Date()
600
+ });
601
+ fire_event({
602
+ type: "status",
603
+ endpoint: _endpoint,
604
+ fn_index,
605
+ stage: "complete",
606
+ eta: output.average_duration,
607
+ queue: false,
608
+ time: /* @__PURE__ */ new Date()
609
+ });
610
+ } else {
611
+ fire_event({
612
+ type: "status",
613
+ stage: "error",
614
+ endpoint: _endpoint,
615
+ fn_index,
616
+ message: output.error,
617
+ queue: false,
618
+ time: /* @__PURE__ */ new Date()
619
+ });
620
+ }
621
+ }).catch((e) => {
622
+ fire_event({
623
+ type: "status",
624
+ stage: "error",
625
+ message: e.message,
626
+ endpoint: _endpoint,
627
+ fn_index,
628
+ queue: false,
629
+ time: /* @__PURE__ */ new Date()
630
+ });
631
+ });
632
+ } else if (protocol == "ws") {
633
+ fire_event({
634
+ type: "status",
635
+ stage: "pending",
636
+ queue: true,
637
+ endpoint: _endpoint,
638
+ fn_index,
639
+ time: /* @__PURE__ */ new Date()
640
+ });
641
+ let url = new URL(`${ws_protocol}://${resolve_root(
642
+ host,
643
+ config.path,
644
+ true
645
+ )}
646
+ /queue/join${url_params ? "?" + url_params : ""}`);
647
+ if (jwt) {
648
+ url.searchParams.set("__sign", jwt);
649
+ }
650
+ websocket = new WebSocket(url);
651
+ websocket.onclose = (evt) => {
652
+ if (!evt.wasClean) {
653
+ fire_event({
654
+ type: "status",
655
+ stage: "error",
656
+ broken: true,
657
+ message: BROKEN_CONNECTION_MSG,
658
+ queue: true,
659
+ endpoint: _endpoint,
660
+ fn_index,
661
+ time: /* @__PURE__ */ new Date()
662
+ });
663
+ }
664
+ };
665
+ websocket.onmessage = function(event) {
666
+ const _data = JSON.parse(event.data);
667
+ const { type, status, data: data2 } = handle_message(
668
+ _data,
669
+ last_status[fn_index]
670
+ );
671
+ if (type === "update" && status && !complete) {
672
+ fire_event({
673
+ type: "status",
674
+ endpoint: _endpoint,
675
+ fn_index,
676
+ time: /* @__PURE__ */ new Date(),
677
+ ...status
678
+ });
679
+ if (status.stage === "error") {
680
+ websocket.close();
681
+ }
682
+ } else if (type === "hash") {
683
+ websocket.send(JSON.stringify({ fn_index, session_hash }));
684
+ return;
685
+ } else if (type === "data") {
686
+ websocket.send(JSON.stringify({ ...payload, session_hash }));
687
+ } else if (type === "complete") {
688
+ complete = status;
689
+ } else if (type === "log") {
690
+ fire_event({
691
+ type: "log",
692
+ log: data2.log,
693
+ level: data2.level,
694
+ endpoint: _endpoint,
695
+ fn_index
696
+ });
697
+ } else if (type === "generating") {
698
+ fire_event({
699
+ type: "status",
700
+ time: /* @__PURE__ */ new Date(),
701
+ ...status,
702
+ stage: status == null ? void 0 : status.stage,
703
+ queue: true,
704
+ endpoint: _endpoint,
705
+ fn_index
706
+ });
707
+ }
708
+ if (data2) {
709
+ fire_event({
710
+ type: "data",
711
+ time: /* @__PURE__ */ new Date(),
712
+ data: data2.data,
713
+ endpoint: _endpoint,
714
+ fn_index
715
+ });
716
+ if (complete) {
717
+ fire_event({
718
+ type: "status",
719
+ time: /* @__PURE__ */ new Date(),
720
+ ...complete,
721
+ stage: status == null ? void 0 : status.stage,
722
+ queue: true,
723
+ endpoint: _endpoint,
724
+ fn_index
725
+ });
726
+ websocket.close();
727
+ }
728
+ }
729
+ };
730
+ if (semiver(config.version || "2.0.0", "3.6") < 0) {
731
+ addEventListener(
732
+ "open",
733
+ () => websocket.send(JSON.stringify({ hash: session_hash }))
734
+ );
735
+ }
736
+ } else if (protocol == "sse") {
737
+ fire_event({
738
+ type: "status",
739
+ stage: "pending",
740
+ queue: true,
741
+ endpoint: _endpoint,
742
+ fn_index,
743
+ time: /* @__PURE__ */ new Date()
744
+ });
745
+ var params = new URLSearchParams({
746
+ fn_index: fn_index.toString(),
747
+ session_hash
748
+ }).toString();
749
+ let url = new URL(
750
+ `${config.root}/queue/join?${url_params ? url_params + "&" : ""}${params}`
751
+ );
752
+ eventSource = EventSource_factory(url);
753
+ eventSource.onmessage = async function(event) {
754
+ const _data = JSON.parse(event.data);
755
+ const { type, status, data: data2 } = handle_message(
756
+ _data,
757
+ last_status[fn_index]
758
+ );
759
+ if (type === "update" && status && !complete) {
760
+ fire_event({
761
+ type: "status",
762
+ endpoint: _endpoint,
763
+ fn_index,
764
+ time: /* @__PURE__ */ new Date(),
765
+ ...status
766
+ });
767
+ if (status.stage === "error") {
768
+ eventSource.close();
769
+ }
770
+ } else if (type === "data") {
771
+ event_id = _data.event_id;
772
+ let [_, status2] = await post_data2(
773
+ `${config.root}/queue/data`,
774
+ {
775
+ ...payload,
776
+ session_hash,
777
+ event_id
778
+ },
779
+ hf_token
780
+ );
781
+ if (status2 !== 200) {
782
+ fire_event({
783
+ type: "status",
784
+ stage: "error",
785
+ message: BROKEN_CONNECTION_MSG,
786
+ queue: true,
787
+ endpoint: _endpoint,
788
+ fn_index,
789
+ time: /* @__PURE__ */ new Date()
790
+ });
791
+ eventSource.close();
792
+ }
793
+ } else if (type === "complete") {
794
+ complete = status;
795
+ } else if (type === "log") {
796
+ fire_event({
797
+ type: "log",
798
+ log: data2.log,
799
+ level: data2.level,
800
+ endpoint: _endpoint,
801
+ fn_index
802
+ });
803
+ } else if (type === "generating") {
804
+ fire_event({
805
+ type: "status",
806
+ time: /* @__PURE__ */ new Date(),
807
+ ...status,
808
+ stage: status == null ? void 0 : status.stage,
809
+ queue: true,
810
+ endpoint: _endpoint,
811
+ fn_index
812
+ });
813
+ }
814
+ if (data2) {
815
+ fire_event({
816
+ type: "data",
817
+ time: /* @__PURE__ */ new Date(),
818
+ data: data2.data,
819
+ endpoint: _endpoint,
820
+ fn_index
821
+ });
822
+ if (complete) {
823
+ fire_event({
824
+ type: "status",
825
+ time: /* @__PURE__ */ new Date(),
826
+ ...complete,
827
+ stage: status == null ? void 0 : status.stage,
828
+ queue: true,
829
+ endpoint: _endpoint,
830
+ fn_index
831
+ });
832
+ eventSource.close();
833
+ }
834
+ }
835
+ };
836
+ } else if (protocol == "sse_v1" || protocol == "sse_v2") {
837
+ fire_event({
838
+ type: "status",
839
+ stage: "pending",
840
+ queue: true,
841
+ endpoint: _endpoint,
842
+ fn_index,
843
+ time: /* @__PURE__ */ new Date()
844
+ });
845
+ post_data2(
846
+ `${config.root}/queue/join?${url_params}`,
847
+ {
848
+ ...payload,
849
+ session_hash
850
+ },
851
+ hf_token
852
+ ).then(([response, status]) => {
853
+ if (status === 503) {
854
+ fire_event({
855
+ type: "status",
856
+ stage: "error",
857
+ message: QUEUE_FULL_MSG,
858
+ queue: true,
859
+ endpoint: _endpoint,
860
+ fn_index,
861
+ time: /* @__PURE__ */ new Date()
862
+ });
863
+ } else if (status !== 200) {
864
+ fire_event({
865
+ type: "status",
866
+ stage: "error",
867
+ message: BROKEN_CONNECTION_MSG,
868
+ queue: true,
869
+ endpoint: _endpoint,
870
+ fn_index,
871
+ time: /* @__PURE__ */ new Date()
872
+ });
873
+ } else {
874
+ event_id = response.event_id;
875
+ let callback = async function(_data) {
876
+ try {
877
+ const { type, status: status2, data: data2 } = handle_message(
878
+ _data,
879
+ last_status[fn_index]
880
+ );
881
+ if (type == "heartbeat") {
882
+ return;
883
+ }
884
+ if (type === "update" && status2 && !complete) {
885
+ fire_event({
886
+ type: "status",
887
+ endpoint: _endpoint,
888
+ fn_index,
889
+ time: /* @__PURE__ */ new Date(),
890
+ ...status2
891
+ });
892
+ } else if (type === "complete") {
893
+ complete = status2;
894
+ } else if (type == "unexpected_error") {
895
+ console.error("Unexpected error", status2 == null ? void 0 : status2.message);
896
+ fire_event({
897
+ type: "status",
898
+ stage: "error",
899
+ message: (status2 == null ? void 0 : status2.message) || "An Unexpected Error Occurred!",
900
+ queue: true,
901
+ endpoint: _endpoint,
902
+ fn_index,
903
+ time: /* @__PURE__ */ new Date()
904
+ });
905
+ } else if (type === "log") {
906
+ fire_event({
907
+ type: "log",
908
+ log: data2.log,
909
+ level: data2.level,
910
+ endpoint: _endpoint,
911
+ fn_index
912
+ });
913
+ return;
914
+ } else if (type === "generating") {
915
+ fire_event({
916
+ type: "status",
917
+ time: /* @__PURE__ */ new Date(),
918
+ ...status2,
919
+ stage: status2 == null ? void 0 : status2.stage,
920
+ queue: true,
921
+ endpoint: _endpoint,
922
+ fn_index
923
+ });
924
+ if (data2 && protocol === "sse_v2") {
925
+ apply_diff_stream(event_id, data2);
926
+ }
927
+ }
928
+ if (data2) {
929
+ fire_event({
930
+ type: "data",
931
+ time: /* @__PURE__ */ new Date(),
932
+ data: data2.data,
933
+ endpoint: _endpoint,
934
+ fn_index
935
+ });
936
+ if (complete) {
937
+ fire_event({
938
+ type: "status",
939
+ time: /* @__PURE__ */ new Date(),
940
+ ...complete,
941
+ stage: status2 == null ? void 0 : status2.stage,
942
+ queue: true,
943
+ endpoint: _endpoint,
944
+ fn_index
945
+ });
946
+ }
947
+ }
948
+ if ((status2 == null ? void 0 : status2.stage) === "complete" || (status2 == null ? void 0 : status2.stage) === "error") {
949
+ if (event_callbacks[event_id]) {
950
+ delete event_callbacks[event_id];
951
+ }
952
+ if (event_id in pending_diff_streams) {
953
+ delete pending_diff_streams[event_id];
954
+ }
955
+ }
956
+ } catch (e) {
957
+ console.error("Unexpected client exception", e);
958
+ fire_event({
959
+ type: "status",
960
+ stage: "error",
961
+ message: "An Unexpected Error Occurred!",
962
+ queue: true,
963
+ endpoint: _endpoint,
964
+ fn_index,
965
+ time: /* @__PURE__ */ new Date()
966
+ });
967
+ close_stream();
968
+ }
969
+ };
970
+ if (event_id in pending_stream_messages) {
971
+ pending_stream_messages[event_id].forEach(
972
+ (msg) => callback(msg)
973
+ );
974
+ delete pending_stream_messages[event_id];
975
+ }
976
+ event_callbacks[event_id] = callback;
977
+ unclosed_events.add(event_id);
978
+ if (!stream_open) {
979
+ open_stream();
980
+ }
981
+ }
982
+ });
983
+ }
984
+ }
985
+ );
986
+ function apply_diff_stream(event_id2, data2) {
987
+ let is_first_generation = !pending_diff_streams[event_id2];
988
+ if (is_first_generation) {
989
+ pending_diff_streams[event_id2] = [];
990
+ data2.data.forEach((value, i) => {
991
+ pending_diff_streams[event_id2][i] = value;
992
+ });
993
+ } else {
994
+ data2.data.forEach((value, i) => {
995
+ let new_data = apply_diff(
996
+ pending_diff_streams[event_id2][i],
997
+ value
998
+ );
999
+ pending_diff_streams[event_id2][i] = new_data;
1000
+ data2.data[i] = new_data;
1001
+ });
1002
+ }
1003
+ }
1004
+ function fire_event(event) {
1005
+ const narrowed_listener_map = listener_map;
1006
+ const listeners = narrowed_listener_map[event.type] || [];
1007
+ listeners == null ? void 0 : listeners.forEach((l) => l(event));
1008
+ }
1009
+ function on(eventType, listener) {
1010
+ const narrowed_listener_map = listener_map;
1011
+ const listeners = narrowed_listener_map[eventType] || [];
1012
+ narrowed_listener_map[eventType] = listeners;
1013
+ listeners == null ? void 0 : listeners.push(listener);
1014
+ return { on, off, cancel, destroy };
1015
+ }
1016
+ function off(eventType, listener) {
1017
+ const narrowed_listener_map = listener_map;
1018
+ let listeners = narrowed_listener_map[eventType] || [];
1019
+ listeners = listeners == null ? void 0 : listeners.filter((l) => l !== listener);
1020
+ narrowed_listener_map[eventType] = listeners;
1021
+ return { on, off, cancel, destroy };
1022
+ }
1023
+ async function cancel() {
1024
+ const _status = {
1025
+ stage: "complete",
1026
+ queue: false,
1027
+ time: /* @__PURE__ */ new Date()
1028
+ };
1029
+ complete = _status;
1030
+ fire_event({
1031
+ ..._status,
1032
+ type: "status",
1033
+ endpoint: _endpoint,
1034
+ fn_index
1035
+ });
1036
+ let cancel_request = {};
1037
+ if (protocol === "ws") {
1038
+ if (websocket && websocket.readyState === 0) {
1039
+ websocket.addEventListener("open", () => {
1040
+ websocket.close();
1041
+ });
1042
+ } else {
1043
+ websocket.close();
1044
+ }
1045
+ cancel_request = { fn_index, session_hash };
1046
+ } else {
1047
+ eventSource.close();
1048
+ cancel_request = { event_id };
1049
+ }
1050
+ try {
1051
+ await fetch_implementation(`${config.root}/reset`, {
1052
+ headers: { "Content-Type": "application/json" },
1053
+ method: "POST",
1054
+ body: JSON.stringify(cancel_request)
1055
+ });
1056
+ } catch (e) {
1057
+ console.warn(
1058
+ "The `/reset` endpoint could not be called. Subsequent endpoint results may be unreliable."
1059
+ );
1060
+ }
1061
+ }
1062
+ function destroy() {
1063
+ for (const event_type in listener_map) {
1064
+ listener_map[event_type].forEach((fn2) => {
1065
+ off(event_type, fn2);
1066
+ });
1067
+ }
1068
+ }
1069
+ return {
1070
+ on,
1071
+ off,
1072
+ cancel,
1073
+ destroy
1074
+ };
1075
+ }
1076
+ function open_stream() {
1077
+ stream_open = true;
1078
+ let params = new URLSearchParams({
1079
+ session_hash
1080
+ }).toString();
1081
+ let url = new URL(`${config.root}/queue/data?${params}`);
1082
+ event_stream = EventSource_factory(url);
1083
+ event_stream.onmessage = async function(event) {
1084
+ let _data = JSON.parse(event.data);
1085
+ const event_id = _data.event_id;
1086
+ if (!event_id) {
1087
+ await Promise.all(
1088
+ Object.keys(event_callbacks).map(
1089
+ (event_id2) => event_callbacks[event_id2](_data)
1090
+ )
1091
+ );
1092
+ } else if (event_callbacks[event_id]) {
1093
+ if (_data.msg === "process_completed") {
1094
+ unclosed_events.delete(event_id);
1095
+ if (unclosed_events.size === 0) {
1096
+ close_stream();
1097
+ }
1098
+ }
1099
+ let fn2 = event_callbacks[event_id];
1100
+ window.setTimeout(fn2, 0, _data);
1101
+ } else {
1102
+ if (!pending_stream_messages[event_id]) {
1103
+ pending_stream_messages[event_id] = [];
1104
+ }
1105
+ pending_stream_messages[event_id].push(_data);
1106
+ }
1107
+ };
1108
+ event_stream.onerror = async function(event) {
1109
+ await Promise.all(
1110
+ Object.keys(event_callbacks).map(
1111
+ (event_id) => event_callbacks[event_id]({
1112
+ msg: "unexpected_error",
1113
+ message: BROKEN_CONNECTION_MSG
1114
+ })
1115
+ )
1116
+ );
1117
+ close_stream();
1118
+ };
1119
+ }
1120
+ function close_stream() {
1121
+ stream_open = false;
1122
+ event_stream == null ? void 0 : event_stream.close();
1123
+ }
1124
+ async function component_server(component_id, fn_name, data) {
1125
+ var _a;
1126
+ const headers = { "Content-Type": "application/json" };
1127
+ if (hf_token) {
1128
+ headers.Authorization = `Bearer ${hf_token}`;
1129
+ }
1130
+ let root_url;
1131
+ let component = config.components.find(
1132
+ (comp) => comp.id === component_id
1133
+ );
1134
+ if ((_a = component == null ? void 0 : component.props) == null ? void 0 : _a.root_url) {
1135
+ root_url = component.props.root_url;
1136
+ } else {
1137
+ root_url = config.root;
1138
+ }
1139
+ const response = await fetch_implementation(
1140
+ `${root_url}/component_server/`,
1141
+ {
1142
+ method: "POST",
1143
+ body: JSON.stringify({
1144
+ data,
1145
+ component_id,
1146
+ fn_name,
1147
+ session_hash
1148
+ }),
1149
+ headers
1150
+ }
1151
+ );
1152
+ if (!response.ok) {
1153
+ throw new Error(
1154
+ "Could not connect to component server: " + response.statusText
1155
+ );
1156
+ }
1157
+ const output = await response.json();
1158
+ return output;
1159
+ }
1160
+ async function view_api(config2) {
1161
+ if (api)
1162
+ return api;
1163
+ const headers = { "Content-Type": "application/json" };
1164
+ if (hf_token) {
1165
+ headers.Authorization = `Bearer ${hf_token}`;
1166
+ }
1167
+ let response;
1168
+ if (semiver(config2.version || "2.0.0", "3.30") < 0) {
1169
+ response = await fetch_implementation(
1170
+ "https://gradio-space-api-fetcher-v2.hf.space/api",
1171
+ {
1172
+ method: "POST",
1173
+ body: JSON.stringify({
1174
+ serialize: false,
1175
+ config: JSON.stringify(config2)
1176
+ }),
1177
+ headers
1178
+ }
1179
+ );
1180
+ } else {
1181
+ response = await fetch_implementation(`${config2.root}/info`, {
1182
+ headers
1183
+ });
1184
+ }
1185
+ if (!response.ok) {
1186
+ throw new Error(BROKEN_CONNECTION_MSG);
1187
+ }
1188
+ let api_info = await response.json();
1189
+ if ("api" in api_info) {
1190
+ api_info = api_info.api;
1191
+ }
1192
+ if (api_info.named_endpoints["/predict"] && !api_info.unnamed_endpoints["0"]) {
1193
+ api_info.unnamed_endpoints[0] = api_info.named_endpoints["/predict"];
1194
+ }
1195
+ const x = transform_api_info(api_info, config2, api_map);
1196
+ return x;
1197
+ }
1198
+ });
1199
+ }
1200
+ async function handle_blob2(endpoint, data, api_info, token) {
1201
+ const blob_refs = await walk_and_store_blobs(
1202
+ data,
1203
+ void 0,
1204
+ [],
1205
+ true,
1206
+ api_info
1207
+ );
1208
+ return Promise.all(
1209
+ blob_refs.map(async ({ path, blob, type }) => {
1210
+ if (blob) {
1211
+ const file_url = (await upload_files2(endpoint, [blob], token)).files[0];
1212
+ return { path, file_url, type, name: blob == null ? void 0 : blob.name };
1213
+ }
1214
+ return { path, type };
1215
+ })
1216
+ ).then((r) => {
1217
+ r.forEach(({ path, file_url, type, name }) => {
1218
+ if (type === "Gallery") {
1219
+ update_object(data, file_url, path);
1220
+ } else if (file_url) {
1221
+ const file = new FileData({ path: file_url, orig_name: name });
1222
+ update_object(data, file, path);
1223
+ }
1224
+ });
1225
+ return data;
1226
+ });
1227
+ }
1228
+ }
1229
+ const { post_data, upload_files, client, handle_blob } = api_factory(
1230
+ fetch,
1231
+ (...args) => new EventSource(...args)
1232
+ );
1233
+ function get_type(type, component, serializer, signature_type) {
1234
+ switch (type.type) {
1235
+ case "string":
1236
+ return "string";
1237
+ case "boolean":
1238
+ return "boolean";
1239
+ case "number":
1240
+ return "number";
1241
+ }
1242
+ if (serializer === "JSONSerializable" || serializer === "StringSerializable") {
1243
+ return "any";
1244
+ } else if (serializer === "ListStringSerializable") {
1245
+ return "string[]";
1246
+ } else if (component === "Image") {
1247
+ return signature_type === "parameter" ? "Blob | File | Buffer" : "string";
1248
+ } else if (serializer === "FileSerializable") {
1249
+ if ((type == null ? void 0 : type.type) === "array") {
1250
+ return signature_type === "parameter" ? "(Blob | File | Buffer)[]" : `{ name: string; data: string; size?: number; is_file?: boolean; orig_name?: string}[]`;
1251
+ }
1252
+ return signature_type === "parameter" ? "Blob | File | Buffer" : `{ name: string; data: string; size?: number; is_file?: boolean; orig_name?: string}`;
1253
+ } else if (serializer === "GallerySerializable") {
1254
+ return signature_type === "parameter" ? "[(Blob | File | Buffer), (string | null)][]" : `[{ name: string; data: string; size?: number; is_file?: boolean; orig_name?: string}, (string | null))][]`;
1255
+ }
1256
+ }
1257
+ function get_description(type, serializer) {
1258
+ if (serializer === "GallerySerializable") {
1259
+ return "array of [file, label] tuples";
1260
+ } else if (serializer === "ListStringSerializable") {
1261
+ return "array of strings";
1262
+ } else if (serializer === "FileSerializable") {
1263
+ return "array of files or single file";
1264
+ }
1265
+ return type.description;
1266
+ }
1267
+ function transform_api_info(api_info, config, api_map) {
1268
+ const new_data = {
1269
+ named_endpoints: {},
1270
+ unnamed_endpoints: {}
1271
+ };
1272
+ for (const key in api_info) {
1273
+ const cat = api_info[key];
1274
+ for (const endpoint in cat) {
1275
+ const dep_index = config.dependencies[endpoint] ? endpoint : api_map[endpoint.replace("/", "")];
1276
+ const info = cat[endpoint];
1277
+ new_data[key][endpoint] = {};
1278
+ new_data[key][endpoint].parameters = {};
1279
+ new_data[key][endpoint].returns = {};
1280
+ new_data[key][endpoint].type = config.dependencies[dep_index].types;
1281
+ new_data[key][endpoint].parameters = info.parameters.map(
1282
+ ({ label, component, type, serializer }) => ({
1283
+ label,
1284
+ component,
1285
+ type: get_type(type, component, serializer, "parameter"),
1286
+ description: get_description(type, serializer)
1287
+ })
1288
+ );
1289
+ new_data[key][endpoint].returns = info.returns.map(
1290
+ ({ label, component, type, serializer }) => ({
1291
+ label,
1292
+ component,
1293
+ type: get_type(type, component, serializer, "return"),
1294
+ description: get_description(type, serializer)
1295
+ })
1296
+ );
1297
+ }
1298
+ }
1299
+ return new_data;
1300
+ }
1301
+ async function get_jwt(space, token) {
1302
+ try {
1303
+ const r = await fetch(`https://huggingface.co/api/spaces/${space}/jwt`, {
1304
+ headers: {
1305
+ Authorization: `Bearer ${token}`
1306
+ }
1307
+ });
1308
+ const jwt = (await r.json()).token;
1309
+ return jwt || false;
1310
+ } catch (e) {
1311
+ console.error(e);
1312
+ return false;
1313
+ }
1314
+ }
1315
+ function update_object(object, newValue, stack) {
1316
+ while (stack.length > 1) {
1317
+ object = object[stack.shift()];
1318
+ }
1319
+ object[stack.shift()] = newValue;
1320
+ }
1321
+ async function walk_and_store_blobs(param, type = void 0, path = [], root = false, api_info = void 0) {
1322
+ if (Array.isArray(param)) {
1323
+ let blob_refs = [];
1324
+ await Promise.all(
1325
+ param.map(async (v, i) => {
1326
+ var _a;
1327
+ let new_path = path.slice();
1328
+ new_path.push(i);
1329
+ const array_refs = await walk_and_store_blobs(
1330
+ param[i],
1331
+ root ? ((_a = api_info == null ? void 0 : api_info.parameters[i]) == null ? void 0 : _a.component) || void 0 : type,
1332
+ new_path,
1333
+ false,
1334
+ api_info
1335
+ );
1336
+ blob_refs = blob_refs.concat(array_refs);
1337
+ })
1338
+ );
1339
+ return blob_refs;
1340
+ } else if (globalThis.Buffer && param instanceof globalThis.Buffer) {
1341
+ const is_image = type === "Image";
1342
+ return [
1343
+ {
1344
+ path,
1345
+ blob: is_image ? false : new NodeBlob([param]),
1346
+ type
1347
+ }
1348
+ ];
1349
+ } else if (typeof param === "object") {
1350
+ let blob_refs = [];
1351
+ for (let key in param) {
1352
+ if (param.hasOwnProperty(key)) {
1353
+ let new_path = path.slice();
1354
+ new_path.push(key);
1355
+ blob_refs = blob_refs.concat(
1356
+ await walk_and_store_blobs(
1357
+ param[key],
1358
+ void 0,
1359
+ new_path,
1360
+ false,
1361
+ api_info
1362
+ )
1363
+ );
1364
+ }
1365
+ }
1366
+ return blob_refs;
1367
+ }
1368
+ return [];
1369
+ }
1370
+ function skip_queue(id, config) {
1371
+ var _a, _b, _c, _d;
1372
+ return !(((_b = (_a = config == null ? void 0 : config.dependencies) == null ? void 0 : _a[id]) == null ? void 0 : _b.queue) === null ? config.enable_queue : (_d = (_c = config == null ? void 0 : config.dependencies) == null ? void 0 : _c[id]) == null ? void 0 : _d.queue) || false;
1373
+ }
1374
+ async function resolve_config(fetch_implementation, endpoint, token) {
1375
+ const headers = {};
1376
+ if (token) {
1377
+ headers.Authorization = `Bearer ${token}`;
1378
+ }
1379
+ if (typeof window !== "undefined" && window.gradio_config && location.origin !== "http://localhost:9876" && !window.gradio_config.dev_mode) {
1380
+ const path = window.gradio_config.root;
1381
+ const config = window.gradio_config;
1382
+ config.root = resolve_root(endpoint, config.root, false);
1383
+ return { ...config, path };
1384
+ } else if (endpoint) {
1385
+ let response = await fetch_implementation(`${endpoint}/config`, {
1386
+ headers
1387
+ });
1388
+ if (response.status === 200) {
1389
+ const config = await response.json();
1390
+ config.path = config.path ?? "";
1391
+ config.root = endpoint;
1392
+ return config;
1393
+ }
1394
+ throw new Error("Could not get config.");
1395
+ }
1396
+ throw new Error("No config or app endpoint found");
1397
+ }
1398
+ async function check_space_status(id, type, status_callback) {
1399
+ let endpoint = type === "subdomain" ? `https://huggingface.co/api/spaces/by-subdomain/${id}` : `https://huggingface.co/api/spaces/${id}`;
1400
+ let response;
1401
+ let _status;
1402
+ try {
1403
+ response = await fetch(endpoint);
1404
+ _status = response.status;
1405
+ if (_status !== 200) {
1406
+ throw new Error();
1407
+ }
1408
+ response = await response.json();
1409
+ } catch (e) {
1410
+ status_callback({
1411
+ status: "error",
1412
+ load_status: "error",
1413
+ message: "Could not get space status",
1414
+ detail: "NOT_FOUND"
1415
+ });
1416
+ return;
1417
+ }
1418
+ if (!response || _status !== 200)
1419
+ return;
1420
+ const {
1421
+ runtime: { stage },
1422
+ id: space_name
1423
+ } = response;
1424
+ switch (stage) {
1425
+ case "STOPPED":
1426
+ case "SLEEPING":
1427
+ status_callback({
1428
+ status: "sleeping",
1429
+ load_status: "pending",
1430
+ message: "Space is asleep. Waking it up...",
1431
+ detail: stage
1432
+ });
1433
+ setTimeout(() => {
1434
+ check_space_status(id, type, status_callback);
1435
+ }, 1e3);
1436
+ break;
1437
+ case "PAUSED":
1438
+ status_callback({
1439
+ status: "paused",
1440
+ load_status: "error",
1441
+ message: "This space has been paused by the author. If you would like to try this demo, consider duplicating the space.",
1442
+ detail: stage,
1443
+ discussions_enabled: await discussions_enabled(space_name)
1444
+ });
1445
+ break;
1446
+ case "RUNNING":
1447
+ case "RUNNING_BUILDING":
1448
+ status_callback({
1449
+ status: "running",
1450
+ load_status: "complete",
1451
+ message: "",
1452
+ detail: stage
1453
+ });
1454
+ break;
1455
+ case "BUILDING":
1456
+ status_callback({
1457
+ status: "building",
1458
+ load_status: "pending",
1459
+ message: "Space is building...",
1460
+ detail: stage
1461
+ });
1462
+ setTimeout(() => {
1463
+ check_space_status(id, type, status_callback);
1464
+ }, 1e3);
1465
+ break;
1466
+ default:
1467
+ status_callback({
1468
+ status: "space_error",
1469
+ load_status: "error",
1470
+ message: "This space is experiencing an issue.",
1471
+ detail: stage,
1472
+ discussions_enabled: await discussions_enabled(space_name)
1473
+ });
1474
+ break;
1475
+ }
1476
+ }
1477
+ function handle_message(data, last_status) {
1478
+ const queue = true;
1479
+ switch (data.msg) {
1480
+ case "send_data":
1481
+ return { type: "data" };
1482
+ case "send_hash":
1483
+ return { type: "hash" };
1484
+ case "queue_full":
1485
+ return {
1486
+ type: "update",
1487
+ status: {
1488
+ queue,
1489
+ message: QUEUE_FULL_MSG,
1490
+ stage: "error",
1491
+ code: data.code,
1492
+ success: data.success
1493
+ }
1494
+ };
1495
+ case "heartbeat":
1496
+ return {
1497
+ type: "heartbeat"
1498
+ };
1499
+ case "unexpected_error":
1500
+ return {
1501
+ type: "unexpected_error",
1502
+ status: {
1503
+ queue,
1504
+ message: data.message,
1505
+ stage: "error",
1506
+ success: false
1507
+ }
1508
+ };
1509
+ case "estimation":
1510
+ return {
1511
+ type: "update",
1512
+ status: {
1513
+ queue,
1514
+ stage: last_status || "pending",
1515
+ code: data.code,
1516
+ size: data.queue_size,
1517
+ position: data.rank,
1518
+ eta: data.rank_eta,
1519
+ success: data.success
1520
+ }
1521
+ };
1522
+ case "progress":
1523
+ return {
1524
+ type: "update",
1525
+ status: {
1526
+ queue,
1527
+ stage: "pending",
1528
+ code: data.code,
1529
+ progress_data: data.progress_data,
1530
+ success: data.success
1531
+ }
1532
+ };
1533
+ case "log":
1534
+ return { type: "log", data };
1535
+ case "process_generating":
1536
+ return {
1537
+ type: "generating",
1538
+ status: {
1539
+ queue,
1540
+ message: !data.success ? data.output.error : null,
1541
+ stage: data.success ? "generating" : "error",
1542
+ code: data.code,
1543
+ progress_data: data.progress_data,
1544
+ eta: data.average_duration
1545
+ },
1546
+ data: data.success ? data.output : null
1547
+ };
1548
+ case "process_completed":
1549
+ if ("error" in data.output) {
1550
+ return {
1551
+ type: "update",
1552
+ status: {
1553
+ queue,
1554
+ message: data.output.error,
1555
+ stage: "error",
1556
+ code: data.code,
1557
+ success: data.success
1558
+ }
1559
+ };
1560
+ }
1561
+ return {
1562
+ type: "complete",
1563
+ status: {
1564
+ queue,
1565
+ message: !data.success ? data.output.error : void 0,
1566
+ stage: data.success ? "complete" : "error",
1567
+ code: data.code,
1568
+ progress_data: data.progress_data
1569
+ },
1570
+ data: data.success ? data.output : null
1571
+ };
1572
+ case "process_starts":
1573
+ return {
1574
+ type: "update",
1575
+ status: {
1576
+ queue,
1577
+ stage: "pending",
1578
+ code: data.code,
1579
+ size: data.rank,
1580
+ position: 0,
1581
+ success: data.success,
1582
+ eta: data.eta
1583
+ }
1584
+ };
1585
+ }
1586
+ return { type: "none", status: { stage: "error", queue } };
1587
+ }
1588
+ export {
1589
+ FileData,
1590
+ api_factory,
1591
+ client,
1592
+ duplicate,
1593
+ get_fetchable_url_or_file,
1594
+ post_data,
1595
+ prepare_files,
1596
+ upload,
1597
+ upload_files
1598
+ };
node_modules/@gradio/client/dist/types.d.ts ADDED
@@ -0,0 +1,105 @@
1
+ export interface Config {
2
+ auth_required: boolean | undefined;
3
+ auth_message: string;
4
+ components: any[];
5
+ css: string | null;
6
+ js: string | null;
7
+ head: string | null;
8
+ dependencies: any[];
9
+ dev_mode: boolean;
10
+ enable_queue: boolean;
11
+ layout: any;
12
+ mode: "blocks" | "interface";
13
+ root: string;
14
+ root_url?: string;
15
+ theme: string;
16
+ title: string;
17
+ version: string;
18
+ space_id: string | null;
19
+ is_colab: boolean;
20
+ show_api: boolean;
21
+ stylesheets: string[];
22
+ path: string;
23
+ protocol?: "sse_v2" | "sse_v1" | "sse" | "ws";
24
+ }
25
+ export interface Payload {
26
+ data: unknown[];
27
+ fn_index?: number;
28
+ event_data?: unknown;
29
+ time?: Date;
30
+ }
31
+ export interface PostResponse {
32
+ error?: string;
33
+ [x: string]: any;
34
+ }
35
+ export interface UploadResponse {
36
+ error?: string;
37
+ files?: string[];
38
+ }
39
+ export interface Status {
40
+ queue: boolean;
41
+ code?: string;
42
+ success?: boolean;
43
+ stage: "pending" | "error" | "complete" | "generating";
44
+ broken?: boolean;
45
+ size?: number;
46
+ position?: number;
47
+ eta?: number;
48
+ message?: string;
49
+ progress_data?: {
50
+ progress: number | null;
51
+ index: number | null;
52
+ length: number | null;
53
+ unit: string | null;
54
+ desc: string | null;
55
+ }[];
56
+ time?: Date;
57
+ }
58
+ export interface LogMessage {
59
+ log: string;
60
+ level: "warning" | "info";
61
+ }
62
+ export interface SpaceStatusNormal {
63
+ status: "sleeping" | "running" | "building" | "error" | "stopped";
64
+ detail: "SLEEPING" | "RUNNING" | "RUNNING_BUILDING" | "BUILDING" | "NOT_FOUND";
65
+ load_status: "pending" | "error" | "complete" | "generating";
66
+ message: string;
67
+ }
68
+ export interface SpaceStatusError {
69
+ status: "space_error" | "paused";
70
+ detail: "NO_APP_FILE" | "CONFIG_ERROR" | "BUILD_ERROR" | "RUNTIME_ERROR" | "PAUSED";
71
+ load_status: "error";
72
+ message: string;
73
+ discussions_enabled: boolean;
74
+ }
75
+ export type SpaceStatus = SpaceStatusNormal | SpaceStatusError;
76
+ export type status_callback_function = (a: Status) => void;
77
+ export type SpaceStatusCallback = (a: SpaceStatus) => void;
78
+ export type EventType = "data" | "status" | "log";
79
+ export interface EventMap {
80
+ data: Payload;
81
+ status: Status;
82
+ log: LogMessage;
83
+ }
84
+ export type Event<K extends EventType> = {
85
+ [P in K]: EventMap[P] & {
86
+ type: P;
87
+ endpoint: string;
88
+ fn_index: number;
89
+ };
90
+ }[K];
91
+ export type EventListener<K extends EventType> = (event: Event<K>) => void;
92
+ export type ListenerMap<K extends EventType> = {
93
+ [P in K]?: EventListener<K>[];
94
+ };
95
+ export interface FileData {
96
+ name: string;
97
+ orig_name?: string;
98
+ size?: number;
99
+ data: string;
100
+ blob?: File;
101
+ is_file?: boolean;
102
+ mime_type?: string;
103
+ alt_text?: string;
104
+ }
105
+ //# sourceMappingURL=types.d.ts.map
node_modules/@gradio/client/dist/types.d.ts.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,MAAM;IACtB,aAAa,EAAE,OAAO,GAAG,SAAS,CAAC;IACnC,YAAY,EAAE,MAAM,CAAC;IACrB,UAAU,EAAE,GAAG,EAAE,CAAC;IAClB,GAAG,EAAE,MAAM,GAAG,IAAI,CAAC;IACnB,EAAE,EAAE,MAAM,GAAG,IAAI,CAAC;IAClB,IAAI,EAAE,MAAM,GAAG,IAAI,CAAC;IACpB,YAAY,EAAE,GAAG,EAAE,CAAC;IACpB,QAAQ,EAAE,OAAO,CAAC;IAClB,YAAY,EAAE,OAAO,CAAC;IACtB,MAAM,EAAE,GAAG,CAAC;IACZ,IAAI,EAAE,QAAQ,GAAG,WAAW,CAAC;IAC7B,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,OAAO,EAAE,MAAM,CAAC;IAChB,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,QAAQ,EAAE,OAAO,CAAC;IAClB,QAAQ,EAAE,OAAO,CAAC;IAClB,WAAW,EAAE,MAAM,EAAE,CAAC;IACtB,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,QAAQ,GAAG,QAAQ,GAAG,KAAK,GAAG,IAAI,CAAC;CAC9C;AAED,MAAM,WAAW,OAAO;IACvB,IAAI,EAAE,OAAO,EAAE,CAAC;IAChB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB,IAAI,CAAC,EAAE,IAAI,CAAC;CACZ;AAED,MAAM,WAAW,YAAY;IAC5B,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,CAAC,CAAC,EAAE,MAAM,GAAG,GAAG,CAAC;CACjB;AACD,MAAM,WAAW,cAAc;IAC9B,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,KAAK,CAAC,EAAE,MAAM,EAAE,CAAC;CACjB;AAED,MAAM,WAAW,MAAM;IACtB,KAAK,EAAE,OAAO,CAAC;IACf,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,KAAK,EAAE,SAAS,GAAG,OAAO,GAAG,UAAU,GAAG,YAAY,CAAC;IACvD,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,aAAa,CAAC,EAAE;QACf,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;QACxB,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;QACrB,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;QACtB,IAAI,EAAE,MAAM,GAAG,IAAI,CAAC;QACpB,IAAI,EAAE,MAAM,GAAG,IAAI,CAAC;KACpB,EAAE,CAAC;IACJ,IAAI,CAAC,EAAE,IAAI,CAAC;CACZ;AAED,MAAM,WAAW,UAAU;IAC1B,GAAG,EAAE,MAAM,CAAC;IACZ,KAAK,EAAE,SAAS,GAAG,MAAM,CAAC;CAC1B;AAED,MAAM,WAAW,iBAAiB;IACjC,MAAM,EAAE,UAAU,GAAG,SAAS,GAAG,UAAU,GAAG,OAAO,GAAG,SAAS,CAAC;IAClE,MAAM,EACH,UAAU,GACV,SAAS,GACT,kBAAkB,GAClB,UAAU,GACV,WAAW,CAAC;IACf,WAAW,EAAE,SAAS,GAAG,OAAO,GAAG,UAAU,GAAG,YAAY,CAAC;IAC7D,OAAO,EAAE,MAAM,CAAC;CAChB;AACD,MAAM,WAAW,gBAAgB;IAChC,MAAM,EAAE,aAAa,GAAG,QAAQ,CAAC;IACjC,MAAM,EACH,aAAa,GACb,cAAc,GACd,aAAa,GACb,eAAe,GACf,QAAQ,CAAC;IACZ,WAAW,EAAE,OAAO,CAAC;IACrB,OAAO,EAAE,MAAM,CAAC;IAChB,mBAAmB,EAAE,OAAO,CAAC;CAC7B;AACD,MAAM,MAAM,WAAW,GAAG,iBAAiB,GAAG,gBAAgB,CAAC;AAE/D,MAAM,MAAM,wBAAwB,GAAG,CAAC,CAAC,EAAE,MAAM,KAAK,IAAI,CAAC;AAC3D,MAAM,MAAM,mBAAmB,GAAG,CAAC,CAAC,EAAE,WAAW,KAAK,IAAI,CAAC;AAE3D,MAAM,MAAM,SAAS,GAAG,MAAM,GAAG,QAAQ,GAAG,KAAK,CAAC;AAElD,MAAM,WAAW,QAAQ;IACxB,IAAI,EAAE,OAAO,CAAC;IACd,MAAM,EAAE,MAAM,CAAC;IACf,GAAG,EAAE,UAAU,CAAC;CAChB;AAED,MAAM,MAAM,KAAK,CAAC,CAAC,SAAS,SAAS,IAAI;KACvC,CAAC,IAAI,CAAC,GAAG,QAAQ,CAAC,CAAC,CAAC,GAAG;QAAE,IAAI,EAAE,CAAC,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAC;QAAC,QAAQ,EAAE,MAAM,CAAA;KAAE;CACvE,CAAC,CAAC,CAAC,CAAC;AACL,MAAM,MAAM,aAAa,CAAC,CAAC,SAAS,SAAS,IAAI,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC,CAAC,KAAK,IAAI,CAAC;AAC3E,MAAM,MAAM,WAAW,CAAC,CAAC,SAAS,SAAS,IAAI;KAC7C,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,aAAa,CAAC,CAAC,CAAC,EAAE;CAC7B,CAAC;AACF,MAAM,WAAW,QAAQ;IACxB,IAAI,EAAE,MAAM,CAAC;IACb,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,CAAC,EAAE,IAAI,CAAC;IACZ,OAAO,CAAC,EAAE,OAAO,CAAC;IAClB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,QAAQ,CAAC,EAAE,MAAM,CAAC;CAClB"}
node_modules/@gradio/client/dist/upload.d.ts ADDED
@@ -0,0 +1,25 @@
1
+ import { upload_files } from "./client";
2
+ export declare function get_fetchable_url_or_file(path: string | null, server_url: string, proxy_url: string | null): string;
3
+ export declare function upload(file_data: FileData[], root: string, upload_id?: string, upload_fn?: typeof upload_files): Promise<(FileData | null)[] | null>;
4
+ export declare function prepare_files(files: File[], is_stream?: boolean): Promise<FileData[]>;
5
+ export declare class FileData {
6
+ path: string;
7
+ url?: string;
8
+ orig_name?: string;
9
+ size?: number;
10
+ blob?: File;
11
+ is_stream?: boolean;
12
+ mime_type?: string;
13
+ alt_text?: string;
14
+ constructor({ path, url, orig_name, size, blob, is_stream, mime_type, alt_text }: {
15
+ path: string;
16
+ url?: string;
17
+ orig_name?: string;
18
+ size?: number;
19
+ blob?: File;
20
+ is_stream?: boolean;
21
+ mime_type?: string;
22
+ alt_text?: string;
23
+ });
24
+ }
25
+ //# sourceMappingURL=upload.d.ts.map
node_modules/@gradio/client/dist/upload.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"upload.d.ts","sourceRoot":"","sources":["../src/upload.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,MAAM,UAAU,CAAC;AAWxC,wBAAgB,yBAAyB,CACxC,IAAI,EAAE,MAAM,GAAG,IAAI,EACnB,UAAU,EAAE,MAAM,EAClB,SAAS,EAAE,MAAM,GAAG,IAAI,GACtB,MAAM,CAUR;AAED,wBAAsB,MAAM,CAC3B,SAAS,EAAE,QAAQ,EAAE,EACrB,IAAI,EAAE,MAAM,EACZ,SAAS,CAAC,EAAE,MAAM,EAClB,SAAS,GAAE,OAAO,YAA2B,GAC3C,OAAO,CAAC,CAAC,QAAQ,GAAG,IAAI,CAAC,EAAE,GAAG,IAAI,CAAC,CA2BrC;AAED,wBAAsB,aAAa,CAClC,KAAK,EAAE,IAAI,EAAE,EACb,SAAS,CAAC,EAAE,OAAO,GACjB,OAAO,CAAC,QAAQ,EAAE,CAAC,CAYrB;AAED,qBAAa,QAAQ;IACpB,IAAI,EAAE,MAAM,CAAC;IACb,GAAG,CAAC,EAAE,MAAM,CAAC;IACb,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,IAAI,CAAC;IACZ,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,QAAQ,CAAC,EAAE,MAAM,CAAC;gBAEN,EACX,IAAI,EACJ,GAAG,EACH,SAAS,EACT,IAAI,EACJ,IAAI,EACJ,SAAS,EACT,SAAS,EACT,QAAQ,EACR,EAAE;QACF,IAAI,EAAE,MAAM,CAAC;QACb,GAAG,CAAC,EAAE,MAAM,CAAC;QACb,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,IAAI,CAAC,EAAE,MAAM,CAAC;QACd,IAAI,CAAC,EAAE,IAAI,CAAC;QACZ,SAAS,CAAC,EAAE,OAAO,CAAC;QACpB,SAAS,CAAC,EAAE,MAAM,CAAC;QACnB,QAAQ,CAAC,EAAE,MAAM,CAAC;KAClB;CAUD"}
node_modules/@gradio/client/dist/utils.d.ts ADDED
@@ -0,0 +1,33 @@
+ import type { Config } from "./types.js";
+ /**
+ * This function is used to resolve the URL for making requests when the app has a root path.
+ * The root path could be a path suffix like "/app" which is appended to the end of the base URL. Or
+ * it could be a full URL like "https://abidlabs-test-client-replica--gqf2x.hf.space" which is used when hosting
+ * Gradio apps on Hugging Face Spaces.
+ * @param {string} base_url The base URL at which the Gradio server is hosted
+ * @param {string} root_path The root path, which could be a path suffix (e.g. mounted in FastAPI app) or a full URL (e.g. hosted on Hugging Face Spaces)
+ * @param {boolean} prioritize_base Whether to prioritize the base URL over the root path. This is used when both the base path and root paths are full URLs. For example, for fetching files the root path should be prioritized, but for making requests, the base URL should be prioritized.
+ * @returns {string} the resolved URL
+ */
+ export declare function resolve_root(base_url: string, root_path: string, prioritize_base: boolean): string;
+ export declare function determine_protocol(endpoint: string): {
+ ws_protocol: "ws" | "wss";
+ http_protocol: "http:" | "https:";
+ host: string;
+ };
+ export declare const RE_SPACE_NAME: RegExp;
+ export declare const RE_SPACE_DOMAIN: RegExp;
+ export declare function process_endpoint(app_reference: string, token?: `hf_${string}`): Promise<{
+ space_id: string | false;
+ host: string;
+ ws_protocol: "ws" | "wss";
+ http_protocol: "http:" | "https:";
+ }>;
+ export declare function map_names_to_ids(fns: Config["dependencies"]): Record<string, number>;
+ export declare function discussions_enabled(space_id: string): Promise<boolean>;
+ export declare function get_space_hardware(space_id: string, token: `hf_${string}`): Promise<(typeof hardware_types)[number]>;
+ export declare function set_space_hardware(space_id: string, new_hardware: (typeof hardware_types)[number], token: `hf_${string}`): Promise<(typeof hardware_types)[number]>;
+ export declare function set_space_timeout(space_id: string, timeout: number, token: `hf_${string}`): Promise<number>;
+ export declare const hardware_types: readonly ["cpu-basic", "cpu-upgrade", "t4-small", "t4-medium", "a10g-small", "a10g-large", "a100-large"];
+ export declare function apply_diff(obj: any, diff: [string, (number | string)[], any][]): any;
+ //# sourceMappingURL=utils.d.ts.map
node_modules/@gradio/client/dist/utils.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,YAAY,CAAC;AAEzC;;;;;;;;;GASG;AACH,wBAAgB,YAAY,CAC3B,QAAQ,EAAE,MAAM,EAChB,SAAS,EAAE,MAAM,EACjB,eAAe,EAAE,OAAO,GACtB,MAAM,CAKR;AAED,wBAAgB,kBAAkB,CAAC,QAAQ,EAAE,MAAM,GAAG;IACrD,WAAW,EAAE,IAAI,GAAG,KAAK,CAAC;IAC1B,aAAa,EAAE,OAAO,GAAG,QAAQ,CAAC;IAClC,IAAI,EAAE,MAAM,CAAC;CACb,CAgCA;AAED,eAAO,MAAM,aAAa,QAAqB,CAAC;AAChD,eAAO,MAAM,eAAe,QAAwB,CAAC;AACrD,wBAAsB,gBAAgB,CACrC,aAAa,EAAE,MAAM,EACrB,KAAK,CAAC,EAAE,MAAM,MAAM,EAAE,GACpB,OAAO,CAAC;IACV,QAAQ,EAAE,MAAM,GAAG,KAAK,CAAC;IACzB,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,EAAE,IAAI,GAAG,KAAK,CAAC;IAC1B,aAAa,EAAE,OAAO,GAAG,QAAQ,CAAC;CAClC,CAAC,CA4CD;AAED,wBAAgB,gBAAgB,CAC/B,GAAG,EAAE,MAAM,CAAC,cAAc,CAAC,GACzB,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAQxB;AAID,wBAAsB,mBAAmB,CAAC,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC,CAe5E;AAED,wBAAsB,kBAAkB,CACvC,QAAQ,EAAE,MAAM,EAChB,KAAK,EAAE,MAAM,MAAM,EAAE,GACnB,OAAO,CAAC,CAAC,OAAO,cAAc,CAAC,CAAC,MAAM,CAAC,CAAC,CAqB1C;AAED,wBAAsB,kBAAkB,CACvC,QAAQ,EAAE,MAAM,EAChB,YAAY,EAAE,CAAC,OAAO,cAAc,CAAC,CAAC,MAAM,CAAC,EAC7C,KAAK,EAAE,MAAM,MAAM,EAAE,GACnB,OAAO,CAAC,CAAC,OAAO,cAAc,CAAC,CAAC,MAAM,CAAC,CAAC,CAuB1C;AAED,wBAAsB,iBAAiB,CACtC,QAAQ,EAAE,MAAM,EAChB,OAAO,EAAE,MAAM,EACf,KAAK,EAAE,MAAM,MAAM,EAAE,GACnB,OAAO,CAAC,MAAM,CAAC,CAuBjB;AAED,eAAO,MAAM,cAAc,0GAQjB,CAAC;AAkDX,wBAAgB,UAAU,CACzB,GAAG,EAAE,GAAG,EACR,IAAI,EAAE,CAAC,MAAM,EAAE,CAAC,MAAM,GAAG,MAAM,CAAC,EAAE,EAAE,GAAG,CAAC,EAAE,GACxC,GAAG,CAML"}
node_modules/@gradio/client/dist/wrapper-6f348d45.js ADDED
The diff for this file is too large to render. See raw diff
 
node_modules/@gradio/client/package.json ADDED
@@ -0,0 +1,33 @@
+ {
+ "name": "@gradio/client",
+ "version": "0.12.0",
+ "description": "Gradio API client",
+ "type": "module",
+ "main": "dist/index.js",
+ "author": "",
+ "license": "ISC",
+ "exports": {
+ ".": {
+ "import": "./dist/index.js"
+ },
+ "./package.json": "./package.json"
+ },
+ "dependencies": {
+ "bufferutil": "^4.0.7",
+ "semiver": "^1.1.0",
+ "ws": "^8.13.0"
+ },
+ "devDependencies": {
+ "@types/ws": "^8.5.4",
+ "esbuild": "^0.19.0"
+ },
+ "engines": {
+ "node": ">=18.0.0"
+ },
+ "main_changeset": true,
+ "scripts": {
+ "bundle": "vite build --ssr",
+ "generate_types": "tsc",
+ "build": "pnpm bundle && pnpm generate_types"
+ }
+ }
node_modules/@gradio/client/src/client.node-test.ts ADDED
@@ -0,0 +1,172 @@
1
+ import { test, describe, assert } from "vitest";
2
+ import { readFileSync } from "fs";
3
+ import { join, dirname } from "path";
4
+ import { fileURLToPath } from "url";
5
+ import { Blob } from "node:buffer";
6
+
7
+ const __dirname = dirname(fileURLToPath(import.meta.url));
8
+ const image_path = join(
9
+ __dirname,
10
+ "..",
11
+ "..",
12
+ "..",
13
+ "demo",
14
+ "kitchen_sink",
15
+ "files",
16
+ "lion.jpg"
17
+ );
18
+
19
+ import { walk_and_store_blobs, client, handle_blob } from "./client";
20
+
21
+ describe.skip("extract blob parts", () => {
22
+ test("convert Buffer to Blob", async () => {
23
+ const image = readFileSync(image_path);
24
+ await client("gradio/hello_world_main");
25
+ const parts = walk_and_store_blobs({
26
+ data: {
27
+ image
28
+ }
29
+ });
30
+
31
+ assert.isTrue(parts[0].blob instanceof Blob);
32
+ });
33
+
34
+ test("leave node Blob as Blob", async () => {
35
+ const image = new Blob([readFileSync(image_path)]);
36
+
37
+ await client("gradio/hello_world_main");
38
+ const parts = walk_and_store_blobs({
39
+ data: {
40
+ image
41
+ }
42
+ });
43
+
44
+ assert.isTrue(parts[0].blob instanceof Blob);
45
+ });
46
+
47
+ test("handle deep structures", async () => {
48
+ const image = new Blob([readFileSync(image_path)]);
49
+
50
+ await client("gradio/hello_world_main");
51
+ const parts = walk_and_store_blobs({
52
+ a: {
53
+ b: {
54
+ data: {
55
+ image
56
+ }
57
+ }
58
+ }
59
+ });
60
+
61
+ assert.isTrue(parts[0].blob instanceof Blob);
62
+ });
63
+
64
+ test("handle deep structures with arrays", async () => {
65
+ const image = new Blob([readFileSync(image_path)]);
66
+
67
+ await client("gradio/hello_world_main");
68
+ const parts = walk_and_store_blobs({
69
+ a: [
70
+ {
71
+ b: [
72
+ {
73
+ data: [
74
+ {
75
+ image
76
+ }
77
+ ]
78
+ }
79
+ ]
80
+ }
81
+ ]
82
+ });
83
+
84
+ assert.isTrue(parts[0].blob instanceof Blob);
85
+ });
86
+
87
+ test("handle deep structures with arrays 2", async () => {
88
+ const image = new Blob([readFileSync(image_path)]);
89
+
90
+ await client("gradio/hello_world_main");
91
+ const obj = {
92
+ a: [
93
+ {
94
+ b: [
95
+ {
96
+ data: [[image], image, [image, [image]]]
97
+ }
98
+ ]
99
+ }
100
+ ]
101
+ };
102
+ const parts = walk_and_store_blobs(obj);
103
+
104
+ function map_path(
105
+ obj: Record<string, any>,
106
+ parts: { path: string[]; blob: any }[]
107
+ ) {
108
+ const { path, blob } = parts[parts.length - 1];
109
+ let ref = obj;
110
+ path.forEach((p) => (ref = ref[p]));
111
+
112
+ return ref === blob;
113
+ }
114
+
115
+ assert.isTrue(parts[0].blob instanceof Blob);
116
+ // assert.isTrue(map_path(obj, parts));
117
+ });
118
+ });
119
+
120
+ describe("handle_blob", () => {
121
+ test("handle blobs", async () => {
122
+ const image = new Blob([readFileSync(image_path)]);
123
+
124
+ const app = await client("gradio/hello_world_main");
125
+ const obj = [
126
+ {
127
+ a: [
128
+ {
129
+ b: [
130
+ {
131
+ data: [[image], image, [image, [image]]]
132
+ }
133
+ ]
134
+ }
135
+ ]
136
+ }
137
+ ];
138
+
139
+ const parts = await handle_blob(app.config.root, obj, undefined);
140
+ //@ts-ignore
141
+ // assert.isString(parts.data[0].a[0].b[0].data[0][0]);
142
+ });
143
+ });
144
+
145
+ describe.skip("private space", () => {
146
+ test("can access a private space", async () => {
147
+ const image = new Blob([readFileSync(image_path)]);
148
+
149
+ const app = await client("pngwn/hello_world", {
150
+ hf_token: "hf_"
151
+ });
152
+
153
+ console.log(app);
154
+ const obj = [
155
+ {
156
+ a: [
157
+ {
158
+ b: [
159
+ {
160
+ data: [[image], image, [image, [image]]]
161
+ }
162
+ ]
163
+ }
164
+ ]
165
+ }
166
+ ];
167
+
168
+ const parts = await handle_blob(app.config.root, obj, "hf_");
169
+ //@ts-ignore
170
+ assert.isString(parts.data[0].a[0].b[0].data[0][0]);
171
+ });
172
+ });
node_modules/@gradio/client/src/client.ts ADDED
@@ -0,0 +1,1702 @@
1
+ //@ts-nocheck
2
+
3
+ import semiver from "semiver";
4
+
5
+ import {
6
+ process_endpoint,
7
+ RE_SPACE_NAME,
8
+ map_names_to_ids,
9
+ discussions_enabled,
10
+ get_space_hardware,
11
+ set_space_hardware,
12
+ set_space_timeout,
13
+ hardware_types,
14
+ resolve_root,
15
+ apply_diff
16
+ } from "./utils.js";
17
+
18
+ import type {
19
+ EventType,
20
+ EventListener,
21
+ ListenerMap,
22
+ Event,
23
+ Payload,
24
+ PostResponse,
25
+ UploadResponse,
26
+ Status,
27
+ SpaceStatus,
28
+ SpaceStatusCallback
29
+ } from "./types.js";
30
+
31
+ import { FileData } from "./upload";
32
+
33
+ import type { Config } from "./types.js";
34
+
35
+ type event = <K extends EventType>(
36
+ eventType: K,
37
+ listener: EventListener<K>
38
+ ) => SubmitReturn;
39
+ type predict = (
40
+ endpoint: string | number,
41
+ data?: unknown[],
42
+ event_data?: unknown
43
+ ) => Promise<unknown>;
44
+
45
+ type client_return = {
46
+ predict: predict;
47
+ config: Config;
48
+ submit: (
49
+ endpoint: string | number,
50
+ data?: unknown[],
51
+ event_data?: unknown,
52
+ trigger_id?: number | null
53
+ ) => SubmitReturn;
54
+ component_server: (
55
+ component_id: number,
56
+ fn_name: string,
57
+ data: unknown[]
58
+ ) => any;
59
+ view_api: (c?: Config) => Promise<ApiInfo<JsApiData>>;
60
+ };
61
+
62
+ type SubmitReturn = {
63
+ on: event;
64
+ off: event;
65
+ cancel: () => Promise<void>;
66
+ destroy: () => void;
67
+ };
68
+
69
+ const QUEUE_FULL_MSG = "This application is too busy. Keep trying!";
70
+ const BROKEN_CONNECTION_MSG = "Connection errored out.";
71
+
72
+ export let NodeBlob;
73
+
74
+ export async function duplicate(
75
+ app_reference: string,
76
+ options: {
77
+ hf_token: `hf_${string}`;
78
+ private?: boolean;
79
+ status_callback: SpaceStatusCallback;
80
+ hardware?: (typeof hardware_types)[number];
81
+ timeout?: number;
82
+ }
83
+ ): Promise<client_return> {
84
+ const { hf_token, private: _private, hardware, timeout } = options;
85
+
86
+ if (hardware && !hardware_types.includes(hardware)) {
87
+ throw new Error(
88
+ `Invalid hardware type provided. Valid types are: ${hardware_types
89
+ .map((v) => `"${v}"`)
90
+ .join(",")}.`
91
+ );
92
+ }
93
+ const headers = {
94
+ Authorization: `Bearer ${hf_token}`
95
+ };
96
+
97
+ const user = (
98
+ await (
99
+ await fetch(`https://huggingface.co/api/whoami-v2`, {
100
+ headers
101
+ })
102
+ ).json()
103
+ ).name;
104
+
105
+ const space_name = app_reference.split("/")[1];
106
+ const body: {
107
+ repository: string;
108
+ private?: boolean;
109
+ } = {
110
+ repository: `${user}/${space_name}`
111
+ };
112
+
113
+ if (_private) {
114
+ body.private = true;
115
+ }
116
+
117
+ try {
118
+ const response = await fetch(
119
+ `https://huggingface.co/api/spaces/${app_reference}/duplicate`,
120
+ {
121
+ method: "POST",
122
+ headers: { "Content-Type": "application/json", ...headers },
123
+ body: JSON.stringify(body)
124
+ }
125
+ );
126
+
127
+ if (response.status === 409) {
128
+ return client(`${user}/${space_name}`, options);
129
+ }
130
+ const duplicated_space = await response.json();
131
+
132
+ let original_hardware;
133
+
134
+ if (!hardware) {
135
+ original_hardware = await get_space_hardware(app_reference, hf_token);
136
+ }
137
+
138
+ const requested_hardware = hardware || original_hardware || "cpu-basic";
139
+ await set_space_hardware(
140
+ `${user}/${space_name}`,
141
+ requested_hardware,
142
+ hf_token
143
+ );
144
+
145
+ await set_space_timeout(`${user}/${space_name}`, timeout || 300, hf_token);
146
+ return client(duplicated_space.url, options);
147
+ } catch (e: any) {
148
+ throw new Error(e);
149
+ }
150
+ }
151
+
152
+ interface Client {
153
+ post_data: (
154
+ url: string,
155
+ body: unknown,
156
+ token?: `hf_${string}`
157
+ ) => Promise<[PostResponse, number]>;
158
+ upload_files: (
159
+ root: string,
160
+ files: File[],
161
+ token?: `hf_${string}`,
162
+ upload_id?: string
163
+ ) => Promise<UploadResponse>;
164
+ client: (
165
+ app_reference: string,
166
+ options: {
167
+ hf_token?: `hf_${string}`;
168
+ status_callback?: SpaceStatusCallback;
169
+ }
170
+ ) => Promise<client_return>;
171
+ handle_blob: (
172
+ endpoint: string,
173
+ data: unknown[],
174
+ api_info: ApiInfo<JsApiData>,
175
+ token?: `hf_${string}`
176
+ ) => Promise<unknown[]>;
177
+ }
178
+
179
+ export function api_factory(
180
+ fetch_implementation: typeof fetch,
181
+ EventSource_factory: (url: URL) => EventSource
182
+ ): Client {
183
+ return { post_data, upload_files, client, handle_blob };
184
+
185
+ async function post_data(
186
+ url: string,
187
+ body: unknown,
188
+ token?: `hf_${string}`
189
+ ): Promise<[PostResponse, number]> {
190
+ const headers: {
191
+ Authorization?: string;
192
+ "Content-Type": "application/json";
193
+ } = { "Content-Type": "application/json" };
194
+ if (token) {
195
+ headers.Authorization = `Bearer ${token}`;
196
+ }
197
+ try {
198
+ var response = await fetch_implementation(url, {
199
+ method: "POST",
200
+ body: JSON.stringify(body),
201
+ headers
202
+ });
203
+ } catch (e) {
204
+ return [{ error: BROKEN_CONNECTION_MSG }, 500];
205
+ }
206
+ let output: PostResponse;
207
+ let status: number;
208
+ try {
209
+ output = await response.json();
210
+ status = response.status;
211
+ } catch (e) {
212
+ output = { error: `Could not parse server response: ${e}` };
213
+ status = 500;
214
+ }
215
+ return [output, status];
216
+ }
217
+
218
+ async function upload_files(
219
+ root: string,
220
+ files: (Blob | File)[],
221
+ token?: `hf_${string}`,
222
+ upload_id?: string
223
+ ): Promise<UploadResponse> {
224
+ const headers: {
225
+ Authorization?: string;
226
+ } = {};
227
+ if (token) {
228
+ headers.Authorization = `Bearer ${token}`;
229
+ }
230
+ const chunkSize = 1000;
231
+ const uploadResponses = [];
232
+ for (let i = 0; i < files.length; i += chunkSize) {
233
+ const chunk = files.slice(i, i + chunkSize);
234
+ const formData = new FormData();
235
+ chunk.forEach((file) => {
236
+ formData.append("files", file);
237
+ });
238
+ try {
239
+ const upload_url = upload_id
240
+ ? `${root}/upload?upload_id=${upload_id}`
241
+ : `${root}/upload`;
242
+ var response = await fetch_implementation(upload_url, {
243
+ method: "POST",
244
+ body: formData,
245
+ headers
246
+ });
247
+ } catch (e) {
248
+ return { error: BROKEN_CONNECTION_MSG };
249
+ }
250
+ const output: UploadResponse["files"] = await response.json();
251
+ uploadResponses.push(...output);
252
+ }
253
+ return { files: uploadResponses };
254
+ }
255
+
256
+ async function client(
257
+ app_reference: string,
258
+ options: {
259
+ hf_token?: `hf_${string}`;
260
+ status_callback?: SpaceStatusCallback;
261
+ } = {}
262
+ ): Promise<client_return> {
263
+ return new Promise(async (res) => {
264
+ const { status_callback, hf_token } = options;
265
+ const return_obj = {
266
+ predict,
267
+ submit,
268
+ view_api,
269
+ component_server
270
+ };
271
+
272
+ if (
273
+ (typeof window === "undefined" || !("WebSocket" in window)) &&
274
+ !global.WebSocket
275
+ ) {
276
+ const ws = await import("ws");
277
+ NodeBlob = (await import("node:buffer")).Blob;
278
+ //@ts-ignore
279
+ global.WebSocket = ws.WebSocket;
280
+ }
281
+
282
+ const { ws_protocol, http_protocol, host, space_id } =
283
+ await process_endpoint(app_reference, hf_token);
284
+
285
+ const session_hash = Math.random().toString(36).substring(2);
286
+ const last_status: Record<string, Status["stage"]> = {};
287
+ let stream_open = false;
288
+ let pending_stream_messages: Record<string, any[]> = {}; // Event messages may be received by the SSE stream before the initial data POST request is complete. To resolve this race condition, we store the messages in a dictionary and process them when the POST request is complete.
289
+ let pending_diff_streams: Record<string, any[][]> = {};
290
+ let event_stream: EventSource | null = null;
291
+ const event_callbacks: Record<string, () => Promise<void>> = {};
292
+ const unclosed_events: Set<string> = new Set();
293
+ let config: Config;
294
+ let api_map: Record<string, number> = {};
295
+
296
+ let jwt: false | string = false;
297
+
298
+ if (hf_token && space_id) {
299
+ jwt = await get_jwt(space_id, hf_token);
300
+ }
301
+
302
+ async function config_success(_config: Config): Promise<client_return> {
303
+ config = _config;
304
+ api_map = map_names_to_ids(_config?.dependencies || []);
305
+ if (config.auth_required) {
306
+ return {
307
+ config,
308
+ ...return_obj
309
+ };
310
+ }
311
+ try {
312
+ api = await view_api(config);
313
+ } catch (e) {
314
+ console.error(`Could not get api details: ${e.message}`);
315
+ }
316
+
317
+ return {
318
+ config,
319
+ ...return_obj
320
+ };
321
+ }
322
+ let api: ApiInfo<JsApiData>;
323
+ async function handle_space_success(status: SpaceStatus): Promise<void> {
324
+ if (status_callback) status_callback(status);
325
+ if (status.status === "running")
326
+ try {
327
+ config = await resolve_config(
328
+ fetch_implementation,
329
+ `${http_protocol}//${host}`,
330
+ hf_token
331
+ );
332
+
333
+ const _config = await config_success(config);
334
+ res(_config);
335
+ } catch (e) {
336
+ console.error(e);
337
+ if (status_callback) {
338
+ status_callback({
339
+ status: "error",
340
+ message: "Could not load this space.",
341
+ load_status: "error",
342
+ detail: "NOT_FOUND"
343
+ });
344
+ }
345
+ }
346
+ }
347
+
348
+ try {
349
+ config = await resolve_config(
350
+ fetch_implementation,
351
+ `${http_protocol}//${host}`,
352
+ hf_token
353
+ );
354
+
355
+ const _config = await config_success(config);
356
+ res(_config);
357
+ } catch (e) {
358
+ console.error(e);
359
+ if (space_id) {
360
+ check_space_status(
361
+ space_id,
362
+ RE_SPACE_NAME.test(space_id) ? "space_name" : "subdomain",
363
+ handle_space_success
364
+ );
365
+ } else {
366
+ if (status_callback)
367
+ status_callback({
368
+ status: "error",
369
+ message: "Could not load this space.",
370
+ load_status: "error",
371
+ detail: "NOT_FOUND"
372
+ });
373
+ }
374
+ }
375
+
376
+ function predict(
377
+ endpoint: string,
378
+ data: unknown[],
379
+ event_data?: unknown
380
+ ): Promise<unknown> {
381
+ let data_returned = false;
382
+ let status_complete = false;
383
+ let dependency;
384
+ if (typeof endpoint === "number") {
385
+ dependency = config.dependencies[endpoint];
386
+ } else {
387
+ const trimmed_endpoint = endpoint.replace(/^\//, "");
388
+ dependency = config.dependencies[api_map[trimmed_endpoint]];
389
+ }
390
+
391
+ if (dependency.types.continuous) {
392
+ throw new Error(
393
+ "Cannot call predict on this function as it may run forever. Use submit instead"
394
+ );
395
+ }
396
+
397
+ return new Promise((res, rej) => {
398
+ const app = submit(endpoint, data, event_data);
399
+ let result;
400
+
401
+ app
402
+ .on("data", (d) => {
403
+ // if complete message comes before data, resolve here
404
+ if (status_complete) {
405
+ app.destroy();
406
+ res(d);
407
+ }
408
+ data_returned = true;
409
+ result = d;
410
+ })
411
+ .on("status", (status) => {
412
+ if (status.stage === "error") rej(status);
413
+ if (status.stage === "complete") {
414
+ status_complete = true;
415
+ // if complete message comes after data, resolve here
416
+ if (data_returned) {
417
+ app.destroy();
418
+ res(result);
419
+ }
420
+ }
421
+ });
422
+ });
423
+ }
424
+
425
+ function submit(
426
+ endpoint: string | number,
427
+ data: unknown[],
428
+ event_data?: unknown,
429
+ trigger_id: number | null = null
430
+ ): SubmitReturn {
431
+ let fn_index: number;
432
+ let api_info;
433
+
434
+ if (typeof endpoint === "number") {
435
+ fn_index = endpoint;
436
+ api_info = api.unnamed_endpoints[fn_index];
437
+ } else {
438
+ const trimmed_endpoint = endpoint.replace(/^\//, "");
439
+
440
+ fn_index = api_map[trimmed_endpoint];
441
+ api_info = api.named_endpoints[endpoint.trim()];
442
+ }
443
+
444
+ if (typeof fn_index !== "number") {
445
+ throw new Error(
446
+ "There is no endpoint matching that name of fn_index matching that number."
447
+ );
448
+ }
449
+
450
+ let websocket: WebSocket;
451
+ let eventSource: EventSource;
452
+ let protocol = config.protocol ?? "ws";
453
+
454
+ const _endpoint = typeof endpoint === "number" ? "/predict" : endpoint;
455
+ let payload: Payload;
456
+ let event_id: string | null = null;
457
+ let complete: false | Record<string, any> = false;
458
+ const listener_map: ListenerMap<EventType> = {};
459
+ let url_params = "";
460
+ if (typeof window !== "undefined") {
461
+ url_params = new URLSearchParams(window.location.search).toString();
462
+ }
463
+
464
+ handle_blob(`${config.root}`, data, api_info, hf_token).then(
465
+ (_payload) => {
466
+ payload = {
467
+ data: _payload || [],
468
+ event_data,
469
+ fn_index,
470
+ trigger_id
471
+ };
472
+ if (skip_queue(fn_index, config)) {
473
+ fire_event({
474
+ type: "status",
475
+ endpoint: _endpoint,
476
+ stage: "pending",
477
+ queue: false,
478
+ fn_index,
479
+ time: new Date()
480
+ });
481
+
482
+ post_data(
483
+ `${config.root}/run${
484
+ _endpoint.startsWith("/") ? _endpoint : `/${_endpoint}`
485
+ }${url_params ? "?" + url_params : ""}`,
486
+ {
487
+ ...payload,
488
+ session_hash
489
+ },
490
+ hf_token
491
+ )
492
+ .then(([output, status_code]) => {
493
+ const data = output.data;
494
+ if (status_code == 200) {
495
+ fire_event({
496
+ type: "data",
497
+ endpoint: _endpoint,
498
+ fn_index,
499
+ data: data,
500
+ time: new Date()
501
+ });
502
+
503
+ fire_event({
504
+ type: "status",
505
+ endpoint: _endpoint,
506
+ fn_index,
507
+ stage: "complete",
508
+ eta: output.average_duration,
509
+ queue: false,
510
+ time: new Date()
511
+ });
512
+ } else {
513
+ fire_event({
514
+ type: "status",
515
+ stage: "error",
516
+ endpoint: _endpoint,
517
+ fn_index,
518
+ message: output.error,
519
+ queue: false,
520
+ time: new Date()
521
+ });
522
+ }
523
+ })
524
+ .catch((e) => {
525
+ fire_event({
526
+ type: "status",
527
+ stage: "error",
528
+ message: e.message,
529
+ endpoint: _endpoint,
530
+ fn_index,
531
+ queue: false,
532
+ time: new Date()
533
+ });
534
+ });
535
+ } else if (protocol == "ws") {
536
+ fire_event({
537
+ type: "status",
538
+ stage: "pending",
539
+ queue: true,
540
+ endpoint: _endpoint,
541
+ fn_index,
542
+ time: new Date()
543
+ });
544
+ let url = new URL(`${ws_protocol}://${resolve_root(
545
+ host,
546
+ config.path,
547
+ true
548
+ )}
549
+ /queue/join${url_params ? "?" + url_params : ""}`);
550
+
551
+ if (jwt) {
552
+ url.searchParams.set("__sign", jwt);
553
+ }
554
+
555
+ websocket = new WebSocket(url);
556
+
557
+ websocket.onclose = (evt) => {
558
+ if (!evt.wasClean) {
559
+ fire_event({
560
+ type: "status",
561
+ stage: "error",
562
+ broken: true,
563
+ message: BROKEN_CONNECTION_MSG,
564
+ queue: true,
565
+ endpoint: _endpoint,
566
+ fn_index,
567
+ time: new Date()
568
+ });
569
+ }
570
+ };
571
+
572
+ websocket.onmessage = function (event) {
573
+ const _data = JSON.parse(event.data);
574
+ const { type, status, data } = handle_message(
575
+ _data,
576
+ last_status[fn_index]
577
+ );
578
+
579
+ if (type === "update" && status && !complete) {
580
+ // call 'status' listeners
581
+ fire_event({
582
+ type: "status",
583
+ endpoint: _endpoint,
584
+ fn_index,
585
+ time: new Date(),
586
+ ...status
587
+ });
588
+ if (status.stage === "error") {
589
+ websocket.close();
590
+ }
591
+ } else if (type === "hash") {
592
+ websocket.send(JSON.stringify({ fn_index, session_hash }));
593
+ return;
594
+ } else if (type === "data") {
595
+ websocket.send(JSON.stringify({ ...payload, session_hash }));
596
+ } else if (type === "complete") {
597
+ complete = status;
598
+ } else if (type === "log") {
599
+ fire_event({
600
+ type: "log",
601
+ log: data.log,
602
+ level: data.level,
603
+ endpoint: _endpoint,
604
+ fn_index
605
+ });
606
+ } else if (type === "generating") {
607
+ fire_event({
608
+ type: "status",
609
+ time: new Date(),
610
+ ...status,
611
+ stage: status?.stage!,
612
+ queue: true,
613
+ endpoint: _endpoint,
614
+ fn_index
615
+ });
616
+ }
617
+ if (data) {
618
+ fire_event({
619
+ type: "data",
620
+ time: new Date(),
621
+ data: data.data,
622
+ endpoint: _endpoint,
623
+ fn_index
624
+ });
625
+
626
+ if (complete) {
627
+ fire_event({
628
+ type: "status",
629
+ time: new Date(),
630
+ ...complete,
631
+ stage: status?.stage!,
632
+ queue: true,
633
+ endpoint: _endpoint,
634
+ fn_index
635
+ });
636
+ websocket.close();
637
+ }
638
+ }
639
+ };
640
+
641
+ // different ws contract for gradio versions older than 3.6.0
642
+ //@ts-ignore
643
+ if (semiver(config.version || "2.0.0", "3.6") < 0) {
644
+ addEventListener("open", () =>
645
+ websocket.send(JSON.stringify({ hash: session_hash }))
646
+ );
647
+ }
648
+ } else if (protocol == "sse") {
649
+ fire_event({
650
+ type: "status",
651
+ stage: "pending",
652
+ queue: true,
653
+ endpoint: _endpoint,
654
+ fn_index,
655
+ time: new Date()
656
+ });
657
+ var params = new URLSearchParams({
658
+ fn_index: fn_index.toString(),
659
+ session_hash: session_hash
660
+ }).toString();
661
+ let url = new URL(
662
+ `${config.root}/queue/join?${
663
+ url_params ? url_params + "&" : ""
664
+ }${params}`
665
+ );
666
+
667
+ eventSource = EventSource_factory(url);
668
+
669
+ eventSource.onmessage = async function (event) {
670
+ const _data = JSON.parse(event.data);
671
+ const { type, status, data } = handle_message(
672
+ _data,
673
+ last_status[fn_index]
674
+ );
675
+
676
+ if (type === "update" && status && !complete) {
677
+ // call 'status' listeners
678
+ fire_event({
679
+ type: "status",
680
+ endpoint: _endpoint,
681
+ fn_index,
682
+ time: new Date(),
683
+ ...status
684
+ });
685
+ if (status.stage === "error") {
686
+ eventSource.close();
687
+ }
688
+ } else if (type === "data") {
689
+ event_id = _data.event_id as string;
690
+ let [_, status] = await post_data(
691
+ `${config.root}/queue/data`,
692
+ {
693
+ ...payload,
694
+ session_hash,
695
+ event_id
696
+ },
697
+ hf_token
698
+ );
699
+ if (status !== 200) {
700
+ fire_event({
701
+ type: "status",
702
+ stage: "error",
703
+ message: BROKEN_CONNECTION_MSG,
704
+ queue: true,
705
+ endpoint: _endpoint,
706
+ fn_index,
707
+ time: new Date()
708
+ });
709
+ eventSource.close();
710
+ }
711
+ } else if (type === "complete") {
712
+ complete = status;
713
+ } else if (type === "log") {
714
+ fire_event({
715
+ type: "log",
716
+ log: data.log,
717
+ level: data.level,
718
+ endpoint: _endpoint,
719
+ fn_index
720
+ });
721
+ } else if (type === "generating") {
722
+ fire_event({
723
+ type: "status",
724
+ time: new Date(),
725
+ ...status,
726
+ stage: status?.stage!,
727
+ queue: true,
728
+ endpoint: _endpoint,
729
+ fn_index
730
+ });
731
+ }
732
+ if (data) {
733
+ fire_event({
734
+ type: "data",
735
+ time: new Date(),
736
+ data: data.data,
737
+ endpoint: _endpoint,
738
+ fn_index
739
+ });
740
+
741
+ if (complete) {
742
+ fire_event({
743
+ type: "status",
744
+ time: new Date(),
745
+ ...complete,
746
+ stage: status?.stage!,
747
+ queue: true,
748
+ endpoint: _endpoint,
749
+ fn_index
750
+ });
751
+ eventSource.close();
752
+ }
753
+ }
754
+ };
755
+ } else if (protocol == "sse_v1" || protocol == "sse_v2") {
756
+ // latest API format. v2 introduces sending diffs for intermediate outputs in generative functions, which makes payloads lighter.
757
+ fire_event({
758
+ type: "status",
759
+ stage: "pending",
760
+ queue: true,
761
+ endpoint: _endpoint,
762
+ fn_index,
763
+ time: new Date()
764
+ });
765
+
766
+ post_data(
767
+ `${config.root}/queue/join?${url_params}`,
768
+ {
769
+ ...payload,
770
+ session_hash
771
+ },
772
+ hf_token
773
+ ).then(([response, status]) => {
774
+ if (status === 503) {
775
+ fire_event({
776
+ type: "status",
777
+ stage: "error",
778
+ message: QUEUE_FULL_MSG,
779
+ queue: true,
780
+ endpoint: _endpoint,
781
+ fn_index,
782
+ time: new Date()
783
+ });
784
+ } else if (status !== 200) {
785
+ fire_event({
786
+ type: "status",
787
+ stage: "error",
788
+ message: BROKEN_CONNECTION_MSG,
789
+ queue: true,
790
+ endpoint: _endpoint,
791
+ fn_index,
792
+ time: new Date()
793
+ });
794
+ } else {
795
+ event_id = response.event_id as string;
796
+ let callback = async function (_data: object): Promise<void> {
797
+ try {
798
+ const { type, status, data } = handle_message(
799
+ _data,
800
+ last_status[fn_index]
801
+ );
802
+
803
+ if (type == "heartbeat") {
804
+ return;
805
+ }
806
+
807
+ if (type === "update" && status && !complete) {
808
+ // call 'status' listeners
809
+ fire_event({
810
+ type: "status",
811
+ endpoint: _endpoint,
812
+ fn_index,
813
+ time: new Date(),
814
+ ...status
815
+ });
816
+ } else if (type === "complete") {
817
+ complete = status;
818
+ } else if (type == "unexpected_error") {
819
+ console.error("Unexpected error", status?.message);
820
+ fire_event({
821
+ type: "status",
822
+ stage: "error",
823
+ message:
824
+ status?.message || "An Unexpected Error Occurred!",
825
+ queue: true,
826
+ endpoint: _endpoint,
827
+ fn_index,
828
+ time: new Date()
829
+ });
830
+ } else if (type === "log") {
831
+ fire_event({
832
+ type: "log",
833
+ log: data.log,
834
+ level: data.level,
835
+ endpoint: _endpoint,
836
+ fn_index
837
+ });
838
+ return;
839
+ } else if (type === "generating") {
840
+ fire_event({
841
+ type: "status",
842
+ time: new Date(),
843
+ ...status,
844
+ stage: status?.stage!,
845
+ queue: true,
846
+ endpoint: _endpoint,
847
+ fn_index
848
+ });
849
+ if (data && protocol === "sse_v2") {
850
+ apply_diff_stream(event_id!, data);
851
+ }
852
+ }
853
+ if (data) {
854
+ fire_event({
855
+ type: "data",
856
+ time: new Date(),
857
+ data: data.data,
858
+ endpoint: _endpoint,
859
+ fn_index
860
+ });
861
+
862
+ if (complete) {
863
+ fire_event({
864
+ type: "status",
865
+ time: new Date(),
866
+ ...complete,
867
+ stage: status?.stage!,
868
+ queue: true,
869
+ endpoint: _endpoint,
870
+ fn_index
871
+ });
872
+ }
873
+ }
874
+
875
+ if (
876
+ status?.stage === "complete" ||
877
+ status?.stage === "error"
878
+ ) {
879
+ if (event_callbacks[event_id]) {
880
+ delete event_callbacks[event_id];
881
+ }
882
+ if (event_id in pending_diff_streams) {
883
+ delete pending_diff_streams[event_id];
884
+ }
885
+ }
886
+ } catch (e) {
887
+ console.error("Unexpected client exception", e);
888
+ fire_event({
889
+ type: "status",
890
+ stage: "error",
891
+ message: "An Unexpected Error Occurred!",
892
+ queue: true,
893
+ endpoint: _endpoint,
894
+ fn_index,
895
+ time: new Date()
896
+ });
897
+ close_stream();
898
+ }
899
+ };
900
+ if (event_id in pending_stream_messages) {
901
+ pending_stream_messages[event_id].forEach((msg) =>
902
+ callback(msg)
903
+ );
904
+ delete pending_stream_messages[event_id];
905
+ }
906
+ event_callbacks[event_id] = callback;
907
+ unclosed_events.add(event_id);
908
+ if (!stream_open) {
909
+ open_stream();
910
+ }
911
+ }
912
+ });
913
+ }
914
+ }
915
+ );
916
+
917
+ function apply_diff_stream(event_id: string, data: any): void {
918
+ let is_first_generation = !pending_diff_streams[event_id];
919
+ if (is_first_generation) {
920
+ pending_diff_streams[event_id] = [];
921
+ data.data.forEach((value: any, i: number) => {
922
+ pending_diff_streams[event_id][i] = value;
923
+ });
924
+ } else {
925
+ data.data.forEach((value: any, i: number) => {
926
+ let new_data = apply_diff(
927
+ pending_diff_streams[event_id][i],
928
+ value
929
+ );
930
+ pending_diff_streams[event_id][i] = new_data;
931
+ data.data[i] = new_data;
932
+ });
933
+ }
934
+ }
935
+
936
+ function fire_event<K extends EventType>(event: Event<K>): void {
937
+ const narrowed_listener_map: ListenerMap<K> = listener_map;
938
+ const listeners = narrowed_listener_map[event.type] || [];
939
+ listeners?.forEach((l) => l(event));
940
+ }
941
+
942
+ function on<K extends EventType>(
943
+ eventType: K,
944
+ listener: EventListener<K>
945
+ ): SubmitReturn {
946
+ const narrowed_listener_map: ListenerMap<K> = listener_map;
947
+ const listeners = narrowed_listener_map[eventType] || [];
948
+ narrowed_listener_map[eventType] = listeners;
949
+ listeners?.push(listener);
950
+
951
+ return { on, off, cancel, destroy };
952
+ }
953
+
954
+ function off<K extends EventType>(
955
+ eventType: K,
956
+ listener: EventListener<K>
957
+ ): SubmitReturn {
958
+ const narrowed_listener_map: ListenerMap<K> = listener_map;
959
+ let listeners = narrowed_listener_map[eventType] || [];
960
+ listeners = listeners?.filter((l) => l !== listener);
961
+ narrowed_listener_map[eventType] = listeners;
962
+
963
+ return { on, off, cancel, destroy };
964
+ }
965
+
966
+ async function cancel(): Promise<void> {
967
+ const _status: Status = {
968
+ stage: "complete",
969
+ queue: false,
970
+ time: new Date()
971
+ };
972
+ complete = _status;
973
+ fire_event({
974
+ ..._status,
975
+ type: "status",
976
+ endpoint: _endpoint,
977
+ fn_index: fn_index
978
+ });
979
+
980
+ let cancel_request = {};
981
+ if (protocol === "ws") {
982
+ if (websocket && websocket.readyState === 0) {
983
+ websocket.addEventListener("open", () => {
984
+ websocket.close();
985
+ });
986
+ } else {
987
+ websocket.close();
988
+ }
989
+ cancel_request = { fn_index, session_hash };
990
+ } else {
991
+ eventSource.close();
992
+ cancel_request = { event_id };
993
+ }
994
+
995
+ try {
996
+ await fetch_implementation(`${config.root}/reset`, {
997
+ headers: { "Content-Type": "application/json" },
998
+ method: "POST",
999
+ body: JSON.stringify(cancel_request)
1000
+ });
1001
+ } catch (e) {
1002
+ console.warn(
1003
+ "The `/reset` endpoint could not be called. Subsequent endpoint results may be unreliable."
1004
+ );
1005
+ }
1006
+ }
1007
+
1008
+ function destroy(): void {
1009
+ for (const event_type in listener_map) {
1010
+ listener_map[event_type as "data" | "status"].forEach((fn) => {
1011
+ off(event_type as "data" | "status", fn);
1012
+ });
1013
+ }
1014
+ }
1015
+
1016
+ return {
1017
+ on,
1018
+ off,
1019
+ cancel,
1020
+ destroy
1021
+ };
1022
+ }
1023
+
1024
+ function open_stream(): void {
1025
+ stream_open = true;
1026
+ let params = new URLSearchParams({
1027
+ session_hash: session_hash
1028
+ }).toString();
1029
+ let url = new URL(`${config.root}/queue/data?${params}`);
1030
+ event_stream = EventSource_factory(url);
1031
+ event_stream.onmessage = async function (event) {
1032
+ let _data = JSON.parse(event.data);
1033
+ const event_id = _data.event_id;
1034
+ if (!event_id) {
1035
+ await Promise.all(
1036
+ Object.keys(event_callbacks).map((event_id) =>
1037
+ event_callbacks[event_id](_data)
1038
+ )
1039
+ );
1040
+ } else if (event_callbacks[event_id]) {
1041
+ if (_data.msg === "process_completed") {
1042
+ unclosed_events.delete(event_id);
1043
+ if (unclosed_events.size === 0) {
1044
+ close_stream();
1045
+ }
1046
+ }
1047
+ let fn = event_callbacks[event_id];
1048
+ window.setTimeout(fn, 0, _data); // need to do this to put the event on the end of the event loop, so the browser can refresh between callbacks and not freeze in case of quick generations. See https://github.com/gradio-app/gradio/pull/7055
1049
+ } else {
1050
+ if (!pending_stream_messages[event_id]) {
1051
+ pending_stream_messages[event_id] = [];
1052
+ }
1053
+ pending_stream_messages[event_id].push(_data);
1054
+ }
1055
+ };
1056
+ event_stream.onerror = async function (event) {
1057
+ await Promise.all(
1058
+ Object.keys(event_callbacks).map((event_id) =>
1059
+ event_callbacks[event_id]({
1060
+ msg: "unexpected_error",
1061
+ message: BROKEN_CONNECTION_MSG
1062
+ })
1063
+ )
1064
+ );
1065
+ close_stream();
1066
+ };
1067
+ }
1068
+
1069
+ function close_stream(): void {
1070
+ stream_open = false;
1071
+ event_stream?.close();
1072
+ }
1073
+
1074
+ async function component_server(
1075
+ component_id: number,
1076
+ fn_name: string,
1077
+ data: unknown[]
1078
+ ): Promise<any> {
1079
+ const headers: {
1080
+ Authorization?: string;
1081
+ "Content-Type": "application/json";
1082
+ } = { "Content-Type": "application/json" };
1083
+ if (hf_token) {
1084
+ headers.Authorization = `Bearer ${hf_token}`;
1085
+ }
1086
+ let root_url: string;
1087
+ let component = config.components.find(
1088
+ (comp) => comp.id === component_id
1089
+ );
1090
+ if (component?.props?.root_url) {
1091
+ root_url = component.props.root_url;
1092
+ } else {
1093
+ root_url = config.root;
1094
+ }
1095
+ const response = await fetch_implementation(
1096
+ `${root_url}/component_server/`,
1097
+ {
1098
+ method: "POST",
1099
+ body: JSON.stringify({
1100
+ data: data,
1101
+ component_id: component_id,
1102
+ fn_name: fn_name,
1103
+ session_hash: session_hash
1104
+ }),
1105
+ headers
1106
+ }
1107
+ );
1108
+
1109
+ if (!response.ok) {
1110
+ throw new Error(
1111
+ "Could not connect to component server: " + response.statusText
1112
+ );
1113
+ }
1114
+
1115
+ const output = await response.json();
1116
+ return output;
1117
+ }
1118
+
1119
+ async function view_api(config?: Config): Promise<ApiInfo<JsApiData>> {
1120
+ if (api) return api;
1121
+
1122
+ const headers: {
1123
+ Authorization?: string;
1124
+ "Content-Type": "application/json";
1125
+ } = { "Content-Type": "application/json" };
1126
+ if (hf_token) {
1127
+ headers.Authorization = `Bearer ${hf_token}`;
1128
+ }
1129
+ let response: Response;
1130
+ // @ts-ignore
1131
+ if (semiver(config.version || "2.0.0", "3.30") < 0) {
1132
+ response = await fetch_implementation(
1133
+ "https://gradio-space-api-fetcher-v2.hf.space/api",
1134
+ {
1135
+ method: "POST",
1136
+ body: JSON.stringify({
1137
+ serialize: false,
1138
+ config: JSON.stringify(config)
1139
+ }),
1140
+ headers
1141
+ }
1142
+ );
1143
+ } else {
1144
+ response = await fetch_implementation(`${config.root}/info`, {
1145
+ headers
1146
+ });
1147
+ }
1148
+
1149
+ if (!response.ok) {
1150
+ throw new Error(BROKEN_CONNECTION_MSG);
1151
+ }
1152
+
1153
+ let api_info = (await response.json()) as
1154
+ | ApiInfo<ApiData>
1155
+ | { api: ApiInfo<ApiData> };
1156
+ if ("api" in api_info) {
1157
+ api_info = api_info.api;
1158
+ }
1159
+
1160
+ if (
1161
+ api_info.named_endpoints["/predict"] &&
1162
+ !api_info.unnamed_endpoints["0"]
1163
+ ) {
1164
+ api_info.unnamed_endpoints[0] = api_info.named_endpoints["/predict"];
1165
+ }
1166
+
1167
+ const x = transform_api_info(api_info, config, api_map);
1168
+ return x;
1169
+ }
1170
+ });
1171
+ }
1172
+
1173
+ async function handle_blob(
1174
+ endpoint: string,
1175
+ data: unknown[],
1176
+ api_info: ApiInfo<JsApiData>,
1177
+ token?: `hf_${string}`
1178
+ ): Promise<unknown[]> {
1179
+ const blob_refs = await walk_and_store_blobs(
1180
+ data,
1181
+ undefined,
1182
+ [],
1183
+ true,
1184
+ api_info
1185
+ );
1186
+
1187
+ return Promise.all(
1188
+ blob_refs.map(async ({ path, blob, type }) => {
1189
+ if (blob) {
1190
+ const file_url = (await upload_files(endpoint, [blob], token))
1191
+ .files[0];
1192
+ return { path, file_url, type, name: blob?.name };
1193
+ }
1194
+ return { path, type };
1195
+ })
1196
+ ).then((r) => {
1197
+ r.forEach(({ path, file_url, type, name }) => {
1198
+ if (type === "Gallery") {
1199
+ update_object(data, file_url, path);
1200
+ } else if (file_url) {
1201
+ const file = new FileData({ path: file_url, orig_name: name });
1202
+ update_object(data, file, path);
1203
+ }
1204
+ });
1205
+
1206
+ return data;
1207
+ });
1208
+ }
1209
+ }
1210
+
1211
+ export const { post_data, upload_files, client, handle_blob } = api_factory(
1212
+ fetch,
1213
+ (...args) => new EventSource(...args)
1214
+ );
1215
+
1216
+ interface ApiData {
1217
+ label: string;
1218
+ type: {
1219
+ type: any;
1220
+ description: string;
1221
+ };
1222
+ component: string;
1223
+ example_input?: any;
1224
+ }
1225
+
1226
+ interface JsApiData {
1227
+ label: string;
1228
+ type: string;
1229
+ component: string;
1230
+ example_input: any;
1231
+ }
1232
+
1233
+ interface EndpointInfo<T extends ApiData | JsApiData> {
1234
+ parameters: T[];
1235
+ returns: T[];
1236
+ }
1237
+ interface ApiInfo<T extends ApiData | JsApiData> {
1238
+ named_endpoints: {
1239
+ [key: string]: EndpointInfo<T>;
1240
+ };
1241
+ unnamed_endpoints: {
1242
+ [key: string]: EndpointInfo<T>;
1243
+ };
1244
+ }
1245
+
1246
+ function get_type(
1247
+ type: { [key: string]: any },
1248
+ component: string,
1249
+ serializer: string,
1250
+ signature_type: "return" | "parameter"
1251
+ ): string {
1252
+ switch (type.type) {
1253
+ case "string":
1254
+ return "string";
1255
+ case "boolean":
1256
+ return "boolean";
1257
+ case "number":
1258
+ return "number";
1259
+ }
1260
+
1261
+ if (
1262
+ serializer === "JSONSerializable" ||
1263
+ serializer === "StringSerializable"
1264
+ ) {
1265
+ return "any";
1266
+ } else if (serializer === "ListStringSerializable") {
1267
+ return "string[]";
1268
+ } else if (component === "Image") {
1269
+ return signature_type === "parameter" ? "Blob | File | Buffer" : "string";
1270
+ } else if (serializer === "FileSerializable") {
1271
+ if (type?.type === "array") {
1272
+ return signature_type === "parameter"
1273
+ ? "(Blob | File | Buffer)[]"
1274
+ : `{ name: string; data: string; size?: number; is_file?: boolean; orig_name?: string}[]`;
1275
+ }
1276
+ return signature_type === "parameter"
1277
+ ? "Blob | File | Buffer"
1278
+ : `{ name: string; data: string; size?: number; is_file?: boolean; orig_name?: string}`;
1279
+ } else if (serializer === "GallerySerializable") {
1280
+ return signature_type === "parameter"
1281
+ ? "[(Blob | File | Buffer), (string | null)][]"
1282
+ : `[{ name: string; data: string; size?: number; is_file?: boolean; orig_name?: string}, (string | null))][]`;
1283
+ }
1284
+ }
1285
+
1286
+ function get_description(
1287
+ type: { type: any; description: string },
1288
+ serializer: string
1289
+ ): string {
1290
+ if (serializer === "GallerySerializable") {
1291
+ return "array of [file, label] tuples";
1292
+ } else if (serializer === "ListStringSerializable") {
1293
+ return "array of strings";
1294
+ } else if (serializer === "FileSerializable") {
1295
+ return "array of files or single file";
1296
+ }
1297
+ return type.description;
1298
+ }
1299
+
1300
+ function transform_api_info(
1301
+ api_info: ApiInfo<ApiData>,
1302
+ config: Config,
1303
+ api_map: Record<string, number>
1304
+ ): ApiInfo<JsApiData> {
1305
+ const new_data = {
1306
+ named_endpoints: {},
1307
+ unnamed_endpoints: {}
1308
+ };
1309
+ for (const key in api_info) {
1310
+ const cat = api_info[key];
1311
+
1312
+ for (const endpoint in cat) {
1313
+ const dep_index = config.dependencies[endpoint]
1314
+ ? endpoint
1315
+ : api_map[endpoint.replace("/", "")];
1316
+
1317
+ const info = cat[endpoint];
1318
+ new_data[key][endpoint] = {};
1319
+ new_data[key][endpoint].parameters = {};
1320
+ new_data[key][endpoint].returns = {};
1321
+ new_data[key][endpoint].type = config.dependencies[dep_index].types;
1322
+ new_data[key][endpoint].parameters = info.parameters.map(
1323
+ ({ label, component, type, serializer }) => ({
1324
+ label,
1325
+ component,
1326
+ type: get_type(type, component, serializer, "parameter"),
1327
+ description: get_description(type, serializer)
1328
+ })
1329
+ );
1330
+
1331
+ new_data[key][endpoint].returns = info.returns.map(
1332
+ ({ label, component, type, serializer }) => ({
1333
+ label,
1334
+ component,
1335
+ type: get_type(type, component, serializer, "return"),
1336
+ description: get_description(type, serializer)
1337
+ })
1338
+ );
1339
+ }
1340
+ }
1341
+
1342
+ return new_data;
1343
+ }
1344
+
1345
+ async function get_jwt(
1346
+ space: string,
1347
+ token: `hf_${string}`
1348
+ ): Promise<string | false> {
1349
+ try {
1350
+ const r = await fetch(`https://huggingface.co/api/spaces/${space}/jwt`, {
1351
+ headers: {
1352
+ Authorization: `Bearer ${token}`
1353
+ }
1354
+ });
1355
+
1356
+ const jwt = (await r.json()).token;
1357
+
1358
+ return jwt || false;
1359
+ } catch (e) {
1360
+ console.error(e);
1361
+ return false;
1362
+ }
1363
+ }
1364
+
1365
+ function update_object(object, newValue, stack): void {
1366
+ while (stack.length > 1) {
1367
+ object = object[stack.shift()];
1368
+ }
1369
+
1370
+ object[stack.shift()] = newValue;
1371
+ }
1372
+
1373
+ export async function walk_and_store_blobs(
1374
+ param,
1375
+ type = undefined,
1376
+ path = [],
1377
+ root = false,
1378
+ api_info = undefined
1379
+ ): Promise<
1380
+ {
1381
+ path: string[];
1382
+ type: string;
1383
+ blob: Blob | false;
1384
+ }[]
1385
+ > {
1386
+ if (Array.isArray(param)) {
1387
+ let blob_refs = [];
1388
+
1389
+ await Promise.all(
1390
+ param.map(async (v, i) => {
1391
+ let new_path = path.slice();
1392
+ new_path.push(i);
1393
+
1394
+ const array_refs = await walk_and_store_blobs(
1395
+ param[i],
1396
+ root ? api_info?.parameters[i]?.component || undefined : type,
1397
+ new_path,
1398
+ false,
1399
+ api_info
1400
+ );
1401
+
1402
+ blob_refs = blob_refs.concat(array_refs);
1403
+ })
1404
+ );
1405
+
1406
+ return blob_refs;
1407
+ } else if (globalThis.Buffer && param instanceof globalThis.Buffer) {
1408
+ const is_image = type === "Image";
1409
+ return [
1410
+ {
1411
+ path: path,
1412
+ blob: is_image ? false : new NodeBlob([param]),
1413
+ type
1414
+ }
1415
+ ];
1416
+ } else if (typeof param === "object") {
1417
+ let blob_refs = [];
1418
+ for (let key in param) {
1419
+ if (param.hasOwnProperty(key)) {
1420
+ let new_path = path.slice();
1421
+ new_path.push(key);
1422
+ blob_refs = blob_refs.concat(
1423
+ await walk_and_store_blobs(
1424
+ param[key],
1425
+ undefined,
1426
+ new_path,
1427
+ false,
1428
+ api_info
1429
+ )
1430
+ );
1431
+ }
1432
+ }
1433
+ return blob_refs;
1434
+ }
1435
+ return [];
1436
+ }
1437
+
1438
+ function image_to_data_uri(blob: Blob): Promise<string | ArrayBuffer> {
1439
+ return new Promise((resolve, _) => {
1440
+ const reader = new FileReader();
1441
+ reader.onloadend = () => resolve(reader.result);
1442
+ reader.readAsDataURL(blob);
1443
+ });
1444
+ }
1445
+
1446
+ function skip_queue(id: number, config: Config): boolean {
1447
+ return (
1448
+ !(config?.dependencies?.[id]?.queue === null
1449
+ ? config.enable_queue
1450
+ : config?.dependencies?.[id]?.queue) || false
1451
+ );
1452
+ }
1453
+
1454
+ async function resolve_config(
1455
+ fetch_implementation: typeof fetch,
1456
+ endpoint?: string,
1457
+ token?: `hf_${string}`
1458
+ ): Promise<Config> {
1459
+ const headers: { Authorization?: string } = {};
1460
+ if (token) {
1461
+ headers.Authorization = `Bearer ${token}`;
1462
+ }
1463
+ if (
1464
+ typeof window !== "undefined" &&
1465
+ window.gradio_config &&
1466
+ location.origin !== "http://localhost:9876" &&
1467
+ !window.gradio_config.dev_mode
1468
+ ) {
1469
+ const path = window.gradio_config.root;
1470
+ const config = window.gradio_config;
1471
+ config.root = resolve_root(endpoint, config.root, false);
1472
+ return { ...config, path: path };
1473
+ } else if (endpoint) {
1474
+ let response = await fetch_implementation(`${endpoint}/config`, {
1475
+ headers
1476
+ });
1477
+
1478
+ if (response.status === 200) {
1479
+ const config = await response.json();
1480
+ config.path = config.path ?? "";
1481
+ config.root = endpoint;
1482
+ return config;
1483
+ }
1484
+ throw new Error("Could not get config.");
1485
+ }
1486
+
1487
+ throw new Error("No config or app endpoint found");
1488
+ }
1489
+
1490
+ async function check_space_status(
1491
+ id: string,
1492
+ type: "subdomain" | "space_name",
1493
+ status_callback: SpaceStatusCallback
1494
+ ): Promise<void> {
1495
+ let endpoint =
1496
+ type === "subdomain"
1497
+ ? `https://huggingface.co/api/spaces/by-subdomain/${id}`
1498
+ : `https://huggingface.co/api/spaces/${id}`;
1499
+ let response;
1500
+ let _status;
1501
+ try {
1502
+ response = await fetch(endpoint);
1503
+ _status = response.status;
1504
+ if (_status !== 200) {
1505
+ throw new Error();
1506
+ }
1507
+ response = await response.json();
1508
+ } catch (e) {
1509
+ status_callback({
1510
+ status: "error",
1511
+ load_status: "error",
1512
+ message: "Could not get space status",
1513
+ detail: "NOT_FOUND"
1514
+ });
1515
+ return;
1516
+ }
1517
+
1518
+ if (!response || _status !== 200) return;
1519
+ const {
1520
+ runtime: { stage },
1521
+ id: space_name
1522
+ } = response;
1523
+
1524
+ switch (stage) {
1525
+ case "STOPPED":
1526
+ case "SLEEPING":
1527
+ status_callback({
1528
+ status: "sleeping",
1529
+ load_status: "pending",
1530
+ message: "Space is asleep. Waking it up...",
1531
+ detail: stage
1532
+ });
1533
+
1534
+ setTimeout(() => {
1535
+ check_space_status(id, type, status_callback);
1536
+ }, 1000); // poll for status
1537
+ break;
1538
+ case "PAUSED":
1539
+ status_callback({
1540
+ status: "paused",
1541
+ load_status: "error",
1542
+ message:
1543
+ "This space has been paused by the author. If you would like to try this demo, consider duplicating the space.",
1544
+ detail: stage,
1545
+ discussions_enabled: await discussions_enabled(space_name)
1546
+ });
1547
+ break;
1548
+ case "RUNNING":
1549
+ case "RUNNING_BUILDING":
1550
+ status_callback({
1551
+ status: "running",
1552
+ load_status: "complete",
1553
+ message: "",
1554
+ detail: stage
1555
+ });
1556
+ // load_config(source);
1557
+ // launch
1558
+ break;
1559
+ case "BUILDING":
1560
+ status_callback({
1561
+ status: "building",
1562
+ load_status: "pending",
1563
+ message: "Space is building...",
1564
+ detail: stage
1565
+ });
1566
+
1567
+ setTimeout(() => {
1568
+ check_space_status(id, type, status_callback);
1569
+ }, 1000);
1570
+ break;
1571
+ default:
1572
+ status_callback({
1573
+ status: "space_error",
1574
+ load_status: "error",
1575
+ message: "This space is experiencing an issue.",
1576
+ detail: stage,
1577
+ discussions_enabled: await discussions_enabled(space_name)
1578
+ });
1579
+ break;
1580
+ }
1581
+ }
1582
+
1583
+ function handle_message(
1584
+ data: any,
1585
+ last_status: Status["stage"]
1586
+ ): {
1587
+ type: "hash" | "data" | "update" | "complete" | "generating" | "log" | "none";
1588
+ data?: any;
1589
+ status?: Status;
1590
+ } {
1591
+ const queue = true;
1592
+ switch (data.msg) {
1593
+ case "send_data":
1594
+ return { type: "data" };
1595
+ case "send_hash":
1596
+ return { type: "hash" };
1597
+ case "queue_full":
1598
+ return {
1599
+ type: "update",
1600
+ status: {
1601
+ queue,
1602
+ message: QUEUE_FULL_MSG,
1603
+ stage: "error",
1604
+ code: data.code,
1605
+ success: data.success
1606
+ }
1607
+ };
1608
+ case "heartbeat":
1609
+ return {
1610
+ type: "heartbeat"
1611
+ };
1612
+ case "unexpected_error":
1613
+ return {
1614
+ type: "unexpected_error",
1615
+ status: {
1616
+ queue,
1617
+ message: data.message,
1618
+ stage: "error",
1619
+ success: false
1620
+ }
1621
+ };
1622
+ case "estimation":
1623
+ return {
1624
+ type: "update",
1625
+ status: {
1626
+ queue,
1627
+ stage: last_status || "pending",
1628
+ code: data.code,
1629
+ size: data.queue_size,
1630
+ position: data.rank,
1631
+ eta: data.rank_eta,
1632
+ success: data.success
1633
+ }
1634
+ };
1635
+ case "progress":
1636
+ return {
1637
+ type: "update",
1638
+ status: {
1639
+ queue,
1640
+ stage: "pending",
1641
+ code: data.code,
1642
+ progress_data: data.progress_data,
1643
+ success: data.success
1644
+ }
1645
+ };
1646
+ case "log":
1647
+ return { type: "log", data: data };
1648
+ case "process_generating":
1649
+ return {
1650
+ type: "generating",
1651
+ status: {
1652
+ queue,
1653
+ message: !data.success ? data.output.error : null,
1654
+ stage: data.success ? "generating" : "error",
1655
+ code: data.code,
1656
+ progress_data: data.progress_data,
1657
+ eta: data.average_duration
1658
+ },
1659
+ data: data.success ? data.output : null
1660
+ };
1661
+ case "process_completed":
1662
+ if ("error" in data.output) {
1663
+ return {
1664
+ type: "update",
1665
+ status: {
1666
+ queue,
1667
+ message: data.output.error as string,
1668
+ stage: "error",
1669
+ code: data.code,
1670
+ success: data.success
1671
+ }
1672
+ };
1673
+ }
1674
+ return {
1675
+ type: "complete",
1676
+ status: {
1677
+ queue,
1678
+ message: !data.success ? data.output.error : undefined,
1679
+ stage: data.success ? "complete" : "error",
1680
+ code: data.code,
1681
+ progress_data: data.progress_data
1682
+ },
1683
+ data: data.success ? data.output : null
1684
+ };
1685
+
1686
+ case "process_starts":
1687
+ return {
1688
+ type: "update",
1689
+ status: {
1690
+ queue,
1691
+ stage: "pending",
1692
+ code: data.code,
1693
+ size: data.rank,
1694
+ position: 0,
1695
+ success: data.success,
1696
+ eta: data.eta
1697
+ }
1698
+ };
1699
+ }
1700
+
1701
+ return { type: "none", status: { stage: "error", queue } };
1702
+ }
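`handle_message` above maps the raw queue messages (`estimation`, `progress`, `process_generating`, `process_completed`, ...) onto typed status and data updates. As a rough illustration only: the helper is module-local in client.ts, so the import below is hypothetical, and the message objects are minimal shapes inferred from the branches above rather than the full wire protocol.

```ts
// Hypothetical import: handle_message is a local helper in client.ts, shown here
// only to illustrate the mapping it performs.
import { handle_message } from "./client";

// A queue estimation maps to an "update" whose status carries size/position/eta.
const estimation = handle_message(
  { msg: "estimation", code: undefined, queue_size: 3, rank: 1, rank_eta: 2.5, success: true },
  "pending"
);
console.log(estimation.type, estimation.status?.position); // "update" 1

// A successful completion maps to "complete" and carries the endpoint output.
const completed = handle_message(
  { msg: "process_completed", output: { data: ["hello"] }, success: true, code: undefined },
  "generating"
);
console.log(completed.type, completed.data); // "complete" { data: ["hello"] }
```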
node_modules/@gradio/client/src/globals.d.ts ADDED
@@ -0,0 +1,29 @@
+ declare global {
+   interface Window {
+     __gradio_mode__: "app" | "website";
+     gradio_config: Config;
+     __is_colab__: boolean;
+     __gradio_space__: string | null;
+   }
+ }
+
+ export interface Config {
+   auth_required: boolean | undefined;
+   auth_message: string;
+   components: any[];
+   css: string | null;
+   dependencies: any[];
+   dev_mode: boolean;
+   enable_queue: boolean;
+   layout: any;
+   mode: "blocks" | "interface";
+   root: string;
+   theme: string;
+   title: string;
+   version: string;
+   space_id: string | null;
+   is_colab: boolean;
+   show_api: boolean;
+   stylesheets: string[];
+   path: string;
+ }
node_modules/@gradio/client/src/index.ts ADDED
@@ -0,0 +1,14 @@
+ export {
+   client,
+   post_data,
+   upload_files,
+   duplicate,
+   api_factory
+ } from "./client.js";
+ export type { SpaceStatus } from "./types.js";
+ export {
+   FileData,
+   upload,
+   get_fetchable_url_or_file,
+   prepare_files
+ } from "./upload.js";
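These are the package's public exports. A minimal usage sketch, assuming a reachable Gradio app and an endpoint actually named `/predict` (both placeholders for illustration):

```ts
import { client } from "@gradio/client";

// "user/space" stands in for a real Space id or app URL, and "/predict" for a
// real api_name exposed by that app.
const app = await client("user/space");
const result = await app.predict("/predict", ["Hello, LLaMa!"]);
console.log(result.data);
```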
node_modules/@gradio/client/src/types.ts ADDED
@@ -0,0 +1,119 @@
1
+ export interface Config {
2
+ auth_required: boolean | undefined;
3
+ auth_message: string;
4
+ components: any[];
5
+ css: string | null;
6
+ js: string | null;
7
+ head: string | null;
8
+ dependencies: any[];
9
+ dev_mode: boolean;
10
+ enable_queue: boolean;
11
+ layout: any;
12
+ mode: "blocks" | "interface";
13
+ root: string;
14
+ root_url?: string;
15
+ theme: string;
16
+ title: string;
17
+ version: string;
18
+ space_id: string | null;
19
+ is_colab: boolean;
20
+ show_api: boolean;
21
+ stylesheets: string[];
22
+ path: string;
23
+ protocol?: "sse_v2" | "sse_v1" | "sse" | "ws";
24
+ }
25
+
26
+ export interface Payload {
27
+ data: unknown[];
28
+ fn_index?: number;
29
+ event_data?: unknown;
30
+ time?: Date;
31
+ }
32
+
33
+ export interface PostResponse {
34
+ error?: string;
35
+ [x: string]: any;
36
+ }
37
+ export interface UploadResponse {
38
+ error?: string;
39
+ files?: string[];
40
+ }
41
+
42
+ export interface Status {
43
+ queue: boolean;
44
+ code?: string;
45
+ success?: boolean;
46
+ stage: "pending" | "error" | "complete" | "generating";
47
+ broken?: boolean;
48
+ size?: number;
49
+ position?: number;
50
+ eta?: number;
51
+ message?: string;
52
+ progress_data?: {
53
+ progress: number | null;
54
+ index: number | null;
55
+ length: number | null;
56
+ unit: string | null;
57
+ desc: string | null;
58
+ }[];
59
+ time?: Date;
60
+ }
61
+
62
+ export interface LogMessage {
63
+ log: string;
64
+ level: "warning" | "info";
65
+ }
66
+
67
+ export interface SpaceStatusNormal {
68
+ status: "sleeping" | "running" | "building" | "error" | "stopped";
69
+ detail:
70
+ | "SLEEPING"
71
+ | "RUNNING"
72
+ | "RUNNING_BUILDING"
73
+ | "BUILDING"
74
+ | "NOT_FOUND";
75
+ load_status: "pending" | "error" | "complete" | "generating";
76
+ message: string;
77
+ }
78
+ export interface SpaceStatusError {
79
+ status: "space_error" | "paused";
80
+ detail:
81
+ | "NO_APP_FILE"
82
+ | "CONFIG_ERROR"
83
+ | "BUILD_ERROR"
84
+ | "RUNTIME_ERROR"
85
+ | "PAUSED";
86
+ load_status: "error";
87
+ message: string;
88
+ discussions_enabled: boolean;
89
+ }
90
+ export type SpaceStatus = SpaceStatusNormal | SpaceStatusError;
91
+
92
+ export type status_callback_function = (a: Status) => void;
93
+ export type SpaceStatusCallback = (a: SpaceStatus) => void;
94
+
95
+ export type EventType = "data" | "status" | "log";
96
+
97
+ export interface EventMap {
98
+ data: Payload;
99
+ status: Status;
100
+ log: LogMessage;
101
+ }
102
+
103
+ export type Event<K extends EventType> = {
104
+ [P in K]: EventMap[P] & { type: P; endpoint: string; fn_index: number };
105
+ }[K];
106
+ export type EventListener<K extends EventType> = (event: Event<K>) => void;
107
+ export type ListenerMap<K extends EventType> = {
108
+ [P in K]?: EventListener<K>[];
109
+ };
110
+ export interface FileData {
111
+ name: string;
112
+ orig_name?: string;
113
+ size?: number;
114
+ data: string;
115
+ blob?: File;
116
+ is_file?: boolean;
117
+ mime_type?: string;
118
+ alt_text?: string;
119
+ }
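Because `Event<K>` merges the payload with `type`, `endpoint` and `fn_index`, listeners can be written directly against these types. A small sketch using a relative import, as the other files in this package do:

```ts
import type { EventListener } from "./types.js";

// Reacts to queue status updates; `event` is Status plus { type, endpoint, fn_index }.
const on_status: EventListener<"status"> = (event) => {
  if (event.stage === "error") {
    console.error(`${event.endpoint} failed: ${event.message}`);
  } else if (event.position != null) {
    console.log(`queued at position ${event.position}, eta ${event.eta}s`);
  }
};
```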
node_modules/@gradio/client/src/upload.ts ADDED
@@ -0,0 +1,117 @@
1
+ import { upload_files } from "./client";
2
+
3
+ function is_url(str: string): boolean {
4
+ try {
5
+ const url = new URL(str);
6
+ return url.protocol === "http:" || url.protocol === "https:";
7
+ } catch {
8
+ return false;
9
+ }
10
+ }
11
+
12
+ export function get_fetchable_url_or_file(
13
+ path: string | null,
14
+ server_url: string,
15
+ proxy_url: string | null
16
+ ): string {
17
+ if (path == null) {
18
+ return proxy_url ? `/proxy=${proxy_url}file=` : `${server_url}/file=`;
19
+ }
20
+ if (is_url(path)) {
21
+ return path;
22
+ }
23
+ return proxy_url
24
+ ? `/proxy=${proxy_url}file=${path}`
25
+ : `${server_url}/file=${path}`;
26
+ }
27
+
28
+ export async function upload(
29
+ file_data: FileData[],
30
+ root: string,
31
+ upload_id?: string,
32
+ upload_fn: typeof upload_files = upload_files
33
+ ): Promise<(FileData | null)[] | null> {
34
+ let files = (Array.isArray(file_data) ? file_data : [file_data]).map(
35
+ (file_data) => file_data.blob!
36
+ );
37
+
38
+ return await Promise.all(
39
+ await upload_fn(root, files, undefined, upload_id).then(
40
+ async (response: { files?: string[]; error?: string }) => {
41
+ if (response.error) {
42
+ throw new Error(response.error);
43
+ } else {
44
+ if (response.files) {
45
+ return response.files.map((f, i) => {
46
+ const file = new FileData({
47
+ ...file_data[i],
48
+ path: f,
49
+ url: root + "/file=" + f
50
+ });
51
+ return file;
52
+ });
53
+ }
54
+
55
+ return [];
56
+ }
57
+ }
58
+ )
59
+ );
60
+ }
61
+
62
+ export async function prepare_files(
63
+ files: File[],
64
+ is_stream?: boolean
65
+ ): Promise<FileData[]> {
66
+ return files.map(
67
+ (f, i) =>
68
+ new FileData({
69
+ path: f.name,
70
+ orig_name: f.name,
71
+ blob: f,
72
+ size: f.size,
73
+ mime_type: f.type,
74
+ is_stream
75
+ })
76
+ );
77
+ }
78
+
79
+ export class FileData {
80
+ path: string;
81
+ url?: string;
82
+ orig_name?: string;
83
+ size?: number;
84
+ blob?: File;
85
+ is_stream?: boolean;
86
+ mime_type?: string;
87
+ alt_text?: string;
88
+
89
+ constructor({
90
+ path,
91
+ url,
92
+ orig_name,
93
+ size,
94
+ blob,
95
+ is_stream,
96
+ mime_type,
97
+ alt_text
98
+ }: {
99
+ path: string;
100
+ url?: string;
101
+ orig_name?: string;
102
+ size?: number;
103
+ blob?: File;
104
+ is_stream?: boolean;
105
+ mime_type?: string;
106
+ alt_text?: string;
107
+ }) {
108
+ this.path = path;
109
+ this.url = url;
110
+ this.orig_name = orig_name;
111
+ this.size = size;
112
+ this.blob = url ? undefined : blob;
113
+ this.is_stream = is_stream;
114
+ this.mime_type = mime_type;
115
+ this.alt_text = alt_text;
116
+ }
117
+ }
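A sketch of how `prepare_files` and `upload` fit together, assuming a browser-style `File` object and a Gradio server reachable at the `root` URL used below (both are assumptions for illustration):

```ts
import { prepare_files, upload } from "./upload";

// `file` would normally come from an <input type="file"> element or drag-and-drop.
const file = new File(["hello"], "hello.txt", { type: "text/plain" });

const file_data = await prepare_files([file]);
// Uploads the blobs and returns FileData entries whose `url` points at `${root}/file=<path>`.
const uploaded = await upload(file_data, "http://127.0.0.1:7860");
console.log(uploaded?.[0]?.url);
```

Note that the `FileData` constructor drops `blob` once a `url` is set, which is why uploaded entries are safe to serialize.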
node_modules/@gradio/client/src/utils.ts ADDED
@@ -0,0 +1,300 @@
1
+ import type { Config } from "./types.js";
2
+
3
+ /**
4
+ * This function is used to resolve the URL for making requests when the app has a root path.
5
+ * The root path could be a path suffix like "/app" which is appended to the end of the base URL. Or
6
+ * it could be a full URL like "https://abidlabs-test-client-replica--gqf2x.hf.space" which is used when hosting
7
+ * Gradio apps on Hugging Face Spaces.
8
+ * @param {string} base_url The base URL at which the Gradio server is hosted
9
+ * @param {string} root_path The root path, which could be a path suffix (e.g. mounted in FastAPI app) or a full URL (e.g. hosted on Hugging Face Spaces)
10
+ * @param {boolean} prioritize_base Whether to prioritize the base URL over the root path. This is used when both the base path and root paths are full URLs. For example, for fetching files the root path should be prioritized, but for making requests, the base URL should be prioritized.
11
+ * @returns {string} the resolved URL
12
+ */
13
+ export function resolve_root(
14
+ base_url: string,
15
+ root_path: string,
16
+ prioritize_base: boolean
17
+ ): string {
18
+ if (root_path.startsWith("http://") || root_path.startsWith("https://")) {
19
+ return prioritize_base ? base_url : root_path;
20
+ }
21
+ return base_url + root_path;
22
+ }
23
+
24
+ export function determine_protocol(endpoint: string): {
25
+ ws_protocol: "ws" | "wss";
26
+ http_protocol: "http:" | "https:";
27
+ host: string;
28
+ } {
29
+ if (endpoint.startsWith("http")) {
30
+ const { protocol, host } = new URL(endpoint);
31
+
32
+ if (host.endsWith("hf.space")) {
33
+ return {
34
+ ws_protocol: "wss",
35
+ host: host,
36
+ http_protocol: protocol as "http:" | "https:"
37
+ };
38
+ }
39
+ return {
40
+ ws_protocol: protocol === "https:" ? "wss" : "ws",
41
+ http_protocol: protocol as "http:" | "https:",
42
+ host
43
+ };
44
+ } else if (endpoint.startsWith("file:")) {
45
+ // This case is only expected to be used for the Wasm mode (Gradio-lite),
46
+ // where users can create a local HTML file using it and open the page in a browser directly via the `file:` protocol.
47
+ return {
48
+ ws_protocol: "ws",
49
+ http_protocol: "http:",
50
+ host: "lite.local" // Special fake hostname only used for this case. This matches the hostname allowed in `is_self_host()` in `js/wasm/network/host.ts`.
51
+ };
52
+ }
53
+
54
+ // default to secure if no protocol is provided
55
+ return {
56
+ ws_protocol: "wss",
57
+ http_protocol: "https:",
58
+ host: endpoint
59
+ };
60
+ }
61
+
62
+ export const RE_SPACE_NAME = /^[^\/]*\/[^\/]*$/;
63
+ export const RE_SPACE_DOMAIN = /.*hf\.space\/{0,1}$/;
64
+ export async function process_endpoint(
65
+ app_reference: string,
66
+ token?: `hf_${string}`
67
+ ): Promise<{
68
+ space_id: string | false;
69
+ host: string;
70
+ ws_protocol: "ws" | "wss";
71
+ http_protocol: "http:" | "https:";
72
+ }> {
73
+ const headers: { Authorization?: string } = {};
74
+ if (token) {
75
+ headers.Authorization = `Bearer ${token}`;
76
+ }
77
+
78
+ const _app_reference = app_reference.trim();
79
+
80
+ if (RE_SPACE_NAME.test(_app_reference)) {
81
+ try {
82
+ const res = await fetch(
83
+ `https://huggingface.co/api/spaces/${_app_reference}/host`,
84
+ { headers }
85
+ );
86
+
87
+ if (res.status !== 200)
88
+ throw new Error("Space metadata could not be loaded.");
89
+ const _host = (await res.json()).host;
90
+
91
+ return {
92
+ space_id: app_reference,
93
+ ...determine_protocol(_host)
94
+ };
95
+ } catch (e: any) {
96
+ throw new Error("Space metadata could not be loaded." + e.message);
97
+ }
98
+ }
99
+
100
+ if (RE_SPACE_DOMAIN.test(_app_reference)) {
101
+ const { ws_protocol, http_protocol, host } =
102
+ determine_protocol(_app_reference);
103
+
104
+ return {
105
+ space_id: host.replace(".hf.space", ""),
106
+ ws_protocol,
107
+ http_protocol,
108
+ host
109
+ };
110
+ }
111
+
112
+ return {
113
+ space_id: false,
114
+ ...determine_protocol(_app_reference)
115
+ };
116
+ }
117
+
118
+ export function map_names_to_ids(
119
+ fns: Config["dependencies"]
120
+ ): Record<string, number> {
121
+ let apis: Record<string, number> = {};
122
+
123
+ fns.forEach(({ api_name }, i) => {
124
+ if (api_name) apis[api_name] = i;
125
+ });
126
+
127
+ return apis;
128
+ }
129
+
130
+ const RE_DISABLED_DISCUSSION =
131
+ /^(?=[^]*\b[dD]iscussions{0,1}\b)(?=[^]*\b[dD]isabled\b)[^]*$/;
132
+ export async function discussions_enabled(space_id: string): Promise<boolean> {
133
+ try {
134
+ const r = await fetch(
135
+ `https://huggingface.co/api/spaces/${space_id}/discussions`,
136
+ {
137
+ method: "HEAD"
138
+ }
139
+ );
140
+ const error = r.headers.get("x-error-message");
141
+
142
+ if (error && RE_DISABLED_DISCUSSION.test(error)) return false;
143
+ return true;
144
+ } catch (e) {
145
+ return false;
146
+ }
147
+ }
148
+
149
+ export async function get_space_hardware(
150
+ space_id: string,
151
+ token: `hf_${string}`
152
+ ): Promise<(typeof hardware_types)[number]> {
153
+ const headers: { Authorization?: string } = {};
154
+ if (token) {
155
+ headers.Authorization = `Bearer ${token}`;
156
+ }
157
+
158
+ try {
159
+ const res = await fetch(
160
+ `https://huggingface.co/api/spaces/${space_id}/runtime`,
161
+ { headers }
162
+ );
163
+
164
+ if (res.status !== 200)
165
+ throw new Error("Space hardware could not be obtained.");
166
+
167
+ const { hardware } = await res.json();
168
+
169
+ return hardware;
170
+ } catch (e: any) {
171
+ throw new Error(e.message);
172
+ }
173
+ }
174
+
175
+ export async function set_space_hardware(
176
+ space_id: string,
177
+ new_hardware: (typeof hardware_types)[number],
178
+ token: `hf_${string}`
179
+ ): Promise<(typeof hardware_types)[number]> {
180
+ const headers: { Authorization?: string } = {};
181
+ if (token) {
182
+ headers.Authorization = `Bearer ${token}`;
183
+ }
184
+
185
+ try {
186
+ const res = await fetch(
187
+ `https://huggingface.co/api/spaces/${space_id}/hardware`,
188
+ { headers, body: JSON.stringify(new_hardware) }
189
+ );
190
+
191
+ if (res.status !== 200)
192
+ throw new Error(
193
+ "Space hardware could not be set. Please ensure the space hardware provided is valid and that a Hugging Face token is passed in."
194
+ );
195
+
196
+ const { hardware } = await res.json();
197
+
198
+ return hardware;
199
+ } catch (e: any) {
200
+ throw new Error(e.message);
201
+ }
202
+ }
203
+
204
+ export async function set_space_timeout(
205
+ space_id: string,
206
+ timeout: number,
207
+ token: `hf_${string}`
208
+ ): Promise<number> {
209
+ const headers: { Authorization?: string } = {};
210
+ if (token) {
211
+ headers.Authorization = `Bearer ${token}`;
212
+ }
213
+
214
+ try {
215
+ const res = await fetch(
216
+ `https://huggingface.co/api/spaces/${space_id}/hardware`,
217
+ { headers, body: JSON.stringify({ seconds: timeout }) }
218
+ );
219
+
220
+ if (res.status !== 200)
221
+ throw new Error(
222
+ "Space hardware could not be set. Please ensure the space hardware provided is valid and that a Hugging Face token is passed in."
223
+ );
224
+
225
+ const { hardware } = await res.json();
226
+
227
+ return hardware;
228
+ } catch (e: any) {
229
+ throw new Error(e.message);
230
+ }
231
+ }
232
+
233
+ export const hardware_types = [
234
+ "cpu-basic",
235
+ "cpu-upgrade",
236
+ "t4-small",
237
+ "t4-medium",
238
+ "a10g-small",
239
+ "a10g-large",
240
+ "a100-large"
241
+ ] as const;
242
+
243
+ function apply_edit(
244
+ target: any,
245
+ path: (number | string)[],
246
+ action: string,
247
+ value: any
248
+ ): any {
249
+ if (path.length === 0) {
250
+ if (action === "replace") {
251
+ return value;
252
+ } else if (action === "append") {
253
+ return target + value;
254
+ }
255
+ throw new Error(`Unsupported action: ${action}`);
256
+ }
257
+
258
+ let current = target;
259
+ for (let i = 0; i < path.length - 1; i++) {
260
+ current = current[path[i]];
261
+ }
262
+
263
+ const last_path = path[path.length - 1];
264
+ switch (action) {
265
+ case "replace":
266
+ current[last_path] = value;
267
+ break;
268
+ case "append":
269
+ current[last_path] += value;
270
+ break;
271
+ case "add":
272
+ if (Array.isArray(current)) {
273
+ current.splice(Number(last_path), 0, value);
274
+ } else {
275
+ current[last_path] = value;
276
+ }
277
+ break;
278
+ case "delete":
279
+ if (Array.isArray(current)) {
280
+ current.splice(Number(last_path), 1);
281
+ } else {
282
+ delete current[last_path];
283
+ }
284
+ break;
285
+ default:
286
+ throw new Error(`Unknown action: ${action}`);
287
+ }
288
+ return target;
289
+ }
290
+
291
+ export function apply_diff(
292
+ obj: any,
293
+ diff: [string, (number | string)[], any][]
294
+ ): any {
295
+ diff.forEach(([action, path, value]) => {
296
+ obj = apply_edit(obj, path, action, value);
297
+ });
298
+
299
+ return obj;
300
+ }
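`apply_diff`/`apply_edit` implement a small JSON-patch-like scheme: each entry is `[action, path, value]` with `replace`, `append`, `add` and `delete` actions. A worked example (the object shape is made up for illustration):

```ts
import { apply_diff } from "./utils";

const state = { text: "Hello", items: ["a", "b"] };

const next = apply_diff(state, [
  ["append", ["text"], " world"],   // state.text += " world"
  ["add", ["items", 1], "x"],       // splice "x" in at index 1
  ["replace", ["items", 0], "A"]    // items[0] = "A"
]);

console.log(next); // { text: "Hello world", items: ["A", "x", "b"] }
```

The edits are applied in place (except for a root-level `replace`/`append`, which returns the new value), so `next === state` here.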
node_modules/@gradio/client/tsconfig.json ADDED
@@ -0,0 +1,14 @@
+ {
+   "include": ["src/**/*"],
+   "exclude": ["src/**/*.test.ts", "src/**/*.node-test.ts"],
+   "compilerOptions": {
+     "allowJs": true,
+     "declaration": true,
+     "emitDeclarationOnly": true,
+     "outDir": "dist",
+     "declarationMap": true,
+     "module": "es2020",
+     "moduleResolution": "bundler",
+     "skipDefaultLibCheck": true
+   }
+ }
node_modules/@gradio/client/vite.config.js ADDED
@@ -0,0 +1,38 @@
1
+ import { defineConfig } from "vite";
2
+ import { svelte } from "@sveltejs/vite-plugin-svelte";
3
+ import { fileURLToPath } from "url";
4
+ import path from "path";
5
+ const __dirname = fileURLToPath(new URL(".", import.meta.url));
6
+
7
+ export default defineConfig({
8
+ build: {
9
+ lib: {
10
+ entry: "src/index.ts",
11
+ formats: ["es"]
12
+ },
13
+ rollupOptions: {
14
+ input: "src/index.ts",
15
+ output: {
16
+ dir: "dist"
17
+ }
18
+ }
19
+ },
20
+ plugins: [
21
+ svelte()
22
+ // {
23
+ // name: "resolve-gradio-client",
24
+ // enforce: "pre",
25
+ // resolveId(id) {
26
+ // if (id === "@gradio/client") {
27
+ // return path.join(__dirname, "src", "index.ts");
28
+ // }
29
+ // }
30
+ // }
31
+ ],
32
+
33
+ ssr: {
34
+ target: "node",
35
+ format: "esm",
36
+ noExternal: ["ws", "semiver", "@gradio/upload"]
37
+ }
38
+ });
node_modules/bufferutil/LICENSE ADDED
@@ -0,0 +1,20 @@
1
+ Copyright (c) 2011 Einar Otto Stangvik <einaros@gmail.com>
2
+ Copyright (c) 2013 Arnout Kazemier and contributors
3
+ Copyright (c) 2016 Luigi Pinca and contributors
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy of
6
+ this software and associated documentation files (the "Software"), to deal in
7
+ the Software without restriction, including without limitation the rights to
8
+ use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
9
+ the Software, and to permit persons to whom the Software is furnished to do so,
10
+ subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
17
+ FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
18
+ COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
19
+ IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
20
+ CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
node_modules/bufferutil/README.md ADDED
@@ -0,0 +1,78 @@
+ # bufferutil
+
+ [![Version npm](https://img.shields.io/npm/v/bufferutil.svg?logo=npm)](https://www.npmjs.com/package/bufferutil)
+ [![Linux/macOS/Windows Build](https://img.shields.io/github/actions/workflow/status/websockets/bufferutil/ci.yml?branch=master&label=build&logo=github)](https://github.com/websockets/bufferutil/actions?query=workflow%3ACI+branch%3Amaster)
+
+ `bufferutil` is what makes `ws` fast. It provides utilities to efficiently
+ perform operations such as masking and unmasking the data payload of
+ WebSocket frames.
+
+ ## Installation
+
+ ```
+ npm install bufferutil --save-optional
+ ```
+
+ The `--save-optional` flag tells npm to save the package in your package.json
+ under the
+ [`optionalDependencies`](https://docs.npmjs.com/files/package.json#optionaldependencies)
+ key.
+
+ ## API
+
+ The module exports two functions.
+
+ ### `bufferUtil.mask(source, mask, output, offset, length)`
+
+ Masks a buffer using the given masking-key as specified by the WebSocket
+ protocol.
+
+ #### Arguments
+
+ - `source` - The buffer to mask.
+ - `mask` - A buffer representing the masking-key.
+ - `output` - The buffer in which to store the result.
+ - `offset` - The offset at which to start writing.
+ - `length` - The number of bytes to mask.
+
+ #### Example
+
+ ```js
+ 'use strict';
+
+ const bufferUtil = require('bufferutil');
+ const crypto = require('crypto');
+
+ const source = crypto.randomBytes(10);
+ const mask = crypto.randomBytes(4);
+
+ bufferUtil.mask(source, mask, source, 0, source.length);
+ ```
+
+ ### `bufferUtil.unmask(buffer, mask)`
+
+ Unmasks a buffer using the given masking-key as specified by the WebSocket
+ protocol.
+
+ #### Arguments
+
+ - `buffer` - The buffer to unmask.
+ - `mask` - A buffer representing the masking-key.
+
+ #### Example
+
+ ```js
+ 'use strict';
+
+ const bufferUtil = require('bufferutil');
+ const crypto = require('crypto');
+
+ const buffer = crypto.randomBytes(10);
+ const mask = crypto.randomBytes(4);
+
+ bufferUtil.unmask(buffer, mask);
+ ```
+
+ ## License
+
+ [MIT](LICENSE)
node_modules/bufferutil/binding.gyp ADDED
@@ -0,0 +1,29 @@
1
+ {
2
+ 'targets': [
3
+ {
4
+ 'target_name': 'bufferutil',
5
+ 'sources': ['src/bufferutil.c'],
6
+ 'cflags': ['-std=c99'],
7
+ 'conditions': [
8
+ ["OS=='mac'", {
9
+ 'variables': {
10
+ 'clang_version':
11
+ '<!(cc -v 2>&1 | perl -ne \'print $1 if /clang version ([0-9]+(\.[0-9]+){2,})/\')'
12
+ },
13
+ 'xcode_settings': {
14
+ 'MACOSX_DEPLOYMENT_TARGET': '10.7'
15
+ },
16
+ 'conditions': [
17
+ # Use Perl v-strings to compare versions.
18
+ ['clang_version and <!(perl -e \'print <(clang_version) cmp 12.0.0\')==1', {
19
+ 'xcode_settings': {
20
+ 'OTHER_CFLAGS': ['-arch arm64'],
21
+ 'OTHER_LDFLAGS': ['-arch arm64']
22
+ }
23
+ }]
24
+ ]
25
+ }]
26
+ ]
27
+ }
28
+ ]
29
+ }
node_modules/bufferutil/fallback.js ADDED
@@ -0,0 +1,34 @@
+ 'use strict';
+
+ /**
+  * Masks a buffer using the given mask.
+  *
+  * @param {Buffer} source The buffer to mask
+  * @param {Buffer} mask The mask to use
+  * @param {Buffer} output The buffer where to store the result
+  * @param {Number} offset The offset at which to start writing
+  * @param {Number} length The number of bytes to mask.
+  * @public
+  */
+ const mask = (source, mask, output, offset, length) => {
+   for (var i = 0; i < length; i++) {
+     output[offset + i] = source[i] ^ mask[i & 3];
+   }
+ };
+
+ /**
+  * Unmasks a buffer using the given mask.
+  *
+  * @param {Buffer} buffer The buffer to unmask
+  * @param {Buffer} mask The mask to use
+  * @public
+  */
+ const unmask = (buffer, mask) => {
+   // Required until https://github.com/nodejs/node/issues/9006 is resolved.
+   const length = buffer.length;
+   for (var i = 0; i < length; i++) {
+     buffer[i] ^= mask[i & 3];
+   }
+ };
+
+ module.exports = { mask, unmask };
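The fallback is a plain XOR against the 4-byte masking key (`mask[i & 3]`), so applying the same key twice is the identity. A round-trip check, written in TypeScript for consistency with the rest of this document and loading the fallback directly (in practice `index.js` prefers the native binding):

```ts
import { randomBytes } from "node:crypto";
import { createRequire } from "node:module";

const require = createRequire(import.meta.url);
// Deep require of the JS fallback; bufferutil's package.json has no "exports" map,
// so this path resolves as a plain CommonJS file.
const { mask, unmask } = require("bufferutil/fallback");

const source = randomBytes(16);
const key = randomBytes(4);
const output = Buffer.alloc(16);

mask(source, key, output, 0, source.length); // output = source XOR key
unmask(output, key);                         // XOR again restores the original
console.log(output.equals(source));          // true
```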
node_modules/bufferutil/index.js ADDED
@@ -0,0 +1,7 @@
+ 'use strict';
+
+ try {
+   module.exports = require('node-gyp-build')(__dirname);
+ } catch (e) {
+   module.exports = require('./fallback');
+ }
node_modules/bufferutil/package.json ADDED
@@ -0,0 +1,36 @@
1
+ {
2
+ "name": "bufferutil",
3
+ "version": "4.0.8",
4
+ "description": "WebSocket buffer utils",
5
+ "main": "index.js",
6
+ "engines": {
7
+ "node": ">=6.14.2"
8
+ },
9
+ "scripts": {
10
+ "install": "node-gyp-build",
11
+ "prebuild": "prebuildify --napi --strip --target=14.0.0",
12
+ "prebuild-darwin-x64+arm64": "prebuildify --arch x64+arm64 --napi --strip --target=14.0.0",
13
+ "test": "mocha"
14
+ },
15
+ "repository": {
16
+ "type": "git",
17
+ "url": "https://github.com/websockets/bufferutil"
18
+ },
19
+ "keywords": [
20
+ "bufferutil"
21
+ ],
22
+ "author": "Einar Otto Stangvik <einaros@gmail.com> (http://2x.io)",
23
+ "license": "MIT",
24
+ "bugs": {
25
+ "url": "https://github.com/websockets/bufferutil/issues"
26
+ },
27
+ "homepage": "https://github.com/websockets/bufferutil",
28
+ "dependencies": {
29
+ "node-gyp-build": "^4.3.0"
30
+ },
31
+ "devDependencies": {
32
+ "mocha": "^10.0.0",
33
+ "node-gyp": "^9.1.0",
34
+ "prebuildify": "^5.0.0"
35
+ }
36
+ }
node_modules/bufferutil/prebuilds/darwin-x64+arm64/node.napi.node ADDED
Binary file (116 kB).
node_modules/bufferutil/prebuilds/linux-x64/node.napi.node ADDED
Binary file (14.6 kB).
node_modules/bufferutil/prebuilds/win32-ia32/node.napi.node ADDED
Binary file (122 kB).
node_modules/bufferutil/prebuilds/win32-x64/node.napi.node ADDED
Binary file (152 kB).
node_modules/bufferutil/src/bufferutil.c ADDED
@@ -0,0 +1,171 @@
1
+ #define NAPI_VERSION 1
2
+ #include <assert.h>
3
+ #include <node_api.h>
4
+
5
+ napi_value Mask(napi_env env, napi_callback_info info) {
6
+ napi_status status;
7
+ size_t argc = 5;
8
+ napi_value argv[5];
9
+
10
+ status = napi_get_cb_info(env, info, &argc, argv, NULL, NULL);
11
+ assert(status == napi_ok);
12
+
13
+ uint8_t *source;
14
+ uint8_t *mask;
15
+ uint8_t *destination;
16
+ uint32_t offset;
17
+ uint32_t length;
18
+
19
+ status = napi_get_buffer_info(env, argv[0], (void **)&source, NULL);
20
+ assert(status == napi_ok);
21
+
22
+ status = napi_get_buffer_info(env, argv[1], (void **)&mask, NULL);
23
+ assert(status == napi_ok);
24
+
25
+ status = napi_get_buffer_info(env, argv[2], (void **)&destination, NULL);
26
+ assert(status == napi_ok);
27
+
28
+ status = napi_get_value_uint32(env, argv[3], &offset);
29
+ assert(status == napi_ok);
30
+
31
+ status = napi_get_value_uint32(env, argv[4], &length);
32
+ assert(status == napi_ok);
33
+
34
+ destination += offset;
35
+ uint32_t index = 0;
36
+
37
+ //
38
+ // Alignment preamble.
39
+ //
40
+ while (index < length && ((size_t)source % 8)) {
41
+ *destination++ = *source++ ^ mask[index % 4];
42
+ index++;
43
+ }
44
+
45
+ length -= index;
46
+ if (!length)
47
+ return NULL;
48
+
49
+ //
50
+ // Realign mask and convert to 64 bit.
51
+ //
52
+ uint8_t maskAlignedArray[8];
53
+
54
+ for (uint8_t i = 0; i < 8; i++, index++) {
55
+ maskAlignedArray[i] = mask[index % 4];
56
+ }
57
+
58
+ //
59
+ // Apply 64 bit mask in 8 byte chunks.
60
+ //
61
+ uint32_t loop = length / 8;
62
+ uint64_t *pMask8 = (uint64_t *)maskAlignedArray;
63
+
64
+ while (loop--) {
65
+ uint64_t *pFrom8 = (uint64_t *)source;
66
+ uint64_t *pTo8 = (uint64_t *)destination;
67
+ *pTo8 = *pFrom8 ^ *pMask8;
68
+ source += 8;
69
+ destination += 8;
70
+ }
71
+
72
+ //
73
+ // Apply mask to remaining data.
74
+ //
75
+ uint8_t *pmaskAlignedArray = maskAlignedArray;
76
+
77
+ length %= 8;
78
+ while (length--) {
79
+ *destination++ = *source++ ^ *pmaskAlignedArray++;
80
+ }
81
+
82
+ return NULL;
83
+ }
84
+
85
+ napi_value Unmask(napi_env env, napi_callback_info info) {
86
+ napi_status status;
87
+ size_t argc = 2;
88
+ napi_value argv[2];
89
+
90
+ status = napi_get_cb_info(env, info, &argc, argv, NULL, NULL);
91
+ assert(status == napi_ok);
92
+
93
+ uint8_t *source;
94
+ size_t length;
95
+ uint8_t *mask;
96
+
97
+ status = napi_get_buffer_info(env, argv[0], (void **)&source, &length);
98
+ assert(status == napi_ok);
99
+
100
+ status = napi_get_buffer_info(env, argv[1], (void **)&mask, NULL);
101
+ assert(status == napi_ok);
102
+
103
+ uint32_t index = 0;
104
+
105
+ //
106
+ // Alignment preamble.
107
+ //
108
+ while (index < length && ((size_t)source % 8)) {
109
+ *source++ ^= mask[index % 4];
110
+ index++;
111
+ }
112
+
113
+ length -= index;
114
+ if (!length)
115
+ return NULL;
116
+
117
+ //
118
+ // Realign mask and convert to 64 bit.
119
+ //
120
+ uint8_t maskAlignedArray[8];
121
+
122
+ for (uint8_t i = 0; i < 8; i++, index++) {
123
+ maskAlignedArray[i] = mask[index % 4];
124
+ }
125
+
126
+ //
127
+ // Apply 64 bit mask in 8 byte chunks.
128
+ //
129
+ uint32_t loop = length / 8;
130
+ uint64_t *pMask8 = (uint64_t *)maskAlignedArray;
131
+
132
+ while (loop--) {
133
+ uint64_t *pSource8 = (uint64_t *)source;
134
+ *pSource8 ^= *pMask8;
135
+ source += 8;
136
+ }
137
+
138
+ //
139
+ // Apply mask to remaining data.
140
+ //
141
+ uint8_t *pmaskAlignedArray = maskAlignedArray;
142
+
143
+ length %= 8;
144
+ while (length--) {
145
+ *source++ ^= *pmaskAlignedArray++;
146
+ }
147
+
148
+ return NULL;
149
+ }
150
+
151
+ napi_value Init(napi_env env, napi_value exports) {
152
+ napi_status status;
153
+ napi_value mask;
154
+ napi_value unmask;
155
+
156
+ status = napi_create_function(env, NULL, 0, Mask, NULL, &mask);
157
+ assert(status == napi_ok);
158
+
159
+ status = napi_create_function(env, NULL, 0, Unmask, NULL, &unmask);
160
+ assert(status == napi_ok);
161
+
162
+ status = napi_set_named_property(env, exports, "mask", mask);
163
+ assert(status == napi_ok);
164
+
165
+ status = napi_set_named_property(env, exports, "unmask", unmask);
166
+ assert(status == napi_ok);
167
+
168
+ return exports;
169
+ }
170
+
171
+ NAPI_MODULE(NODE_GYP_MODULE_NAME, Init)
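The native `Mask` routine first handles a few unaligned leading bytes, then XORs the bulk of the payload eight bytes at a time against a realigned 64-bit copy of the key, and finishes byte by byte. A rough TypeScript rendering of the same chunking idea, purely illustrative and not part of the addon:

```ts
// Illustrative sketch of the 8-byte-chunk XOR used by Mask above (the pointer
// alignment preamble is omitted; buffer indices stand in for addresses).
function maskChunked(source: Buffer, key: Buffer, output: Buffer, offset: number, length: number): void {
  // Replicate the 4-byte key into one 64-bit word, like maskAlignedArray in the C code.
  const key64 = Buffer.concat([key, key]).readBigUInt64LE(0);

  let i = 0;
  for (; i + 8 <= length; i += 8) {
    // Bulk path: one 64-bit XOR per 8 bytes.
    output.writeBigUInt64LE(source.readBigUInt64LE(i) ^ key64, offset + i);
  }
  for (; i < length; i++) {
    // Tail path: XOR the remaining bytes one at a time.
    output[offset + i] = source[i] ^ key[i & 3];
  }
}
```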
node_modules/node-gyp-build/LICENSE ADDED
@@ -0,0 +1,21 @@
1
+ The MIT License (MIT)
2
+
3
+ Copyright (c) 2017 Mathias Buus
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in
13
+ all copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21
+ THE SOFTWARE.
node_modules/node-gyp-build/README.md ADDED
@@ -0,0 +1,58 @@
+ # node-gyp-build
+
+ > Build tool and bindings loader for [`node-gyp`][node-gyp] that supports prebuilds.
+
+ ```
+ npm install node-gyp-build
+ ```
+
+ [![Test](https://github.com/prebuild/node-gyp-build/actions/workflows/test.yml/badge.svg)](https://github.com/prebuild/node-gyp-build/actions/workflows/test.yml)
+
+ Use together with [`prebuildify`][prebuildify] to easily support prebuilds for your native modules.
+
+ ## Usage
+
+ > **Note.** Prebuild names have changed in [`prebuildify@3`][prebuildify] and `node-gyp-build@4`. Please see the documentation below.
+
+ `node-gyp-build` works similarly to [`node-gyp build`][node-gyp] except that it will check if a build or prebuild is present before rebuilding your project.
+
+ Its main intended use is as an npm install script and bindings loader for native modules that bundle prebuilds using [`prebuildify`][prebuildify].
+
+ First add `node-gyp-build` as an install script to your native project:
+
+ ``` js
+ {
+   ...
+   "scripts": {
+     "install": "node-gyp-build"
+   }
+ }
+ ```
+
+ Then in your `index.js`, instead of using the [`bindings`](https://www.npmjs.com/package/bindings) module, use `node-gyp-build` to load your binding.
+
+ ``` js
+ var binding = require('node-gyp-build')(__dirname)
+ ```
+
+ If you do these two things and bundle prebuilds with [`prebuildify`][prebuildify], your native module will work for most platforms
+ without having to compile at install time AND will work in both node and electron without the need to recompile between usage.
+
+ Users can override `node-gyp-build` and force compiling by doing `npm install --build-from-source`.
+
+ Prebuilds will be loaded first from `MODULE_PATH/prebuilds/...` and then from `EXEC_PATH/prebuilds/...` (the latter allowing use with `zeit/pkg`).
+
+ ## Supported prebuild names
+
+ If so desired, you can bundle more specific flavors, for example `musl` builds to support Alpine, or targeting a numbered ARM architecture version.
+
+ These prebuilds can be bundled in addition to generic prebuilds; `node-gyp-build` will try to find the most specific flavor first. Prebuild filenames are composed of _tags_. The runtime tag takes precedence, as does an `abi` tag over `napi`. For more details on tags, please see [`prebuildify`][prebuildify].
+
+ Values for the `libc` and `armv` tags are auto-detected but can be overridden through the `LIBC` and `ARM_VERSION` environment variables, respectively.
+
+ ## License
+
+ MIT
+
+ [prebuildify]: https://github.com/prebuild/prebuildify
+ [node-gyp]: https://www.npmjs.com/package/node-gyp
node_modules/node-gyp-build/bin.js ADDED
@@ -0,0 +1,78 @@
1
+ #!/usr/bin/env node
2
+
3
+ var proc = require('child_process')
4
+ var os = require('os')
5
+ var path = require('path')
6
+
7
+ if (!buildFromSource()) {
8
+ proc.exec('node-gyp-build-test', function (err, stdout, stderr) {
9
+ if (err) {
10
+ if (verbose()) console.error(stderr)
11
+ preinstall()
12
+ }
13
+ })
14
+ } else {
15
+ preinstall()
16
+ }
17
+
18
+ function build () {
19
+ var args = [os.platform() === 'win32' ? 'node-gyp.cmd' : 'node-gyp', 'rebuild']
20
+
21
+ try {
22
+ var pkg = require('node-gyp/package.json')
23
+ args = [
24
+ process.execPath,
25
+ path.join(require.resolve('node-gyp/package.json'), '..', typeof pkg.bin === 'string' ? pkg.bin : pkg.bin['node-gyp']),
26
+ 'rebuild'
27
+ ]
28
+ } catch (_) {}
29
+
30
+ proc.spawn(args[0], args.slice(1), { stdio: 'inherit' }).on('exit', function (code) {
31
+ if (code || !process.argv[3]) process.exit(code)
32
+ exec(process.argv[3]).on('exit', function (code) {
33
+ process.exit(code)
34
+ })
35
+ })
36
+ }
37
+
38
+ function preinstall () {
39
+ if (!process.argv[2]) return build()
40
+ exec(process.argv[2]).on('exit', function (code) {
41
+ if (code) process.exit(code)
42
+ build()
43
+ })
44
+ }
45
+
46
+ function exec (cmd) {
47
+ if (process.platform !== 'win32') {
48
+ var shell = os.platform() === 'android' ? 'sh' : '/bin/sh'
49
+ return proc.spawn(shell, ['-c', '--', cmd], {
50
+ stdio: 'inherit'
51
+ })
52
+ }
53
+
54
+ return proc.spawn(process.env.comspec || 'cmd.exe', ['/s', '/c', '"' + cmd + '"'], {
55
+ windowsVerbatimArguments: true,
56
+ stdio: 'inherit'
57
+ })
58
+ }
59
+
60
+ function buildFromSource () {
61
+ return hasFlag('--build-from-source') || process.env.npm_config_build_from_source === 'true'
62
+ }
63
+
64
+ function verbose () {
65
+ return hasFlag('--verbose') || process.env.npm_config_loglevel === 'verbose'
66
+ }
67
+
68
+ // TODO (next major): remove in favor of env.npm_config_* which works since npm
69
+ // 0.1.8 while npm_config_argv will stop working in npm 7. See npm/rfcs#90
70
+ function hasFlag (flag) {
71
+ if (!process.env.npm_config_argv) return false
72
+
73
+ try {
74
+ return JSON.parse(process.env.npm_config_argv).original.indexOf(flag) !== -1
75
+ } catch (_) {
76
+ return false
77
+ }
78
+ }
node_modules/node-gyp-build/build-test.js ADDED
@@ -0,0 +1,19 @@
+ #!/usr/bin/env node
+
+ process.env.NODE_ENV = 'test'
+
+ var path = require('path')
+ var test = null
+
+ try {
+   var pkg = require(path.join(process.cwd(), 'package.json'))
+   if (pkg.name && process.env[pkg.name.toUpperCase().replace(/-/g, '_')]) {
+     process.exit(0)
+   }
+   test = pkg.prebuild.test
+ } catch (err) {
+   // do nothing
+ }
+
+ if (test) require(path.join(process.cwd(), test))
+ else require('./')()
node_modules/node-gyp-build/index.js ADDED
@@ -0,0 +1,6 @@
+ const runtimeRequire = typeof __webpack_require__ === 'function' ? __non_webpack_require__ : require // eslint-disable-line
+ if (typeof runtimeRequire.addon === 'function') { // if the platform supports native resolving prefer that
+   module.exports = runtimeRequire.addon.bind(runtimeRequire)
+ } else { // else use the runtime version here
+   module.exports = require('./node-gyp-build.js')
+ }
node_modules/node-gyp-build/node-gyp-build.js ADDED
@@ -0,0 +1,207 @@
1
+ var fs = require('fs')
2
+ var path = require('path')
3
+ var os = require('os')
4
+
5
+ // Workaround to fix webpack's build warnings: 'the request of a dependency is an expression'
6
+ var runtimeRequire = typeof __webpack_require__ === 'function' ? __non_webpack_require__ : require // eslint-disable-line
7
+
8
+ var vars = (process.config && process.config.variables) || {}
9
+ var prebuildsOnly = !!process.env.PREBUILDS_ONLY
10
+ var abi = process.versions.modules // TODO: support old node where this is undef
11
+ var runtime = isElectron() ? 'electron' : (isNwjs() ? 'node-webkit' : 'node')
12
+
13
+ var arch = process.env.npm_config_arch || os.arch()
14
+ var platform = process.env.npm_config_platform || os.platform()
15
+ var libc = process.env.LIBC || (isAlpine(platform) ? 'musl' : 'glibc')
16
+ var armv = process.env.ARM_VERSION || (arch === 'arm64' ? '8' : vars.arm_version) || ''
17
+ var uv = (process.versions.uv || '').split('.')[0]
18
+
19
+ module.exports = load
20
+
21
+ function load (dir) {
22
+ return runtimeRequire(load.resolve(dir))
23
+ }
24
+
25
+ load.resolve = load.path = function (dir) {
26
+ dir = path.resolve(dir || '.')
27
+
28
+ try {
29
+ var name = runtimeRequire(path.join(dir, 'package.json')).name.toUpperCase().replace(/-/g, '_')
30
+ if (process.env[name + '_PREBUILD']) dir = process.env[name + '_PREBUILD']
31
+ } catch (err) {}
32
+
33
+ if (!prebuildsOnly) {
34
+ var release = getFirst(path.join(dir, 'build/Release'), matchBuild)
35
+ if (release) return release
36
+
37
+ var debug = getFirst(path.join(dir, 'build/Debug'), matchBuild)
38
+ if (debug) return debug
39
+ }
40
+
41
+ var prebuild = resolve(dir)
42
+ if (prebuild) return prebuild
43
+
44
+ var nearby = resolve(path.dirname(process.execPath))
45
+ if (nearby) return nearby
46
+
47
+ var target = [
48
+ 'platform=' + platform,
49
+ 'arch=' + arch,
50
+ 'runtime=' + runtime,
51
+ 'abi=' + abi,
52
+ 'uv=' + uv,
53
+ armv ? 'armv=' + armv : '',
54
+ 'libc=' + libc,
55
+ 'node=' + process.versions.node,
56
+ process.versions.electron ? 'electron=' + process.versions.electron : '',
57
+ typeof __webpack_require__ === 'function' ? 'webpack=true' : '' // eslint-disable-line
58
+ ].filter(Boolean).join(' ')
59
+
60
+ throw new Error('No native build was found for ' + target + '\n loaded from: ' + dir + '\n')
61
+
62
+ function resolve (dir) {
63
+ // Find matching "prebuilds/<platform>-<arch>" directory
64
+ var tuples = readdirSync(path.join(dir, 'prebuilds')).map(parseTuple)
65
+ var tuple = tuples.filter(matchTuple(platform, arch)).sort(compareTuples)[0]
66
+ if (!tuple) return
67
+
68
+ // Find most specific flavor first
69
+ var prebuilds = path.join(dir, 'prebuilds', tuple.name)
70
+ var parsed = readdirSync(prebuilds).map(parseTags)
71
+ var candidates = parsed.filter(matchTags(runtime, abi))
72
+ var winner = candidates.sort(compareTags(runtime))[0]
73
+ if (winner) return path.join(prebuilds, winner.file)
74
+ }
75
+ }
76
+
77
+ function readdirSync (dir) {
78
+ try {
79
+ return fs.readdirSync(dir)
80
+ } catch (err) {
81
+ return []
82
+ }
83
+ }
84
+
85
+ function getFirst (dir, filter) {
86
+ var files = readdirSync(dir).filter(filter)
87
+ return files[0] && path.join(dir, files[0])
88
+ }
89
+
90
+ function matchBuild (name) {
91
+ return /\.node$/.test(name)
92
+ }
93
+
94
+ function parseTuple (name) {
95
+ // Example: darwin-x64+arm64
96
+ var arr = name.split('-')
97
+ if (arr.length !== 2) return
98
+
99
+ var platform = arr[0]
100
+ var architectures = arr[1].split('+')
101
+
102
+ if (!platform) return
103
+ if (!architectures.length) return
104
+ if (!architectures.every(Boolean)) return
105
+
106
+ return { name, platform, architectures }
107
+ }
108
+
109
+ function matchTuple (platform, arch) {
110
+ return function (tuple) {
111
+ if (tuple == null) return false
112
+ if (tuple.platform !== platform) return false
113
+ return tuple.architectures.includes(arch)
114
+ }
115
+ }
116
+
117
+ function compareTuples (a, b) {
118
+ // Prefer single-arch prebuilds over multi-arch
119
+ return a.architectures.length - b.architectures.length
120
+ }
121
+
122
+ function parseTags (file) {
123
+ var arr = file.split('.')
124
+ var extension = arr.pop()
125
+ var tags = { file: file, specificity: 0 }
126
+
127
+ if (extension !== 'node') return
128
+
129
+ for (var i = 0; i < arr.length; i++) {
130
+ var tag = arr[i]
131
+
132
+ if (tag === 'node' || tag === 'electron' || tag === 'node-webkit') {
133
+ tags.runtime = tag
134
+ } else if (tag === 'napi') {
135
+ tags.napi = true
136
+ } else if (tag.slice(0, 3) === 'abi') {
137
+ tags.abi = tag.slice(3)
138
+ } else if (tag.slice(0, 2) === 'uv') {
139
+ tags.uv = tag.slice(2)
140
+ } else if (tag.slice(0, 4) === 'armv') {
141
+ tags.armv = tag.slice(4)
142
+ } else if (tag === 'glibc' || tag === 'musl') {
143
+ tags.libc = tag
144
+ } else {
145
+ continue
146
+ }
147
+
148
+ tags.specificity++
149
+ }
150
+
151
+ return tags
152
+ }
153
+
154
+ function matchTags (runtime, abi) {
155
+ return function (tags) {
156
+ if (tags == null) return false
157
+ if (tags.runtime && tags.runtime !== runtime && !runtimeAgnostic(tags)) return false
158
+ if (tags.abi && tags.abi !== abi && !tags.napi) return false
159
+ if (tags.uv && tags.uv !== uv) return false
160
+ if (tags.armv && tags.armv !== armv) return false
161
+ if (tags.libc && tags.libc !== libc) return false
162
+
163
+ return true
164
+ }
165
+ }
166
+
167
+ function runtimeAgnostic (tags) {
168
+ return tags.runtime === 'node' && tags.napi
169
+ }
170
+
171
+ function compareTags (runtime) {
172
+ // Precedence: non-agnostic runtime, abi over napi, then by specificity.
173
+ return function (a, b) {
174
+ if (a.runtime !== b.runtime) {
175
+ return a.runtime === runtime ? -1 : 1
176
+ } else if (a.abi !== b.abi) {
177
+ return a.abi ? -1 : 1
178
+ } else if (a.specificity !== b.specificity) {
179
+ return a.specificity > b.specificity ? -1 : 1
180
+ } else {
181
+ return 0
182
+ }
183
+ }
184
+ }
185
+
186
+ function isNwjs () {
187
+ return !!(process.versions && process.versions.nw)
188
+ }
189
+
190
+ function isElectron () {
191
+ if (process.versions && process.versions.electron) return true
192
+ if (process.env.ELECTRON_RUN_AS_NODE) return true
193
+ return typeof window !== 'undefined' && window.process && window.process.type === 'renderer'
194
+ }
195
+
196
+ function isAlpine (platform) {
197
+ return platform === 'linux' && fs.existsSync('/etc/alpine-release')
198
+ }
199
+
200
+ // Exposed for unit tests
201
+ // TODO: move to lib
202
+ load.parseTags = parseTags
203
+ load.matchTags = matchTags
204
+ load.compareTags = compareTags
205
+ load.parseTuple = parseTuple
206
+ load.matchTuple = matchTuple
207
+ load.compareTuples = compareTuples
node_modules/node-gyp-build/optional.js ADDED
@@ -0,0 +1,7 @@
+ #!/usr/bin/env node
+
+ /*
+ I am only useful as an install script to make node-gyp not compile for purely optional native deps
+ */
+
+ process.exit(0)
node_modules/node-gyp-build/package.json ADDED
@@ -0,0 +1,29 @@
1
+ {
2
+ "name": "node-gyp-build",
3
+ "version": "4.8.0",
4
+ "description": "Build tool and bindings loader for node-gyp that supports prebuilds",
5
+ "main": "index.js",
6
+ "devDependencies": {
7
+ "array-shuffle": "^1.0.1",
8
+ "standard": "^14.0.0",
9
+ "tape": "^5.0.0"
10
+ },
11
+ "scripts": {
12
+ "test": "standard && node test"
13
+ },
14
+ "bin": {
15
+ "node-gyp-build": "./bin.js",
16
+ "node-gyp-build-optional": "./optional.js",
17
+ "node-gyp-build-test": "./build-test.js"
18
+ },
19
+ "repository": {
20
+ "type": "git",
21
+ "url": "https://github.com/prebuild/node-gyp-build.git"
22
+ },
23
+ "author": "Mathias Buus (@mafintosh)",
24
+ "license": "MIT",
25
+ "bugs": {
26
+ "url": "https://github.com/prebuild/node-gyp-build/issues"
27
+ },
28
+ "homepage": "https://github.com/prebuild/node-gyp-build"
29
+ }
node_modules/semiver/dist/semiver.js ADDED
@@ -0,0 +1,12 @@
+ var fn = new Intl.Collator(0, { numeric:1 }).compare;
+
+ module.exports = function (a, b, bool) {
+   a = a.split('.');
+   b = b.split('.');
+
+   return fn(a[0], b[0]) || fn(a[1], b[1]) || (
+     b[2] = b.slice(2).join('.'),
+     bool = /[.-]/.test(a[2] = a.slice(2).join('.')),
+     bool == /[.-]/.test(b[2]) ? fn(a[2], b[2]) : bool ? -1 : 1
+   );
+ }
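`semiver` is a tiny comparator that returns a negative, zero or positive value (the shape `Array.prototype.sort` expects), using a numeric `Intl.Collator` so that `1.10.0` sorts after `1.2.0`. For illustration, assuming the package's CommonJS entry resolves to this dist file:

```ts
import { createRequire } from "node:module";
const require = createRequire(import.meta.url);
const semiver = require("semiver");

console.log(semiver("1.2.3", "1.10.0") < 0);  // true: 1.2.3 sorts before 1.10.0
console.log(semiver("4.16.2", "4.16.2"));     // 0: equal versions
console.log(["1.10.0", "1.2.0", "1.9.1"].sort(semiver));
// [ '1.2.0', '1.9.1', '1.10.0' ]
```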