mindmime committed on
Commit
a03b3ba
1 Parent(s): f26a977

Upload folder using huggingface_hub

Browse files
This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
Files changed (50) hide show
  1. .changeset/README.md +8 -0
  2. .changeset/changeset.cjs +280 -0
  3. .changeset/config.json +11 -0
  4. .changeset/every-bears-peel.md +6 -0
  5. .changeset/fix_changelogs.cjs +122 -0
  6. .config/.prettierignore +30 -0
  7. .config/.prettierrc.json +7 -0
  8. .config/basevite.config.ts +90 -0
  9. .config/copy_frontend.py +59 -0
  10. .config/eslint.config.js +153 -0
  11. .config/playwright-ct.config.ts +41 -0
  12. .config/playwright-setup.js +152 -0
  13. .config/playwright.config.js +24 -0
  14. .config/playwright/index.html +12 -0
  15. .config/playwright/index.ts +2 -0
  16. .config/postcss.config.cjs +8 -0
  17. .config/setup_vite_tests.ts +11 -0
  18. .config/tailwind.config.cjs +12 -0
  19. .config/vitest.config.ts +3 -0
  20. .devcontainer/devcontainer.json +41 -0
  21. .dockerignore +40 -0
  22. .editorconfig +8 -0
  23. .git-blame-ignore-revs +14 -0
  24. .gitattributes +17 -0
  25. .github/ISSUE_TEMPLATE/bug_report_template.yml +69 -0
  26. .github/ISSUE_TEMPLATE/config.yml +5 -0
  27. .github/ISSUE_TEMPLATE/feature_request.md +19 -0
  28. .github/PULL_REQUEST_TEMPLATE.md +18 -0
  29. .github/actions/install-all-deps/action.yml +60 -0
  30. .github/actions/install-frontend-deps/action.yml +51 -0
  31. .github/stale +17 -0
  32. .github/workflows/backend.yml +244 -0
  33. .github/workflows/build-pr.yml +76 -0
  34. .github/workflows/check-demo-notebooks.yml +50 -0
  35. .github/workflows/comment-queue.yml +36 -0
  36. .github/workflows/delete-stale-spaces.yml +35 -0
  37. .github/workflows/deploy-chromatic.yml +88 -0
  38. .github/workflows/deploy-pr-to-spaces.yml +99 -0
  39. .github/workflows/deploy-website.yml +108 -0
  40. .github/workflows/generate-changeset.yml +88 -0
  41. .github/workflows/large-files.yml +21 -0
  42. .github/workflows/publish-npm.yml +77 -0
  43. .github/workflows/report-notebook-status-pr.yml +47 -0
  44. .github/workflows/trigger-changeset.yml +19 -0
  45. .github/workflows/ui.yml +103 -0
  46. .gitignore +80 -0
  47. .vscode/extensions.json +9 -0
  48. .vscode/settings.json +23 -0
  49. CHANGELOG.md +0 -0
  50. CITATION.cff +45 -0
.changeset/README.md ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ # Changesets
2
+
3
+ Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works
4
+ with multi-package repos, or single-package repos to help you version and publish your code. You can
5
+ find the full documentation for it [in our repository](https://github.com/changesets/changesets)
6
+
7
+ We have a quick list of common questions to get you started engaging with this project in
8
+ [our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md)
.changeset/changeset.cjs ADDED
@@ -0,0 +1,280 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ const { getPackagesSync } = require("@manypkg/get-packages");
2
+ const gh = require("@changesets/get-github-info");
3
+ const { existsSync, readFileSync, writeFileSync } = require("fs");
4
+ const { join } = require("path");
5
+
6
+ const { getInfo, getInfoFromPullRequest } = gh;
7
+ const { packages, rootDir } = getPackagesSync(process.cwd());
8
+
9
+ /**
10
+ * @typedef {{packageJson: {name: string, python?: boolean}, dir: string}} Package
11
+ */
12
+
13
+ /**
14
+ * @typedef {{summary: string, id: string, commit: string, releases: {name: string}}} Changeset
15
+ */
16
+
17
+ /**
18
+ *
19
+ * @param {string} package_name The name of the package to find the directories for
20
+ * @returns {string[]} The directories for the package
21
+ */
22
+ function find_packages_dirs(package_name) {
23
+ /** @type {string[]} */
24
+ let package_dirs = [];
25
+
26
+ /** @type {Package | undefined} */
27
+ const _package = packages.find((p) => p.packageJson.name === package_name);
28
+ if (!_package) throw new Error(`Package ${package_name} not found`);
29
+
30
+ package_dirs.push(_package.dir);
31
+ if (_package.packageJson.python) {
32
+ package_dirs.push(join(_package.dir, ".."));
33
+ }
34
+ return package_dirs;
35
+ }
36
+
37
+ const changelogFunctions = {
38
+ /**
39
+ *
40
+ * @param {Changeset[]} changesets The changesets that have been created
41
+ * @param {any} dependenciesUpdated The dependencies that have been updated
42
+ * @param {any} options The options passed to the changelog generator
43
+ * @returns {Promise<string>} The release line for the dependencies
44
+ */
45
+ getDependencyReleaseLine: async (
46
+ changesets,
47
+ dependenciesUpdated,
48
+ options
49
+ ) => {
50
+ if (!options.repo) {
51
+ throw new Error(
52
+ 'Please provide a repo to this changelog generator like this:\n"changelog": ["@changesets/changelog-github", { "repo": "org/repo" }]'
53
+ );
54
+ }
55
+ if (dependenciesUpdated.length === 0) return "";
56
+
57
+ const changesetLink = `- Updated dependencies [${(
58
+ await Promise.all(
59
+ changesets.map(async (cs) => {
60
+ if (cs.commit) {
61
+ let { links } = await getInfo({
62
+ repo: options.repo,
63
+ commit: cs.commit
64
+ });
65
+ return links.commit;
66
+ }
67
+ })
68
+ )
69
+ )
70
+ .filter((_) => _)
71
+ .join(", ")}]:`;
72
+
73
+ const updatedDepenenciesList = dependenciesUpdated.map(
74
+ /**
75
+ *
76
+ * @param {any} dependency The dependency that has been updated
77
+ * @returns {string} The formatted dependency
78
+ */
79
+ (dependency) => ` - ${dependency.name}@${dependency.newVersion}`
80
+ );
81
+
82
+ return [changesetLink, ...updatedDepenenciesList].join("\n");
83
+ },
84
+ /**
85
+ *
86
+ * @param {{summary: string, id: string, commit: string, releases: {name: string}[]}} changeset The changeset that has been created
87
+ * @param {any} type The type of changeset
88
+ * @param {any} options The options passed to the changelog generator
89
+ * @returns {Promise<string>} The release line for the changeset
90
+ */
91
+ getReleaseLine: async (changeset, type, options) => {
92
+ if (!options || !options.repo) {
93
+ throw new Error(
94
+ 'Please provide a repo to this changelog generator like this:\n"changelog": ["@changesets/changelog-github", { "repo": "org/repo" }]'
95
+ );
96
+ }
97
+
98
+ let prFromSummary;
99
+ let commitFromSummary;
100
+ /**
101
+ * @type {string[]}
102
+ */
103
+ let usersFromSummary = [];
104
+
105
+ const replacedChangelog = changeset.summary
106
+ .replace(/^\s*(?:pr|pull|pull\s+request):\s*#?(\d+)/im, (_, pr) => {
107
+ let num = Number(pr);
108
+ if (!isNaN(num)) prFromSummary = num;
109
+ return "";
110
+ })
111
+ .replace(/^\s*commit:\s*([^\s]+)/im, (_, commit) => {
112
+ commitFromSummary = commit;
113
+ return "";
114
+ })
115
+ .replace(/^\s*(?:author|user):\s*@?([^\s]+)/gim, (_, user) => {
116
+ usersFromSummary.push(user);
117
+ return "";
118
+ })
119
+ .trim();
120
+
121
+ const [firstLine, ...futureLines] = replacedChangelog
122
+ .split("\n")
123
+ .map((l) => l.trimRight());
124
+
125
+ const links = await (async () => {
126
+ if (prFromSummary !== undefined) {
127
+ let { links } = await getInfoFromPullRequest({
128
+ repo: options.repo,
129
+ pull: prFromSummary
130
+ });
131
+ if (commitFromSummary) {
132
+ links = {
133
+ ...links,
134
+ commit: `[\`${commitFromSummary}\`](https://github.com/${options.repo}/commit/${commitFromSummary})`
135
+ };
136
+ }
137
+ return links;
138
+ }
139
+ const commitToFetchFrom = commitFromSummary || changeset.commit;
140
+ if (commitToFetchFrom) {
141
+ let { links } = await getInfo({
142
+ repo: options.repo,
143
+ commit: commitToFetchFrom
144
+ });
145
+ return links;
146
+ }
147
+ return {
148
+ commit: null,
149
+ pull: null,
150
+ user: null
151
+ };
152
+ })();
153
+
154
+ const users =
155
+ usersFromSummary && usersFromSummary.length
156
+ ? usersFromSummary
157
+ .map(
158
+ (userFromSummary) =>
159
+ `[@${userFromSummary}](https://github.com/${userFromSummary})`
160
+ )
161
+ .join(", ")
162
+ : links.user;
163
+
164
+ const prefix = [
165
+ links.pull === null ? "" : `${links.pull}`,
166
+ links.commit === null ? "" : `${links.commit}`
167
+ ]
168
+ .join(" ")
169
+ .trim();
170
+
171
+ const suffix = users === null ? "" : ` Thanks ${users}!`;
172
+
173
+ /**
174
+ * @typedef {{[key: string]: string[] | {dirs: string[], current_changelog: string, feat: {summary: string}[], fix: {summary: string}[], highlight: {summary: string}[]}}} ChangesetMeta
175
+ */
176
+
177
+ /**
178
+ * @type { ChangesetMeta & { _handled: string[] } }}
179
+ */
180
+ let lines;
181
+ if (existsSync(join(rootDir, ".changeset", "_changelog.json"))) {
182
+ lines = JSON.parse(
183
+ readFileSync(join(rootDir, ".changeset", "_changelog.json"), "utf-8")
184
+ );
185
+ } else {
186
+ lines = {
187
+ _handled: []
188
+ };
189
+ }
190
+
191
+ if (lines._handled.includes(changeset.id)) {
192
+ return "done";
193
+ }
194
+ lines._handled.push(changeset.id);
195
+
196
+ changeset.releases.forEach((release) => {
197
+ if (!lines[release.name])
198
+ lines[release.name] = {
199
+ dirs: find_packages_dirs(release.name),
200
+ current_changelog: "",
201
+ feat: [],
202
+ fix: [],
203
+ highlight: []
204
+ };
205
+
206
+ const changelog_path = join(
207
+ //@ts-ignore
208
+ lines[release.name].dirs[1] || lines[release.name].dirs[0],
209
+ "CHANGELOG.md"
210
+ );
211
+
212
+ if (existsSync(changelog_path)) {
213
+ //@ts-ignore
214
+ lines[release.name].current_changelog = readFileSync(
215
+ changelog_path,
216
+ "utf-8"
217
+ )
218
+ .replace(`# ${release.name}`, "")
219
+ .trim();
220
+ }
221
+
222
+ const [, _type, summary] = changeset.summary
223
+ .trim()
224
+ .match(/^(feat|fix|highlight)\s*:\s*([^]*)/im) || [
225
+ ,
226
+ "feat",
227
+ changeset.summary
228
+ ];
229
+
230
+ let formatted_summary = "";
231
+
232
+ if (_type === "highlight") {
233
+ const [heading, ...rest] = summary.trim().split("\n");
234
+ const _heading = `${heading} ${prefix ? `(${prefix})` : ""}`;
235
+ const _rest = rest.concat(["", suffix]);
236
+
237
+ formatted_summary = `${_heading}\n${_rest.join("\n")}`;
238
+ } else {
239
+ formatted_summary = handle_line(summary, prefix, suffix);
240
+ }
241
+
242
+ //@ts-ignore
243
+ lines[release.name][_type].push({
244
+ summary: formatted_summary
245
+ });
246
+ });
247
+
248
+ writeFileSync(
249
+ join(rootDir, ".changeset", "_changelog.json"),
250
+ JSON.stringify(lines, null, 2)
251
+ );
252
+
253
+ return `\n\n-${prefix ? `${prefix} -` : ""} ${firstLine}\n${futureLines
254
+ .map((l) => ` ${l}`)
255
+ .join("\n")}`;
256
+ }
257
+ };
258
+
259
/**
 * Format a single changelog entry: the first line becomes a one-line
 * description (with an optional PR/commit prefix and "Thanks" suffix),
 * and any remaining lines are indented beneath it.
 * @param {string} str The changelog entry
 * @param {string} prefix The prefix to add to the first line
 * @param {string} suffix The suffix to add to the last line
 * @returns {string} The formatted changelog entry
 */
function handle_line(str, prefix, suffix) {
	const [_s, ...lines] = str.split("\n").filter(Boolean);

	// Strip one trailing period/whitespace before re-adding "." uniformly.
	const desc = `${prefix ? `${prefix} -` : ""} ${_s.replace(
		/[\s\.]$/,
		""
	)}. ${suffix}`;

	// BUG FIX: the original guard was `_s.length === 1`, which tested the
	// *character* length of the first line rather than whether there are
	// continuation lines, so multi-line entries were dropped whenever the
	// first line happened to be a single character (and vice versa).
	if (lines.length === 0) {
		return desc;
	}

	// BUG FIX: the original joined with the literal string "/n" instead of
	// a newline, emitting "desc/n  line" into the generated changelog.
	return [desc, ...lines.map((l) => `  ${l}`)].join("\n");
}
279
+
280
+ module.exports = changelogFunctions;
.changeset/config.json ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "$schema": "https://unpkg.com/@changesets/config@2.3.0/schema.json",
3
+ "changelog": ["./changeset.cjs", { "repo": "gradio-app/gradio" }],
4
+ "commit": false,
5
+ "fixed": [],
6
+ "linked": [],
7
+ "access": "public",
8
+ "baseBranch": "main",
9
+ "updateInternalDependencies": "patch",
10
+ "ignore": ["@gradio/spaces-test", "@gradio/cdn-test"]
11
+ }
.changeset/every-bears-peel.md ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ ---
2
+ "@gradio/audio": patch
3
+ "gradio": patch
4
+ ---
5
+
6
+ fix:Fix audio recording events not dispatching
.changeset/fix_changelogs.cjs ADDED
@@ -0,0 +1,122 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ const { join } = require("path");
2
+ const { readFileSync, existsSync, writeFileSync, unlinkSync } = require("fs");
3
+ const { getPackagesSync } = require("@manypkg/get-packages");
4
+
5
+ const RE_PKG_NAME = /^[\w-]+\b/;
6
+ const pkg_meta = getPackagesSync(process.cwd());
7
+
8
+ /**
9
+ * @typedef {{dirs: string[], highlight: {summary: string}[], feat: {summary: string}[], fix: {summary: string}[], current_changelog: string}} ChangesetMeta
10
+ */
11
+
12
+ /**
13
+ * @typedef {{[key: string]: ChangesetMeta}} ChangesetMetaCollection
14
+ */
15
+
16
+ function run() {
17
+ if (!existsSync(join(pkg_meta.rootDir, ".changeset", "_changelog.json"))) {
18
+ console.warn("No changesets to process");
19
+ return;
20
+ }
21
+
22
+ /**
23
+ * @type { ChangesetMetaCollection & { _handled: string[] } }}
24
+ */
25
+ const { _handled, ...packages } = JSON.parse(
26
+ readFileSync(
27
+ join(pkg_meta.rootDir, ".changeset", "_changelog.json"),
28
+ "utf-8"
29
+ )
30
+ );
31
+
32
+ /**
33
+ * @typedef { {packageJson: {name: string, version: string, python: boolean}, dir: string} } PackageMeta
34
+ */
35
+
36
+ /**
37
+ * @type { {[key:string]: PackageMeta} }
38
+ */
39
+ const all_packages = pkg_meta.packages.reduce((acc, pkg) => {
40
+ acc[pkg.packageJson.name] = /**@type {PackageMeta} */ (
41
+ /** @type {unknown} */ (pkg)
42
+ );
43
+ return acc;
44
+ }, /** @type {{[key:string] : PackageMeta}} */ ({}));
45
+
46
+ for (const pkg_name in packages) {
47
+ const { dirs, highlight, feat, fix, current_changelog } =
48
+ /**@type {ChangesetMeta} */ (packages[pkg_name]);
49
+
50
+ const { version, python } = all_packages[pkg_name].packageJson;
51
+
52
+ const highlights = highlight.map((h) => `${h.summary}`);
53
+ const features = feat.map((f) => `- ${f.summary}`);
54
+ const fixes = fix.map((f) => `- ${f.summary}`);
55
+
56
+ const release_notes = /** @type {[string[], string][]} */ ([
57
+ [highlights, "### Highlights"],
58
+ [features, "### Features"],
59
+ [fixes, "### Fixes"]
60
+ ])
61
+ .filter(([s], i) => s.length > 0)
62
+ .map(([lines, title]) => {
63
+ if (title === "### Highlights") {
64
+ return `${title}\n\n${lines.join("\n\n")}`;
65
+ }
66
+
67
+ return `${title}\n\n${lines.join("\n")}`;
68
+ })
69
+ .join("\n\n");
70
+
71
+ const new_changelog = `# ${pkg_name}
72
+
73
+ ## ${version}
74
+
75
+ ${release_notes}
76
+
77
+ ${current_changelog.replace(`# ${pkg_name}`, "").trim()}
78
+ `.trim();
79
+
80
+ dirs.forEach((dir) => {
81
+ writeFileSync(join(dir, "CHANGELOG.md"), new_changelog);
82
+ });
83
+
84
+ if (python) {
85
+ bump_local_dependents(pkg_name, version);
86
+ }
87
+ }
88
+
89
+ unlinkSync(join(pkg_meta.rootDir, ".changeset", "_changelog.json"));
90
+
91
+ /**
92
+ * @param {string} pkg_to_bump The name of the package to bump
93
+ * @param {string} version The version to bump to
94
+ * @returns {void}
95
+ * */
96
+ function bump_local_dependents(pkg_to_bump, version) {
97
+ for (const pkg_name in all_packages) {
98
+ const {
99
+ dir,
100
+ packageJson: { python }
101
+ } = all_packages[pkg_name];
102
+
103
+ if (!python) continue;
104
+
105
+ const requirements_path = join(dir, "..", "requirements.txt");
106
+ const requirements = readFileSync(requirements_path, "utf-8").split("\n");
107
+
108
+ const pkg_index = requirements.findIndex((line) => {
109
+ const m = line.trim().match(RE_PKG_NAME);
110
+ if (!m) return false;
111
+ return m[0] === pkg_to_bump;
112
+ });
113
+
114
+ if (pkg_index !== -1) {
115
+ requirements[pkg_index] = `${pkg_to_bump}==${version}`;
116
+ writeFileSync(requirements_path, requirements.join("\n"));
117
+ }
118
+ }
119
+ }
120
+ }
121
+
122
+ run();
.config/.prettierignore ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ **/js/app/public/**
2
+ **/pnpm-workspace.yaml
3
+ **/js/app/dist/**
4
+ **/js/wasm/dist/**
5
+ **/client/js/dist/**
6
+ **/js/lite/dist/**
7
+ **/pnpm-lock.yaml
8
+ **/js/plot/src/Plot.svelte
9
+ **/.svelte-kit/**
10
+ **/demo/**
11
+ **/gradio/**
12
+ **/.pnpm-store/**
13
+ **/.venv/**
14
+ **/.github/**
15
+ /guides/**
16
+ **/.mypy_cache/**
17
+ !test-strategy.md
18
+ **/js/_space-test/**
19
+ ../js/app/src/lite/theme.css
20
+ ../js/storybook/theme.css
21
+ **/gradio_cached_examples/**
22
+ **/storybook-static/**
23
+ **/.vscode/**
24
+ sweep.yaml
25
+ **/.vercel/**
26
+ **/build/**
27
+ **/*.md
28
+ **/src/lib/json/**/*
29
+ **/playwright/.cache/**/*
30
+ **/theme/src/pollen.css
.config/.prettierrc.json ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ {
2
+ "useTabs": true,
3
+ "singleQuote": false,
4
+ "trailingComma": "none",
5
+ "printWidth": 80,
6
+ "plugins": ["prettier-plugin-svelte"]
7
+ }
.config/basevite.config.ts ADDED
@@ -0,0 +1,90 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { defineConfig } from "vite";
2
+ import { svelte } from "@sveltejs/vite-plugin-svelte";
3
+ import sveltePreprocess from "svelte-preprocess";
4
+ // @ts-ignore
5
+ import custom_media from "postcss-custom-media";
6
+ import global_data from "@csstools/postcss-global-data";
7
+ // @ts-ignore
8
+ import prefixer from "postcss-prefix-selector";
9
+ import { readFileSync } from "fs";
10
+ import { join } from "path";
11
+ import { fileURLToPath } from "url";
12
+
13
+ const __dirname = fileURLToPath(new URL(".", import.meta.url));
14
+ const version_path = join(__dirname, "..", "gradio", "package.json");
15
+ const theme_token_path = join(
16
+ __dirname,
17
+ "..",
18
+ "js",
19
+ "theme",
20
+ "src",
21
+ "tokens.css"
22
+ );
23
+
24
+ const version = JSON.parse(readFileSync(version_path, { encoding: "utf-8" }))
25
+ .version.trim()
26
+ .replace(/\./g, "-");
27
+
28
+ //@ts-ignore
29
+ export default defineConfig(({ mode }) => {
30
+ const production = mode === "production";
31
+
32
+ return {
33
+ server: {
34
+ port: 9876
35
+ },
36
+
37
+ build: {
38
+ sourcemap: false,
39
+ target: "esnext",
40
+ minify: production
41
+ },
42
+ define: {
43
+ BUILD_MODE: production ? JSON.stringify("prod") : JSON.stringify("dev"),
44
+ BACKEND_URL: production
45
+ ? JSON.stringify("")
46
+ : JSON.stringify("http://localhost:7860/"),
47
+ GRADIO_VERSION: JSON.stringify(version)
48
+ },
49
+ css: {
50
+ postcss: {
51
+ plugins: [
52
+ prefixer({
53
+ prefix: `.gradio-container-${version}`,
54
+ // @ts-ignore
55
+ transform(prefix, selector, prefixedSelector, fileName) {
56
+ if (selector.indexOf("gradio-container") > -1) {
57
+ return prefix;
58
+ } else if (
59
+ selector.indexOf(":root") > -1 ||
60
+ selector.indexOf("dark") > -1 ||
61
+ fileName.indexOf(".svelte") > -1
62
+ ) {
63
+ return selector;
64
+ }
65
+ return prefixedSelector;
66
+ }
67
+ }),
68
+ custom_media()
69
+ ]
70
+ }
71
+ },
72
+ plugins: [
73
+ svelte({
74
+ inspector: true,
75
+ compilerOptions: {
76
+ dev: !production
77
+ },
78
+ hot: !process.env.VITEST && !production,
79
+ preprocess: sveltePreprocess({
80
+ postcss: {
81
+ plugins: [
82
+ global_data({ files: [theme_token_path] }),
83
+ custom_media()
84
+ ]
85
+ }
86
+ })
87
+ })
88
+ ]
89
+ };
90
+ });
.config/copy_frontend.py ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import shutil
4
+ import pathlib
5
+ from typing import Any
6
+
7
+ from hatchling.builders.hooks.plugin.interface import BuildHookInterface
8
+
9
+
10
+ def copy_js_code(root: str | pathlib.Path):
11
+ NOT_COMPONENT = [
12
+ "app",
13
+ "node_modules",
14
+ "storybook",
15
+ "playwright-report",
16
+ "workbench",
17
+ "tooltils",
18
+ ]
19
+ for entry in (pathlib.Path(root) / "js").iterdir():
20
+ if (
21
+ entry.is_dir()
22
+ and not str(entry.name).startswith("_")
23
+ and not str(entry.name) in NOT_COMPONENT
24
+ ):
25
+
26
+ def ignore(s, names):
27
+ ignored = []
28
+ for n in names:
29
+ if (
30
+ n.startswith("CHANGELOG")
31
+ or n.startswith("README.md")
32
+ or n.startswith("node_modules")
33
+ or ".test." in n
34
+ or ".stories." in n
35
+ or ".spec." in n
36
+ ):
37
+ ignored.append(n)
38
+ return ignored
39
+ shutil.copytree(
40
+ str(entry),
41
+ str(pathlib.Path("gradio") / "_frontend_code" / entry.name),
42
+ ignore=ignore,
43
+ dirs_exist_ok=True,
44
+ )
45
+ shutil.copytree(
46
+ str(pathlib.Path(root) / "client" / "js"),
47
+ str(pathlib.Path("gradio") / "_frontend_code" / "client"),
48
+ ignore=lambda d, names: ["node_modules"],
49
+ dirs_exist_ok=True,
50
+ )
51
+
52
+
53
+ class BuildHook(BuildHookInterface):
54
+ def initialize(self, version: str, build_data: dict[str, Any]) -> None:
55
+ copy_js_code(self.root)
56
+
57
+
58
+ if __name__ == "__main__":
59
+ copy_js_code(pathlib.Path("..").resolve())
.config/eslint.config.js ADDED
@@ -0,0 +1,153 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import globals from "globals";
2
+ import ts_plugin from "@typescript-eslint/eslint-plugin";
3
+ import js_plugin from "@eslint/js";
4
+
5
+ import typescriptParser from "@typescript-eslint/parser";
6
+ import sveltePlugin from "eslint-plugin-svelte";
7
+ import svelteParser from "svelte-eslint-parser";
8
+
9
+ const ts_rules_disabled = Object.fromEntries(
10
+ Object.keys(ts_plugin.rules).map((rule) => [
11
+ `@typescript-eslint/${rule}`,
12
+ "off"
13
+ ])
14
+ );
15
+ const js_rules_disabled = Object.fromEntries(
16
+ Object.keys(js_plugin.configs.all.rules).map((rule) => [rule, "off"])
17
+ );
18
+
19
// Core (non-TypeScript) rule set: start from every core rule disabled,
// then explicitly re-enable the rules this repo enforces.
const js_rules = {
	...js_rules_disabled,
	"no-console": ["error", { allow: ["warn", "error", "debug", "info"] }],
	"no-constant-condition": "error",
	"no-dupe-args": "error",
	"no-extra-boolean-cast": "error",
	"no-unexpected-multiline": "error",
	"no-unreachable": "error",
	"array-callback-return": "error",
	complexity: "error",
	"no-else-return": "error",
	"no-useless-return": "error",
	"no-undef": "error",
	// BUG FIX: "valid-jsdoc" was declared twice in this literal (a bare
	// "error" entry and this configured form). In a JS object literal the
	// later key wins, so the bare entry was dead code and itself tripped
	// the no-dupe-keys rule; only the configured form is kept.
	"valid-jsdoc": [
		"error",
		{
			requireReturn: false,
			requireParamDescription: true,
			requireReturnDescription: true,
			requireReturnType: false,
			requireParamType: false
		}
	]
};
44
+
45
+ const ts_rules = {
46
+ ...ts_rules_disabled,
47
+ "@typescript-eslint/adjacent-overload-signatures": "error",
48
+ "@typescript-eslint/explicit-function-return-type": [
49
+ "error",
50
+ { allowExpressions: true }
51
+ ],
52
+ "@typescript-eslint/consistent-type-exports": "error",
53
+ "@typescript-eslint/ban-types": "error",
54
+ "@typescript-eslint/array-type": "error",
55
+ "@typescript-eslint/no-inferrable-types": "error"
56
+ };
57
+
58
+ const { browser, es2021, node } = globals;
59
+
60
+ export default [
61
+ {
62
+ ignores: [
63
+ ".svelte-kit/**/*",
64
+ "**/node_modules/**",
65
+ "**/dist/**",
66
+ "**/.config/*",
67
+ "**/*.spec.ts",
68
+ "**/*.test.ts",
69
+ "**/*.node-test.ts",
70
+ "js/app/test/**/*",
71
+ "**/*vite.config.ts",
72
+ "**/_website/**/*",
73
+ "**/_spaces-test/**/*",
74
+ "**/preview/test/**/*"
75
+ ]
76
+ },
77
+ {
78
+ files: ["**/*.js", "**/*.cjs"],
79
+ languageOptions: {
80
+ globals: {
81
+ ...browser,
82
+ ...es2021,
83
+ ...node
84
+ }
85
+ },
86
+
87
+ plugins: {
88
+ "eslint:recommended": js_plugin
89
+ },
90
+ rules: js_rules
91
+ },
92
+
93
+ {
94
+ files: ["**/*.ts"],
95
+ languageOptions: {
96
+ parser: typescriptParser,
97
+ parserOptions: {
98
+ project: "./tsconfig.json",
99
+ extraFileExtensions: [".svelte"]
100
+ },
101
+ globals: {
102
+ ...browser,
103
+ ...es2021,
104
+ ...node
105
+ }
106
+ },
107
+
108
+ plugins: {
109
+ "@typescript-eslint": ts_plugin,
110
+ "eslint:recommended": js_plugin
111
+ },
112
+ rules: {
113
+ ...ts_rules,
114
+ ...js_rules,
115
+ "no-undef": "off"
116
+ }
117
+ },
118
+ {
119
+ files: ["**/client/js/**"],
120
+ languageOptions: {
121
+ parserOptions: {
122
+ project: "./client/js/tsconfig.json"
123
+ }
124
+ }
125
+ },
126
+ {
127
+ files: ["**/*.svelte"],
128
+ languageOptions: {
129
+ parser: svelteParser,
130
+ parserOptions: {
131
+ parser: typescriptParser,
132
+ project: "./tsconfig.json",
133
+ extraFileExtensions: [".svelte"]
134
+ },
135
+ globals: {
136
+ ...browser,
137
+ ...es2021
138
+ }
139
+ },
140
+ plugins: {
141
+ svelte: sveltePlugin,
142
+ "@typescript-eslint": ts_plugin,
143
+ "eslint:recommended": js_plugin
144
+ },
145
+ rules: {
146
+ ...ts_rules,
147
+ ...js_rules,
148
+ ...sveltePlugin.configs.recommended.rules,
149
+ "svelte/no-at-html-tags": "off",
150
+ "no-undef": "off"
151
+ }
152
+ }
153
+ ];
.config/playwright-ct.config.ts ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { defineConfig, devices } from "@playwright/experimental-ct-svelte";
2
+ import config from "./basevite.config";
3
+
4
+ /**
5
+ * See https://playwright.dev/docs/test-configuration.
6
+ */
7
+ export default defineConfig({
8
+ testDir: "../",
9
+ /* The base directory, relative to the config file, for snapshot files created with toMatchSnapshot and toHaveScreenshot. */
10
+ snapshotDir: "./__snapshots__",
11
+ /* Maximum time one test can run for. */
12
+ timeout: 10 * 1000,
13
+ /* Run tests in files in parallel */
14
+ fullyParallel: true,
15
+ /* Fail the build on CI if you accidentally left test.only in the source code. */
16
+ forbidOnly: !!process.env.CI,
17
+ /* Retry on CI only */
18
+ retries: process.env.CI ? 2 : 0,
19
+ /* Opt out of parallel tests on CI. */
20
+ workers: process.env.CI ? 1 : undefined,
21
+ /* Reporter to use. See https://playwright.dev/docs/test-reporters */
22
+ reporter: "html",
23
+ /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
24
+ use: {
25
+ /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
26
+ trace: "on-first-retry",
27
+
28
+ /* Port to use for Playwright component endpoint. */
29
+ ctPort: 3100,
30
+ ctViteConfig: config({ mode: "development" })
31
+ },
32
+ testMatch: "*.component.spec.ts",
33
+
34
+ /* Configure projects for major browsers */
35
+ projects: [
36
+ {
37
+ name: "chromium",
38
+ use: { ...devices["Desktop Chrome"] }
39
+ }
40
+ ]
41
+ });
.config/playwright-setup.js ADDED
@@ -0,0 +1,152 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { spawn } from "node:child_process";
2
+ import { join, basename } from "path";
3
+ import { fileURLToPath } from "url";
4
+ import { readdirSync, writeFileSync } from "fs";
5
+ import net from "net";
6
+
7
+ import kl from "kleur";
8
+
9
+ const __dirname = fileURLToPath(new URL(".", import.meta.url));
10
+ const TEST_APP_PATH = join(__dirname, "./test.py");
11
+ const TEST_FILES_PATH = join(__dirname, "..", "js", "app", "test");
12
+ const ROOT = join(__dirname, "..");
13
+
14
+ const test_files = readdirSync(TEST_FILES_PATH)
15
+ .filter(
16
+ (f) =>
17
+ f.endsWith("spec.ts") &&
18
+ !f.endsWith(".skip.spec.ts") &&
19
+ !f.endsWith(".component.spec.ts")
20
+ )
21
+ .map((f) => basename(f, ".spec.ts"));
22
+
23
+ export default async function global_setup() {
24
+ const verbose = process.env.GRADIO_TEST_VERBOSE;
25
+
26
+ const port = await find_free_port(7860, 8860);
27
+ process.env.GRADIO_E2E_TEST_PORT = port;
28
+
29
+ process.stdout.write(kl.yellow("\nCreating test gradio app.\n\n"));
30
+
31
+ const test_app = make_app(test_files, port);
32
+ process.stdout.write(kl.yellow("App created. Starting test server.\n\n"));
33
+
34
+ process.stdout.write(kl.bgBlue(" =========================== \n"));
35
+ process.stdout.write(kl.bgBlue(" === PYTHON STARTUP LOGS === \n"));
36
+ process.stdout.write(kl.bgBlue(" =========================== \n\n"));
37
+
38
+ writeFileSync(TEST_APP_PATH, test_app);
39
+
40
+ const app = await spawn_gradio_app(TEST_APP_PATH, port, verbose);
41
+
42
+ process.stdout.write(
43
+ kl.green(`\n\nServer started. Running tests on port ${port}.\n`)
44
+ );
45
+
46
+ return () => {
47
+ process.stdout.write(kl.green(`\nTests complete, cleaning up!\n`));
48
+
49
+ kill_process(app);
50
+ };
51
+ }
52
+ const INFO_RE = /^INFO:/;
53
+
54
+ function spawn_gradio_app(app, port, verbose) {
55
+ const PORT_RE = new RegExp(`:${port}`);
56
+
57
+ return new Promise((res, rej) => {
58
+ const _process = spawn(`python`, [app], {
59
+ shell: true,
60
+ stdio: "pipe",
61
+ cwd: ROOT,
62
+ env: {
63
+ ...process.env,
64
+ GRADIO_SERVER_PORT: `7879`,
65
+ PYTHONUNBUFFERED: "true",
66
+ GRADIO_ANALYTICS_ENABLED: "False"
67
+ }
68
+ });
69
+ _process.stdout.setEncoding("utf8");
70
+
71
+ function std_out(data) {
72
+ const _data = data.toString();
73
+ const is_info = INFO_RE.test(_data);
74
+
75
+ if (is_info) {
76
+ process.stdout.write(kl.yellow(_data));
77
+ }
78
+
79
+ if (!is_info) {
80
+ process.stdout.write(`${_data}\n`);
81
+ }
82
+
83
+ if (PORT_RE.test(_data)) {
84
+ process.stdout.write(kl.bgBlue("\n =========== END =========== "));
85
+
86
+ res(_process);
87
+
88
+ if (!verbose) {
89
+ _process.stdout.off("data", std_out);
90
+ _process.stderr.off("data", std_out);
91
+ }
92
+ }
93
+ }
94
+
95
+ _process.stdout.on("data", std_out);
96
+ _process.stderr.on("data", std_out);
97
+ _process.on("exit", () => kill_process(_process));
98
+ _process.on("close", () => kill_process(_process));
99
+ _process.on("disconnect", () => kill_process(_process));
100
+ });
101
+ }
102
+
103
+ function kill_process(process) {
104
+ process.kill("SIGKILL");
105
+ }
106
+
107
+ function make_app(demos, port) {
108
+ return `import gradio as gr
109
+ import uvicorn
110
+ from fastapi import FastAPI
111
+ import gradio as gr
112
+ ${demos.map((d) => `from demo.${d}.run import demo as ${d}`).join("\n")}
113
+
114
+ app = FastAPI()
115
+ ${demos
116
+ .map((d) => `app = gr.mount_gradio_app(app, ${d}, path="/${d}")`)
117
+ .join("\n")}
118
+
119
+ config = uvicorn.Config(app, port=${port}, log_level="info")
120
+ server = uvicorn.Server(config=config)
121
+ server.run()`;
122
+ }
123
+
124
+ export async function find_free_port(start_port, end_port) {
125
+ for (let port = start_port; port < end_port; port++) {
126
+ if (await is_free_port(port)) {
127
+ return port;
128
+ }
129
+ }
130
+
131
+ throw new Error(
132
+ `Could not find free ports: there were not enough ports available.`
133
+ );
134
+ }
135
+
136
+ export function is_free_port(port) {
137
+ return new Promise((accept, reject) => {
138
+ const sock = net.createConnection(port, "127.0.0.1");
139
+ sock.once("connect", () => {
140
+ sock.end();
141
+ accept(false);
142
+ });
143
+ sock.once("error", (e) => {
144
+ sock.destroy();
145
+ if (e.code === "ECONNREFUSED") {
146
+ accept(true);
147
+ } else {
148
+ reject(e);
149
+ }
150
+ });
151
+ });
152
+ }
.config/playwright.config.js ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { defineConfig } from "@playwright/test";
2
+
3
+ export default defineConfig({
4
+ use: {
5
+ screenshot: "only-on-failure",
6
+ trace: "retain-on-failure",
7
+ permissions: ["clipboard-read", "clipboard-write", "microphone"],
8
+ bypassCSP: true,
9
+ launchOptions: {
10
+ args: [
11
+ "--disable-web-security",
12
+ "--use-fake-device-for-media-stream",
13
+ "--use-fake-ui-for-media-stream",
14
+ "--use-file-for-fake-audio-capture=../gradio/test_data/test_audio.wav"
15
+ ]
16
+ }
17
+ },
18
+ expect: { timeout: 60000 },
19
+ timeout: 90000,
20
+ testMatch: /.*.spec.ts/,
21
+ testDir: "..",
22
+ globalSetup: "./playwright-setup.js",
23
+ workers: process.env.CI ? 1 : undefined
24
+ });
.config/playwright/index.html ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!doctype html>
2
+ <html lang="en">
3
+ <head>
4
+ <meta charset="UTF-8" />
5
+ <meta name="viewport" content="width=device-width, initial-scale=1.0" />
6
+ <title>Testing Page</title>
7
+ </head>
8
+ <body>
9
+ <div id="root"></div>
10
+ <script type="module" src="./index.ts"></script>
11
+ </body>
12
+ </html>
.config/playwright/index.ts ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ // Import styles, initialize component theme here.
2
+ // import '../src/common.css';
.config/postcss.config.cjs ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ const tailwindcss = require("tailwindcss");
2
+ const autoprefixer = require("autoprefixer");
3
+ const nested = require("tailwindcss/nesting");
4
+ const tw_config = require("./tailwind.config.cjs");
5
+
6
+ module.exports = {
7
+ plugins: [nested, tailwindcss(tw_config), autoprefixer]
8
+ };
.config/setup_vite_tests.ts ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import type { TestingLibraryMatchers } from "@testing-library/jest-dom/matchers";
2
+ import matchers from "@testing-library/jest-dom/matchers";
3
+ import { expect } from "vitest";
4
+
5
+ declare module "vitest" {
6
+ interface Assertion<T = any>
7
+ extends jest.Matchers<void, T>,
8
+ TestingLibraryMatchers<T, void> {}
9
+ }
10
+
11
+ expect.extend(matchers);
.config/tailwind.config.cjs ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ module.exports = {
2
+ content: [
3
+ "./src/**/*.{html,js,svelte,ts}",
4
+ "**/@gradio/**/*.{html,js,svelte,ts}"
5
+ ],
6
+
7
+ theme: {
8
+ extend: {}
9
+ },
10
+
11
+ plugins: [require("@tailwindcss/forms")]
12
+ };
.config/vitest.config.ts ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ import config from "../js/app/vite.config";
2
+
3
+ export default config;
.devcontainer/devcontainer.json ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // See https://containers.dev
2
+ {
3
+ "name": "Python 3",
4
+ "image": "mcr.microsoft.com/devcontainers/python:0-3.9",
5
+
6
+ // See https://containers.dev/features
7
+ "features": {
8
+ "ghcr.io/devcontainers/features/git:1": {},
9
+ "ghcr.io/devcontainers/features/node:1": {},
10
+ "ghcr.io/devcontainers-contrib/features/ffmpeg-apt-get:1": {}
11
+ },
12
+
13
+ "hostRequirements": {
14
+ "cpus": 4,
15
+ "memory": "8gb",
16
+ "storage": "32gb"
17
+ },
18
+
19
+ "customizations": {
20
+ "vscode": {
21
+ "extensions": [
22
+ "ms-python.python",
23
+ "ms-python.vscode-pylance",
24
+ "ms-python.black-formatter",
25
+ "ms-toolsai.jupyter",
26
+ "esbenp.prettier-vscode",
27
+ "svelte.svelte-vscode",
28
+ "phoenisx.cssvar"
29
+ ],
30
+ "remote.autoForwardPorts": false
31
+ }
32
+ },
33
+
34
+ "forwardPorts": [7860, 9876],
35
+ "portsAttributes": {
36
+ "7860": { "label": "gradio port" },
37
+ "9876": { "label": "gradio dev port" }
38
+ },
39
+
40
+ "postCreateCommand": "export NODE_OPTIONS=\"--max-old-space-size=8192\" && chmod +x scripts/install_gradio.sh scripts/install_test_requirements.sh scripts/build_frontend.sh && ./scripts/install_gradio.sh && ./scripts/install_test_requirements.sh && ./scripts/build_frontend.sh"
41
+ }
.dockerignore ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Python build
2
+ .eggs/
3
+ gradio.egg-info/*
4
+ !gradio.egg-info/requires.txt
5
+ !gradio.egg-info/PKG-INFO
6
+ dist/
7
+ *.pyc
8
+ __pycache__/
9
+ *.py[cod]
10
+ *$py.class
11
+ build/
12
+
13
+ # JS build
14
+ gradio/templates/frontend/static
15
+ gradio/templates/frontend/cdn
16
+
17
+ # Secrets
18
+ .env
19
+
20
+ # Gradio run artifacts
21
+ *.db
22
+ *.sqlite3
23
+ gradio/launches.json
24
+
25
+ # Tests
26
+ .coverage
27
+ coverage.xml
28
+ test.txt
29
+
30
+ # Demos
31
+ demo/tmp.zip
32
+ demo/flagged
33
+ demo/files/*.avi
34
+ demo/files/*.mp4
35
+
36
+ # Etc
37
+ .idea/*
38
+ .DS_Store
39
+ *.bak
40
+ workspace.code-workspace
.editorconfig ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+
2
+ root = true
3
+
4
+ [{js/**,client/js/**}]
5
+ end_of_line = lf
6
+ insert_final_newline = true
7
+ indent_style = tab
8
+ tab_width = 2
.git-blame-ignore-revs ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # https://github.com/gradio-app/gradio/pull/4487 - refactor components.py to separate files
2
+ 69f36f98535c904e7cac2b4942cecc747ed7443c
3
+ # Format the codebase
4
+ cc0cff893f9d7d472788adc2510c123967b384fe
5
+ # Switch from black to ruff
6
+ 8a70e83db9c7751b46058cdd2514e6bddeef6210
7
+ # format (#4810)
8
+ 7fa5e766ce0f89f1fb84c329e62c9df9c332120a
9
+ # lint website
10
+ 4bf301324b3b180fa32166ff1774312b01334c88
11
+ # format frontend with prettier
12
+ 980b9f60eb49ed81e4957debe7b23a559a4d4b51
13
+ # Refactor component directories (#5074)
14
+ 1419538ea795caa391e3de809379f10639e9e764
.gitattributes CHANGED
@@ -33,3 +33,20 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ demo/blocks_flipper/screenshot.gif filter=lfs diff=lfs merge=lfs -text
37
+ demo/blocks_neural_instrument_coding/sax.wav filter=lfs diff=lfs merge=lfs -text
38
+ demo/calculator/screenshot.gif filter=lfs diff=lfs merge=lfs -text
39
+ demo/dataset/files/world.mp4 filter=lfs diff=lfs merge=lfs -text
40
+ demo/fake_diffusion_with_gif/image.gif filter=lfs diff=lfs merge=lfs -text
41
+ demo/hello_world_2/screenshot.gif filter=lfs diff=lfs merge=lfs -text
42
+ demo/hello_world_4/screenshot.gif filter=lfs diff=lfs merge=lfs -text
43
+ demo/image_mod/screenshot.png filter=lfs diff=lfs merge=lfs -text
44
+ demo/kitchen_sink/files/world.mp4 filter=lfs diff=lfs merge=lfs -text
45
+ demo/sales_projections/screenshot.gif filter=lfs diff=lfs merge=lfs -text
46
+ demo/sepia_filter/screenshot.gif filter=lfs diff=lfs merge=lfs -text
47
+ demo/unispeech-speaker-verification/samples/kirsten_dunst.wav filter=lfs diff=lfs merge=lfs -text
48
+ demo/video_component/files/world.mp4 filter=lfs diff=lfs merge=lfs -text
49
+ guides/assets/hf_demo.mp4 filter=lfs diff=lfs merge=lfs -text
50
+ guides/cn/assets/hf_demo.mp4 filter=lfs diff=lfs merge=lfs -text
51
+ js/app/test/files/file_test.ogg filter=lfs diff=lfs merge=lfs -text
52
+ test/test_files/rotated_image.jpeg filter=lfs diff=lfs merge=lfs -text
.github/ISSUE_TEMPLATE/bug_report_template.yml ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: "\U0001F41E Bug report"
2
+ description: Report a bug on Gradio
3
+ labels: ["bug"]
4
+ body:
5
+ - type: markdown
6
+ attributes:
7
+ value: |
8
+ Thanks for taking the time to fill out this bug report! Before you get started, please [search to see](https://github.com/gradio-app/gradio/issues) if an issue already exists for the bug you encountered
9
+ - type: textarea
10
+ id: bug-description
11
+ attributes:
12
+ label: Describe the bug
13
+ description: Please provide a concise description of what the bug is, in clear English. If you intend to submit a PR for this issue, tell us in the description.
14
+ placeholder: Bug description
15
+ validations:
16
+ required: true
17
+ - type: checkboxes
18
+ attributes:
19
+ label: Have you searched existing issues? 🔎
20
+ description: Please search to see if an issue already exists for the issue you encountered.
21
+ options:
22
+ - label: I have searched and found no existing issues
23
+ required: true
24
+ - type: textarea
25
+ id: reproduction
26
+ attributes:
27
+ label: Reproduction
28
+ description: Please provide a minimal example, with code, that can be run to reproduce the issue. Do NOT provide screenshots of code, or link to external repos or applications. Use ``` to format code blocks.
29
+ placeholder: Reproduction
30
+ value: |
31
+ ```python
32
+ import gradio as gr
33
+
34
+ ```
35
+ validations:
36
+ required: true
37
+ - type: textarea
38
+ id: screenshot
39
+ attributes:
40
+ label: Screenshot
41
+ description: If relevant, please include screenshot(s) of your Gradio app so that we can understand what the issue is.
42
+ - type: textarea
43
+ id: logs
44
+ attributes:
45
+ label: Logs
46
+ description: "Please include the full stacktrace of the errors you get from Python or Javascript. If you are running in a colab notebooks, you can get the logs with by setting `debug=True`, i.e: `gradio.Interface.launch(debug=True)`"
47
+ render: shell
48
+ - type: textarea
49
+ id: system-info
50
+ attributes:
51
+ label: System Info
52
+ description: Please ensure you are running the latest version of Gradio. You can get the Gradio version and all its dependencies by running `gradio environment`
53
+ render: shell
54
+ validations:
55
+ required: true
56
+ - type: dropdown
57
+ id: severity
58
+ attributes:
59
+ label: Severity
60
+ description: Select the severity of this issue
61
+ options:
62
+ - I can work around it
63
+ - Blocking usage of gradio
64
+ validations:
65
+ required: true
66
+ - type: markdown
67
+ attributes:
68
+ value: |
69
+ 📌 Please ensure that you have filled all of the required sections above, and that the reproduction you have provided is [minimal, complete, and reproducible](https://stackoverflow.com/help/minimal-reproducible-example). Incomplete issues will be closed.
.github/ISSUE_TEMPLATE/config.yml ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ blank_issues_enabled: false
2
+ contact_links:
3
+ - name: 💡 General questions
4
+ url: https://discord.com/invite/feTf9x3ZSB
5
+ about: Have general questions about how to use Gradio? Please ask in our community Discord for quicker responses
.github/ISSUE_TEMPLATE/feature_request.md ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ name: ⚡ Feature request
3
+ about: Suggest an improvement or new feature or a new Guide for Gradio
4
+ title: ''
5
+ labels: ''
6
+ assignees: ''
7
+
8
+ ---
9
+ - [ ] I have searched to see if a similar issue already exists.
10
+
11
+
12
+ **Is your feature request related to a problem? Please describe.**
13
+ A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
14
+
15
+ **Describe the solution you'd like**
16
+ A clear and concise description of what you want to happen.
17
+
18
+ **Additional context**
19
+ Add any other context or screenshots about the feature request here.
.github/PULL_REQUEST_TEMPLATE.md ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ## Description
2
+
3
+ Please include a concise summary, in clear English, of the changes in this pull request. If it closes an issue, please mention it here.
4
+
5
+ Closes: #(issue)
6
+
7
+ ## 🎯 PRs Should Target Issues
8
+
9
+ Before your create a PR, please check to see if there is [an existing issue](https://github.com/gradio-app/gradio/issues) for this change. If not, please create an issue before you create this PR, unless the fix is very small.
10
+
11
+ Not adhering to this guideline will result in the PR being closed.
12
+
13
+ ## Tests
14
+
15
+ 1. PRs will only be merged if tests pass on CI. To run the tests locally, please set up [your Gradio environment locally](https://github.com/gradio-app/gradio/blob/main/CONTRIBUTING.md) and run the tests: `bash scripts/run_all_tests.sh`
16
+
17
+ 2. You may need to run the linters: `bash scripts/format_backend.sh` and `bash scripts/format_frontend.sh`
18
+
.github/actions/install-all-deps/action.yml ADDED
@@ -0,0 +1,60 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: 'install all deps'
2
+ description: 'Install all deps'
3
+
4
+ inputs:
5
+ always-install-pnpm:
6
+ description: 'Dictates whether or not we should install pnpm & dependencies, regardless of the cache'
7
+ default: 'false'
8
+ node_auth_token:
9
+ description: 'Node auth token'
10
+ default: ""
11
+ npm_token:
12
+ description: 'npm token'
13
+ default: ""
14
+ skip_build:
15
+ description: 'Skip build'
16
+ default: 'false'
17
+
18
+ runs:
19
+ using: "composite"
20
+ steps:
21
+ - name: Install Python
22
+ uses: actions/setup-python@v5
23
+ with:
24
+ python-version: 3.8
25
+ cache: pip
26
+ cache-dependency-path: |
27
+ client/python/requirements.txt
28
+ requirements.txt
29
+ test/requirements.txt
30
+ - name: Create env
31
+ shell: bash
32
+ run: |
33
+ python -m pip install --upgrade virtualenv
34
+ python -m virtualenv venv
35
+ - uses: actions/cache@v4
36
+ id: cache
37
+ with:
38
+ path: |
39
+ venv/*
40
+ key: gradio-lib-ubuntu-latest-pip-${{ hashFiles('client/python/requirements.txt') }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('test/requirements.txt') }}
41
+ - name: Install Gradio and Client Libraries Locally (Linux)
42
+ shell: bash
43
+ run: |
44
+ . venv/bin/activate
45
+ python -m pip install -e client/python
46
+ python -m pip install -e .
47
+ - name: Install ffmpeg
48
+ uses: FedericoCarboni/setup-ffmpeg@v2
49
+ - name: install-frontend
50
+ uses: "./.github/actions/install-frontend-deps"
51
+ with:
52
+ always-install-pnpm: ${{ inputs.always-install-pnpm }}
53
+ node_auth_token: ${{ inputs.node_auth_token }}
54
+ npm_token: ${{ inputs.npm_token }}
55
+ skip_build: ${{ inputs.skip_build }}
56
+ - name: generate json
57
+ shell: bash
58
+ run: |
59
+ . venv/bin/activate
60
+ python js/_website/generate_jsons/generate.py
.github/actions/install-frontend-deps/action.yml ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: 'install frontend'
2
+ description: 'Install frontend deps'
3
+
4
+ inputs:
5
+ always-install-pnpm:
6
+ description: 'Dictates whether or not we should install pnpm & dependencies, regardless of the cache'
7
+ default: 'false'
8
+ node_auth_token:
9
+ description: 'Node auth token'
10
+ default: ""
11
+ npm_token:
12
+ description: 'npm token'
13
+ default: ""
14
+ skip_build:
15
+ description: 'Skip build'
16
+ default: 'false'
17
+
18
+ runs:
19
+ using: "composite"
20
+ steps:
21
+ - uses: actions/cache@v4
22
+ id: frontend-cache
23
+ with:
24
+ path: |
25
+ gradio/templates/*
26
+ key: gradio-lib-front-end-${{ hashFiles('js/**', 'client/js/**')}}
27
+ - name: Install pnpm
28
+ if: steps.frontend-cache.outputs.cache-hit != 'true' || inputs.always-install-pnpm == 'true'
29
+ uses: pnpm/action-setup@v2
30
+ with:
31
+ version: 8.9
32
+ - uses: actions/setup-node@v4
33
+ with:
34
+ node-version: 18
35
+ cache: pnpm
36
+ cache-dependency-path: pnpm-lock.yaml
37
+ env:
38
+ NODE_AUTH_TOKEN: ${{ inputs.always-install-pnpm }}
39
+ NPM_TOKEN: ${{ inputs.always-install-pnpm }}
40
+ - name: Install deps
41
+ if: steps.frontend-cache.outputs.cache-hit != 'true' || inputs.always-install-pnpm == 'true'
42
+ shell: bash
43
+ run: pnpm i --frozen-lockfile --ignore-scripts
44
+ - name: Build Css
45
+ if: inputs.always-install-pnpm == 'true'
46
+ shell: bash
47
+ run: pnpm css
48
+ - name: Build frontend
49
+ if: inputs.skip_build == 'false' && steps.frontend-cache.outputs.cache-hit != 'true'
50
+ shell: bash
51
+ run: pnpm build
.github/stale ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Number of days of inactivity before an issue becomes stale
2
+ daysUntilStale: 30
3
+ # Number of days of inactivity before a stale issue is closed
4
+ daysUntilClose: 7
5
+ # Issues with these labels will never be considered stale
6
+ exemptLabels:
7
+ - pinned
8
+ - security
9
+ # Label to use when marking an issue as stale
10
+ staleLabel: wontfix
11
+ # Comment to post when marking an issue as stale. Set to `false` to disable
12
+ markComment: >
13
+ This issue has been automatically marked as stale because it has not had
14
+ recent activity. It will be closed if no further activity occurs. Thank you
15
+ for your contributions.
16
+ # Comment to post when closing a stale issue. Set to `false` to disable
17
+ closeComment: false
.github/workflows/backend.yml ADDED
@@ -0,0 +1,244 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: gradio-backend
2
+
3
+ on:
4
+ push:
5
+ branches:
6
+ - "main"
7
+ pull_request:
8
+ types: [opened, synchronize, reopened, labeled, unlabeled]
9
+
10
+ concurrency:
11
+ group: backend-${{ github.ref }}-${{ github.event_name == 'push' || github.event.inputs.fire != null }}
12
+ cancel-in-progress: true
13
+
14
+ env:
15
+ NODE_OPTIONS: "--max-old-space-size=4096"
16
+ HF_TOKEN: ${{ secrets.HF_TOKEN }}
17
+
18
+ jobs:
19
+ changes:
20
+ runs-on: ubuntu-latest
21
+ outputs:
22
+ python-client: ${{ steps.changes.outputs.python-client }}
23
+ gradio: ${{ steps.changes.outputs.gradio }}
24
+ test: ${{ steps.changes.outputs.test }}
25
+ workflows: ${{ steps.changes.outputs.workflows }}
26
+ scripts: ${{ steps.changes.outputs.scripts }}
27
+ client-scripts: ${{ steps.changes.outputs.client-scripts }}
28
+ steps:
29
+ - uses: actions/checkout@v3
30
+ - uses: dorny/paths-filter@v2
31
+ id: changes
32
+ with:
33
+ filters: |
34
+ python-client:
35
+ - 'client/python/**'
36
+ - 'gradio/**'
37
+ - 'requirements.txt'
38
+ gradio:
39
+ - 'client/python/**'
40
+ - 'gradio/**'
41
+ - 'requirements.txt'
42
+ test:
43
+ - 'test/**'
44
+ workflows:
45
+ - '.github/**'
46
+ scripts:
47
+ - 'scripts/**'
48
+ client-test:
49
+ needs: [changes]
50
+ if: needs.changes.outputs.python-client == 'true' || needs.changes.outputs.workflows == 'true'
51
+ strategy:
52
+ matrix:
53
+ os: ["ubuntu-latest", "windows-latest"]
54
+ test-type: ["not flaky", "flaky"]
55
+ python-version: ["3.8"]
56
+ exclude:
57
+ - os: ${{ github.event_name == 'pull_request' && contains( github.event.pull_request.labels.*.name, 'windows-tests') && 'dummy' || 'windows-latest' }}
58
+ - test-type: ${{ github.event_name == 'pull_request' && contains( github.event.pull_request.labels.*.name, 'flaky-tests') && 'dummy' || 'flaky' }}
59
+ runs-on: ${{ matrix.os }}
60
+ continue-on-error: true
61
+ steps:
62
+ - uses: actions/checkout@v3
63
+ - name: Install Python
64
+ uses: actions/setup-python@v5
65
+ with:
66
+ python-version: ${{ matrix.python-version }}
67
+ cache: pip
68
+ cache-dependency-path: |
69
+ client/python/requirements.txt
70
+ requirements.txt
71
+ test/requirements.txt
72
+ - name: Create env
73
+ run: |
74
+ python -m pip install --upgrade virtualenv
75
+ python -m virtualenv venv
76
+ - uses: actions/cache@master
77
+ id: cache
78
+ with:
79
+ path: |
80
+ client/python/venv/*
81
+ key: python-client-${{ runner.os }}-pip-${{ hashFiles('client/python/requirements.txt') }}-${{ hashFiles('client/python/test/requirements.txt') }}
82
+ - uses: actions/cache@v4
83
+ id: frontend-cache
84
+ with:
85
+ path: |
86
+ gradio/templates/*
87
+ key: gradio-lib-front-end-${{ hashFiles('js/**', 'client/js/**')}}
88
+ - name: Install pnpm
89
+ if: steps.frontend-cache.outputs.cache-hit != 'true'
90
+ uses: pnpm/action-setup@v2
91
+ with:
92
+ version: 8
93
+ - uses: actions/setup-node@v4
94
+ if: steps.frontend-cache.outputs.cache-hit != 'true'
95
+ with:
96
+ node-version: 18
97
+ cache: pnpm
98
+ cache-dependency-path: pnpm-lock.yaml
99
+ - name: Build Frontend
100
+ if: steps.frontend-cache.outputs.cache-hit != 'true'
101
+ run: |
102
+ pnpm i --frozen-lockfile --ignore-scripts
103
+ pnpm build
104
+ - name: Install Test Requirements (Linux)
105
+ if: runner.os == 'Linux'
106
+ run: |
107
+ . venv/bin/activate
108
+ python -m pip install -r client/python/test/requirements.txt
109
+ - name: Install ffmpeg
110
+ uses: FedericoCarboni/setup-ffmpeg@v2
111
+ - name: Install Gradio and Client Libraries Locally (Linux)
112
+ if: runner.os == 'Linux'
113
+ run: |
114
+ . venv/bin/activate
115
+ python -m pip install client/python
116
+ python -m pip install ".[oauth]"
117
+ - name: Lint (Linux)
118
+ if: runner.os == 'Linux'
119
+ run: |
120
+ . venv/bin/activate
121
+ bash client/python/scripts/lint.sh
122
+ - name: Tests (Linux)
123
+ if: runner.os == 'Linux'
124
+ run: |
125
+ . venv/bin/activate
126
+ python -m pytest -m "${{ matrix.test-type }}" client/python/
127
+ - name: Install Test Requirements (Windows)
128
+ if: runner.os == 'Windows'
129
+ run: |
130
+ venv\Scripts\activate
131
+ pip install -r client/python/test/requirements.txt
132
+ - name: Install Gradio and Client Libraries Locally (Windows)
133
+ if: runner.os == 'Windows'
134
+ run: |
135
+ venv\Scripts\activate
136
+ python -m pip install client/python
137
+ python -m pip install ".[oauth]"
138
+ - name: Tests (Windows)
139
+ if: runner.os == 'Windows'
140
+ run: |
141
+ venv\Scripts\activate
142
+ python -m pytest -m "${{ matrix.test-type }}" client/python/
143
+ test:
144
+ needs: [changes]
145
+ if: needs.changes.outputs.gradio == 'true' || needs.changes.outputs.workflows == 'true' || needs.changes.outputs.scripts == 'true' || needs.changes.outputs.test == 'true'
146
+ strategy:
147
+ matrix:
148
+ os: ["ubuntu-latest", "windows-latest"]
149
+ test-type: ["not flaky", "flaky"]
150
+ python-version: ["3.8"]
151
+ exclude:
152
+ - os: ${{ github.event_name == 'pull_request' && contains( github.event.pull_request.labels.*.name, 'windows-tests') && 'dummy' || 'windows-latest' }}
153
+ - test-type: ${{ github.event_name == 'pull_request' && contains( github.event.pull_request.labels.*.name, 'flaky-tests') && 'dummy' || 'flaky' }}
154
+ runs-on: ${{ matrix.os }}
155
+ continue-on-error: true
156
+ steps:
157
+ - uses: actions/checkout@v3
158
+ - name: Install Python
159
+ uses: actions/setup-python@v5
160
+ with:
161
+ python-version: ${{ matrix.python-version }}
162
+ cache: pip
163
+ cache-dependency-path: |
164
+ client/python/requirements.txt
165
+ requirements.txt
166
+ test/requirements.txt
167
+ - name: Create env
168
+ run: |
169
+ python -m pip install --upgrade virtualenv
170
+ python -m virtualenv venv
171
+ - uses: actions/cache@v4
172
+ id: cache
173
+ with:
174
+ path: |
175
+ venv/*
176
+ key: gradio-lib-${{ runner.os }}-pip-${{ hashFiles('client/python/requirements.txt') }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('test/requirements.txt') }}
177
+ - uses: actions/cache@v4
178
+ id: frontend-cache
179
+ with:
180
+ path: |
181
+ gradio/templates/*
182
+ gradio/node/*
183
+ key: gradio-lib-front-end-${{ hashFiles('js/**', 'client/js/**')}}
184
+ - name: Install pnpm
185
+ if: steps.frontend-cache.outputs.cache-hit != 'true'
186
+ uses: pnpm/action-setup@v2
187
+ with:
188
+ version: 8
189
+ - uses: actions/setup-node@v4
190
+ if: steps.frontend-cache.outputs.cache-hit != 'true'
191
+ with:
192
+ node-version: 18
193
+ cache: pnpm
194
+ cache-dependency-path: pnpm-lock.yaml
195
+ - name: Build frontend
196
+ if: steps.frontend-cache.outputs.cache-hit != 'true'
197
+ run: |
198
+ pnpm i --frozen-lockfile --ignore-scripts
199
+ pnpm build
200
+ - name: Install Gradio and Client Libraries Locally (Linux)
201
+ if: runner.os == 'Linux'
202
+ run: |
203
+ . venv/bin/activate
204
+ python -m pip install client/python
205
+ python -m pip install ".[oauth]"
206
+ - name: Install Test Dependencies (Linux)
207
+ if: steps.cache.outputs.cache-hit != 'true' && runner.os == 'Linux'
208
+ run: |
209
+ . venv/bin/activate
210
+ bash scripts/install_test_requirements.sh
211
+ - name: Install ffmpeg
212
+ uses: FedericoCarboni/setup-ffmpeg@v2
213
+ - name: Lint (Linux)
214
+ if: runner.os == 'Linux'
215
+ run: |
216
+ . venv/bin/activate
217
+ bash scripts/lint_backend.sh
218
+ - name: Typecheck (Linux)
219
+ if: runner.os == 'Linux'
220
+ run: |
221
+ . venv/bin/activate
222
+ bash scripts/type_check_backend.sh
223
+ - name: Run tests (Linux)
224
+ if: runner.os == 'Linux'
225
+ run: |
226
+ . venv/bin/activate
227
+ python -m pytest -m "${{ matrix.test-type }}" --ignore=client
228
+ - name: Install Gradio and Client Libraries Locally (Windows)
229
+ if: runner.os == 'Windows'
230
+ run: |
231
+ venv\Scripts\activate
232
+ python -m pip install client/python
233
+ python -m pip install ".[oauth]"
234
+ - name: Install Test Dependencies (Windows)
235
+ if: steps.cache.outputs.cache-hit != 'true' && runner.os == 'Windows'
236
+ run: |
237
+ venv\Scripts\activate
238
+ python -m pip install . -r test/requirements.txt
239
+ - name: Run tests (Windows)
240
+ if: runner.os == 'Windows'
241
+ run: |
242
+ venv\Scripts\activate
243
+ python -m pytest -m "${{ matrix.test-type }}" --ignore=client
244
+ echo "The exit code for pytest was $LASTEXITCODE"
.github/workflows/build-pr.yml ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Build PR Artifacts
2
+
3
+ on:
4
+ workflow_dispatch:
5
+ pull_request:
6
+ branches:
7
+ - main
8
+
9
+ jobs:
10
+ comment-spaces-start:
11
+ uses: "./.github/workflows/comment-queue.yml"
12
+ secrets:
13
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
14
+ with:
15
+ pr_number: ${{ github.event.pull_request.number }}
16
+ message: spaces~pending~null
17
+ build_pr:
18
+ runs-on: ubuntu-latest
19
+ steps:
20
+ - uses: actions/checkout@v3
21
+ - name: Install Python
22
+ uses: actions/setup-python@v5
23
+ with:
24
+ python-version: '3.9'
25
+ - name: Install pnpm
26
+ uses: pnpm/action-setup@v2
27
+ with:
28
+ version: 8
29
+ - uses: actions/setup-node@v4
30
+ with:
31
+ node-version: 18
32
+ cache: pnpm
33
+ cache-dependency-path: pnpm-lock.yaml
34
+ - name: Install pip
35
+ run: python -m pip install build requests
36
+ - name: Get PR Number
37
+ id: get_pr_number
38
+ run: |
39
+ if ${{ github.event_name == 'pull_request' }}; then
40
+ echo "GRADIO_VERSION=$(python -c 'import requests;print(requests.get("https://pypi.org/pypi/gradio/json").json()["info"]["version"])')" >> $GITHUB_OUTPUT
41
+ python -c "import os;print(os.environ['GITHUB_REF'].split('/')[2])" > pr_number.txt
42
+ echo "PR_NUMBER=$(cat pr_number.txt)" >> $GITHUB_OUTPUT
43
+ else
44
+ echo "GRADIO_VERSION=$(python -c 'import json; print(json.load(open("gradio/package.json"))["version"])')" >> $GITHUB_OUTPUT
45
+ echo "PR_NUMBER='main'" >> $GITHUB_OUTPUT
46
+ fi
47
+ - name: Build pr package
48
+ run: |
49
+ python -c 'import json; j = json.load(open("gradio/package.json")); j["version"] = "${{ steps.get_pr_number.outputs.GRADIO_VERSION }}"; json.dump(j, open("gradio/package.json", "w"))'
50
+ pnpm i --frozen-lockfile --ignore-scripts
51
+ pnpm build
52
+ python3 -m build -w
53
+ env:
54
+ NODE_OPTIONS: --max_old_space_size=8192
55
+ - name: Upload wheel
56
+ uses: actions/upload-artifact@v4
57
+ with:
58
+ name: gradio-${{ steps.get_pr_number.outputs.GRADIO_VERSION }}-py3-none-any.whl
59
+ path: dist/gradio-${{ steps.get_pr_number.outputs.GRADIO_VERSION }}-py3-none-any.whl
60
+ - name: Set up Demos
61
+ run: |
62
+ python scripts/copy_demos.py https://gradio-builds.s3.amazonaws.com/${{ github.sha }}/gradio-${{ steps.get_pr_number.outputs.GRADIO_VERSION }}-py3-none-any.whl \
63
+ "gradio-client @ git+https://github.com/gradio-app/gradio@${{ github.sha }}#subdirectory=client/python"
64
+ - name: Upload all_demos
65
+ uses: actions/upload-artifact@v4
66
+ with:
67
+ name: all_demos
68
+ path: demo/all_demos
69
+ - name: Create metadata artifact
70
+ run: |
71
+ python -c "import json; json.dump({'gh_sha': '${{ github.sha }}', 'pr_number': ${{ steps.get_pr_number.outputs.pr_number }}, 'version': '${{ steps.get_pr_number.outputs.GRADIO_VERSION }}', 'wheel': 'gradio-${{ steps.get_pr_number.outputs.GRADIO_VERSION }}-py3-none-any.whl'}, open('metadata.json', 'w'))"
72
+ - name: Upload metadata
73
+ uses: actions/upload-artifact@v4
74
+ with:
75
+ name: metadata.json
76
+ path: metadata.json
.github/workflows/check-demo-notebooks.yml ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This workflow will check if the run.py files in every demo match the run.ipynb notebooks.
2
+
3
+ name: Check Demos Match Notebooks
4
+
5
+ on:
6
+ pull_request:
7
+ types: [opened, synchronize, reopened]
8
+ paths:
9
+ - 'demo/**'
10
+
11
+ jobs:
12
+ comment-notebook-start:
13
+ uses: "./.github/workflows/comment-queue.yml"
14
+ secrets:
15
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
16
+ with:
17
+ pr_number: ${{ github.event.pull_request.number }}
18
+ message: notebooks~pending~null
19
+ check-notebooks:
20
+ name: Generate Notebooks and Check
21
+ runs-on: ubuntu-latest
22
+ steps:
23
+ - name: Checkout
24
+ uses: actions/checkout@v3
25
+ with:
26
+ ref: ${{ github.event.pull_request.head.ref }}
27
+ repository: ${{ github.event.pull_request.head.repo.full_name }}
28
+ - name: Generate Notebooks
29
+ run: |
30
+ pip install nbformat && cd demo && python generate_notebooks.py
31
+ - name: Print Git Status
32
+ run: echo $(git status) && echo $(git diff)
33
+ - name: Assert Notebooks Match
34
+ id: assertNotebooksMatch
35
+ run: git status | grep "nothing to commit, working tree clean"
36
+ - name: Get PR Number
37
+ if: always()
38
+ run: |
39
+ python -c "import os;print(os.environ['GITHUB_REF'].split('/')[2])" > pr_number.txt
40
+ echo "PR_NUMBER=$(cat pr_number.txt)" >> $GITHUB_ENV
41
+ - name: Upload PR Number
42
+ if: always()
43
+ run: |
44
+ python -c "import json; json.dump({'pr_number': ${{ env.PR_NUMBER }}}, open('metadata.json', 'w'))"
45
+ - name: Upload metadata
46
+ if: always()
47
+ uses: actions/upload-artifact@v4
48
+ with:
49
+ name: metadata.json
50
+ path: metadata.json
.github/workflows/comment-queue.yml ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Comment on pull request without race conditions
2
+
3
+ on:
4
+ workflow_call:
5
+ inputs:
6
+ pr_number:
7
+ type: string
8
+ message:
9
+ required: true
10
+ type: string
11
+ tag:
12
+ required: false
13
+ type: string
14
+ default: "previews"
15
+ additional_text:
16
+ required: false
17
+ type: string
18
+ default: ""
19
+ secrets:
20
+ gh_token:
21
+ required: true
22
+
23
+ jobs:
24
+ comment:
25
+ concurrency:
26
+ group: ${{inputs.pr_number || inputs.tag}}
27
+ runs-on: ubuntu-latest
28
+ steps:
29
+ - name: comment on pr
30
+ uses: "gradio-app/github/actions/comment-pr@main"
31
+ with:
32
+ gh_token: ${{ secrets.gh_token }}
33
+ tag: ${{ inputs.tag }}
34
+ pr_number: ${{ inputs.pr_number}}
35
+ message: ${{ inputs.message }}
36
+ additional_text: ${{ inputs.additional_text }}
.github/workflows/delete-stale-spaces.yml ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Delete Stale Spaces
2
+
3
+ on:
4
+ schedule:
5
+ - cron: '0 0 * * *'
6
+ workflow_dispatch:
7
+ inputs:
8
+ daysStale:
9
+ description: 'How stale a space needs to be to be deleted (days)'
10
+ required: true
11
+ default: '7'
12
+
13
+
14
+ jobs:
15
+ delete-old-spaces:
16
+ runs-on: ubuntu-latest
17
+ steps:
18
+ - uses: actions/checkout@v3
19
+ - name: Install Python
20
+ uses: actions/setup-python@v5
21
+ with:
22
+ python-version: '3.9'
23
+ - name: Install pip
24
+ run: python -m pip install pip wheel requests
25
+ - name: Install Hub Client Library
26
+ run: pip install huggingface-hub==0.9.1
27
+ - name: Set daysStale
28
+ env:
29
+ DEFAULT_DAYS_STALE: '7'
30
+ run: echo "DAYS_STALE=${{ github.event.inputs.daysStale || env.DEFAULT_DAYS_STALE }}" >> $GITHUB_ENV
31
+ - name: Find and delete stale spaces
32
+ run: |
33
+ python scripts/delete_old_spaces.py $DAYS_STALE \
34
+ gradio-pr-deploys \
35
+ ${{ secrets.SPACES_DEPLOY_TOKEN }}
.github/workflows/deploy-chromatic.yml ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: 'deploy to chromatic'
2
+
3
+ on:
4
+ push:
5
+ paths:
6
+ - 'js/**'
7
+ - 'gradio/themes/**'
8
+ - '.github/workflows/deploy-chromatic.yml'
9
+ - '!js/_website/**'
10
+
11
+
12
+ jobs:
13
+ get-current-pr:
14
+ runs-on: ubuntu-latest
15
+ outputs:
16
+ pr_found: ${{ steps.get-pr.outputs.pr_found }}
17
+ pr_number: ${{ steps.get-pr.outputs.number }}
18
+ pr_labels: ${{ steps.get-pr.outputs.pr_labels }}
19
+ steps:
20
+ - uses: 8BitJonny/gh-get-current-pr@2.2.0
21
+ id: get-pr
22
+ with:
23
+ filterOutDraft: true
24
+ comment-chromatic-start:
25
+ uses: "./.github/workflows/comment-queue.yml"
26
+ needs: get-current-pr
27
+ secrets:
28
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
29
+ with:
30
+ pr_number: ${{ needs.get-current-pr.outputs.pr_number }}
31
+ message: |
32
+ storybook~pending~null
33
+ visual~pending~0~0~null
34
+ chromatic-deployment:
35
+ needs: get-current-pr
36
+ runs-on: ubuntu-latest
37
+ outputs:
38
+ changes: ${{ steps.publish-chromatic.outputs.changeCount }}
39
+ errors: ${{ steps.publish-chromatic.outputs.errorCount }}
40
+ storybook_url: ${{ steps.publish-chromatic.outputs.storybookUrl }}
41
+ build_url: ${{ steps.publish-chromatic.outputs.buildUrl }}
42
+ if: ${{ github.repository == 'gradio-app/gradio' && !contains(needs.get-current-pr.outputs.pr_labels, 'no-visual-update') }}
43
+ steps:
44
+ - uses: actions/checkout@v3
45
+ with:
46
+ fetch-depth: 0
47
+ - name: install dependencies
48
+ uses: "./.github/actions/install-all-deps"
49
+ with:
50
+ always-install-pnpm: true
51
+ skip_build: 'true'
52
+ - name: build client
53
+ run: pnpm --filter @gradio/client build
54
+ - name: generate theme.css
55
+ run: |
56
+ . venv/bin/activate
57
+ python scripts/generate_theme.py --outfile js/storybook/theme.css
58
+ - name: build storybook
59
+ run: pnpm build-storybook --quiet
60
+ - name: publish to chromatic
61
+ id: publish-chromatic
62
+ uses: chromaui/action@v10
63
+ with:
64
+ projectToken: ${{ secrets.CHROMATIC_PROJECT_TOKEN }}
65
+ token: ${{ secrets.GITHUB_TOKEN }}
66
+ onlyChanged: true
67
+ exitOnceUploaded: true
68
+ comment-chromatic-end:
69
+ uses: "./.github/workflows/comment-queue.yml"
70
+ needs: [chromatic-deployment, get-current-pr]
71
+ secrets:
72
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
73
+ with:
74
+ pr_number: ${{ needs.get-current-pr.outputs.pr_number }}
75
+ message: |
76
+ storybook~success~${{ needs.chromatic-deployment.outputs.storybook_url }}
77
+ visual~success~${{ needs.chromatic-deployment.outputs.changes }}~${{ needs.chromatic-deployment.outputs.errors }}~${{ needs.chromatic-deployment.outputs.build_url }}
78
+ comment-chromatic-fail:
79
+ uses: "./.github/workflows/comment-queue.yml"
80
+ needs: [chromatic-deployment, get-current-pr]
81
+ if: always() && needs.chromatic-deployment.result == 'failure'
82
+ secrets:
83
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
84
+ with:
85
+ pr_number: ${{ needs.get-current-pr.outputs.pr_number }}
86
+ message: |
87
+ storybook~failure~https://github.com/gradio-app/gradio/actions/runs/${{github.run_id}}/
88
+ visual~failure~0~0~https://github.com/gradio-app/gradio/actions/runs/${{github.run_id}}/
.github/workflows/deploy-pr-to-spaces.yml ADDED
@@ -0,0 +1,99 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Deploy PR to Spaces
2
+
3
+ on:
4
+ workflow_run:
5
+ workflows: [Build PR Artifacts]
6
+ types:
7
+ - completed
8
+
9
+ jobs:
10
+ deploy-current-pr:
11
+ outputs:
12
+ pr_number: ${{ steps.set-outputs.outputs.pr_number }}
13
+ space_url: ${{ steps.upload-demo.outputs.SPACE_URL }}
14
+ sha: ${{ steps.set-outputs.outputs.gh_sha }}
15
+ gradio_version: ${{ steps.set-outputs.outputs.gradio_version }}
16
+ runs-on: ubuntu-latest
17
+ steps:
18
+ - uses: actions/checkout@v3
19
+ - name: Install Python
20
+ uses: actions/setup-python@v5
21
+ with:
22
+ python-version: '3.9'
23
+ - name: Install pip
24
+ run: python -m pip install build requests
25
+ - name: Download metadata
26
+ run: python scripts/download_artifacts.py ${{github.event.workflow_run.id }} metadata.json ${{ secrets.COMMENT_TOKEN }} --owner ${{ github.repository_owner }}
27
+ - run: unzip metadata.json.zip
28
+ - name: set outputs
29
+ id: set-outputs
30
+ run: |
31
+ echo "wheel_name=$(python -c 'import json; print(json.load(open("metadata.json"))["wheel"])')" >> $GITHUB_OUTPUT
32
+ echo "gh_sha=$(python -c 'import json; print(json.load(open("metadata.json"))["gh_sha"])')" >> $GITHUB_OUTPUT
33
+ echo "gradio_version=$(python -c 'import json; print(json.load(open("metadata.json"))["version"])')" >> $GITHUB_OUTPUT
34
+ echo "pr_number=$(python -c 'import json; print(json.load(open("metadata.json"))["pr_number"])')" >> $GITHUB_OUTPUT
35
+ - name: 'Download wheel'
36
+ run: python scripts/download_artifacts.py ${{ github.event.workflow_run.id }} ${{ steps.set-outputs.outputs.wheel_name }} ${{ secrets.COMMENT_TOKEN }} --owner ${{ github.repository_owner }}
37
+ - run: unzip ${{ steps.set-outputs.outputs.wheel_name }}.zip
38
+ - name: Upload wheel
39
+ run: |
40
+ export AWS_ACCESS_KEY_ID=${{ secrets.PR_DEPLOY_KEY }}
41
+ export AWS_SECRET_ACCESS_KEY=${{ secrets.PR_DEPLOY_SECRET }}
42
+ export AWS_DEFAULT_REGION=us-east-1
43
+ aws s3 cp ${{ steps.set-outputs.outputs.wheel_name }} s3://gradio-builds/${{ steps.set-outputs.outputs.gh_sha }}/
44
+ - name: Install Hub Client Library
45
+ run: pip install huggingface-hub
46
+ - name: 'Download all_demos'
47
+ run: python scripts/download_artifacts.py ${{ github.event.workflow_run.id }} all_demos ${{ secrets.COMMENT_TOKEN }} --owner ${{ github.repository_owner }}
48
+ - run: unzip all_demos.zip -d all_demos
49
+ - run: cp -R all_demos/* demo/all_demos
50
+ - name: Upload demo to spaces
51
+ if: github.event.workflow_run.event == 'pull_request'
52
+ id: upload-demo
53
+ run: |
54
+ python scripts/upload_demo_to_space.py all_demos \
55
+ gradio-pr-deploys/pr-${{ steps.set-outputs.outputs.pr_number }}-all-demos \
56
+ ${{ secrets.SPACES_DEPLOY_TOKEN }} \
57
+ --gradio-version ${{ steps.set-outputs.outputs.gradio_version }} > url.txt
58
+ echo "SPACE_URL=$(cat url.txt)" >> $GITHUB_OUTPUT
59
+ - name: Upload Website Demos
60
+ if: >
61
+ github.event.workflow_run.event == 'workflow_dispatch' &&
62
+ github.event.workflow_run.conclusion == 'success'
63
+ id: upload-website-demos
64
+ run: |
65
+ python scripts/upload_website_demos.py --AUTH_TOKEN ${{ secrets.WEBSITE_SPACES_DEPLOY_TOKEN }} \
66
+ --WHEEL_URL https://gradio-builds.s3.amazonaws.com/${{ steps.set-outputs.outputs.gh_sha }}/ \
67
+ --GRADIO_VERSION ${{ steps.set-outputs.outputs.gradio_version }}
68
+
69
+ comment-spaces-success:
70
+ uses: "./.github/workflows/comment-queue.yml"
71
+ needs: [deploy-current-pr]
72
+ if: >
73
+ github.event.workflow_run.event == 'pull_request' &&
74
+ github.event.workflow_run.conclusion == 'success' &&
75
+ needs.deploy-current-pr.result == 'success'
76
+ secrets:
77
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
78
+ with:
79
+ pr_number: ${{ needs.deploy-current-pr.outputs.pr_number }}
80
+ message: spaces~success~${{ needs.deploy-current-pr.outputs.space_url }}
81
+ additional_text: |
82
+ **Install Gradio from this PR**
83
+ ```bash
84
+ pip install https://gradio-builds.s3.amazonaws.com/${{ needs.deploy-current-pr.outputs.sha }}/gradio-${{ needs.deploy-current-pr.outputs.gradio_version }}-py3-none-any.whl
85
+ ```
86
+
87
+ **Install Gradio Python Client from this PR**
88
+ ```bash
89
+ pip install "gradio-client @ git+https://github.com/gradio-app/gradio@${{ needs.deploy-current-pr.outputs.sha }}#subdirectory=client/python"
90
+ ```
91
+ comment-spaces-failure:
92
+ uses: "./.github/workflows/comment-queue.yml"
93
+ needs: [deploy-current-pr]
94
+ if: always() && needs.deploy-current-pr.result == 'failure'
95
+ secrets:
96
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
97
+ with:
98
+ pr_number: ${{ needs.deploy-current-pr.outputs.pr_number }}
99
+ message: spaces~failure~https://github.com/gradio-app/gradio/actions/runs/${{github.run_id}}/
.github/workflows/deploy-website.yml ADDED
@@ -0,0 +1,108 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: "deploy website"
2
+
3
+ on:
4
+ workflow_call:
5
+ inputs:
6
+ branch_name:
7
+ description: "The branch name"
8
+ type: string
9
+ pr_number:
10
+ description: "The PR number"
11
+ type: string
12
+ secrets:
13
+ vercel_token:
14
+ description: "Vercel API token"
15
+ gh_token:
16
+ description: "Github token"
17
+ required: true
18
+ vercel_org_id:
19
+ description: "Vercel organization ID"
20
+ required: true
21
+ vercel_project_id:
22
+ description: "Vercel project ID"
23
+ required: true
24
+
25
+ env:
26
+ VERCEL_ORG_ID: ${{ secrets.vercel_org_id }}
27
+ VERCEL_PROJECT_ID: ${{ secrets.vercel_project_id }}
28
+
29
+ jobs:
30
+ comment-deploy-start:
31
+ uses: "./.github/workflows/comment-queue.yml"
32
+ secrets:
33
+ gh_token: ${{ secrets.gh_token }}
34
+ with:
35
+ pr_number: ${{ inputs.pr_number }}
36
+ message: website~pending~null
37
+ deploy:
38
+ name: "Deploy website"
39
+ runs-on: ubuntu-latest
40
+ outputs:
41
+ vercel_url: ${{ steps.output_url.outputs.vercel_url }}
42
+ steps:
43
+ - uses: actions/checkout@v3
44
+ - name: install dependencies
45
+ uses: "./.github/actions/install-frontend-deps"
46
+ with:
47
+ always-install-pnpm: true
48
+ skip_build: true
49
+ - name: download artifacts
50
+ uses: actions/download-artifact@v4
51
+ with:
52
+ name: website-json-${{ inputs.pr_number }}
53
+ path: |
54
+ ./js/_website/src/lib/json
55
+ - name: echo artifact path
56
+ shell: bash
57
+ run: ls ./js/_website/src/lib/json
58
+ - name: Install Vercel CLI
59
+ shell: bash
60
+ run: pnpm install --global vercel@latest
61
+ # preview
62
+ - name: Pull Vercel Environment Information
63
+ shell: bash
64
+ if: github.event_name == 'pull_request'
65
+ run: vercel pull --yes --environment=preview --token=${{ secrets.vercel_token }}
66
+ - name: Build Project Artifacts
67
+ if: github.event_name == 'pull_request'
68
+ shell: bash
69
+ run: vercel build --token=${{ secrets.vercel_token }}
70
+ - name: Deploy Project Artifacts to Vercel
71
+ if: github.event_name == 'pull_request'
72
+ id: output_url
73
+ shell: bash
74
+ run: echo "vercel_url=$(vercel deploy --prebuilt --token=${{ secrets.vercel_token }})" >> $GITHUB_OUTPUT
75
+ # production
76
+ - name: Pull Vercel Environment Information
77
+ if: github.event_name == 'push' && inputs.branch_name == 'main'
78
+ shell: bash
79
+ run: vercel pull --yes --environment=production --token=${{ secrets.vercel_token }}
80
+ - name: Build Project Artifacts
81
+ if: github.event_name == 'push' && inputs.branch_name == 'main'
82
+ shell: bash
83
+ run: vercel build --prod --token=${{ secrets.vercel_token }}
84
+ - name: Deploy Project Artifacts to Vercel
85
+ if: github.event_name == 'push' && inputs.branch_name == 'main'
86
+ shell: bash
87
+ run: echo "VERCEL_URL=$(vercel deploy --prebuilt --prod --token=${{ secrets.vercel_token }})" >> $GITHUB_ENV
88
+ - name: echo vercel url
89
+ shell: bash
90
+ run: echo $VERCEL_URL #add to comment
91
+ comment-deploy-success:
92
+ uses: "./.github/workflows/comment-queue.yml"
93
+ needs: deploy
94
+ if: needs.deploy.result == 'success'
95
+ secrets:
96
+ gh_token: ${{ secrets.gh_token }}
97
+ with:
98
+ pr_number: ${{ inputs.pr_number }}
99
+ message: website~success~${{needs.deploy.outputs.vercel_url}}
100
+ comment-deploy-failure:
101
+ uses: "./.github/workflows/comment-queue.yml"
102
+ needs: deploy
103
+ if: always() && needs.deploy.result == 'failure'
104
+ secrets:
105
+ gh_token: ${{ secrets.gh_token }}
106
+ with:
107
+ pr_number: ${{ inputs.pr_number }}
108
+ message: website~failure~https://github.com/gradio-app/gradio/actions/runs/${{github.run_id}}/
.github/workflows/generate-changeset.yml ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Generate changeset
2
+ on:
3
+ workflow_run:
4
+ workflows: ["trigger changeset generation"]
5
+ types:
6
+ - completed
7
+
8
+ env:
9
+ CI: true
10
+ NODE_OPTIONS: "--max-old-space-size=4096"
11
+
12
+ concurrency:
13
+ group: ${{ github.event.workflow_run.head_repository.full_name }}::${{ github.event.workflow_run.head_branch }}
14
+
15
+ jobs:
16
+ get-pr:
17
+ runs-on: ubuntu-latest
18
+ if: github.event.workflow_run.conclusion == 'success'
19
+ outputs:
20
+ found_pr: ${{ steps.pr_details.outputs.found_pr }}
21
+ pr_number: ${{ steps.pr_details.outputs.pr_number }}
22
+ source_repo: ${{ steps.pr_details.outputs.source_repo }}
23
+ source_branch: ${{ steps.pr_details.outputs.source_branch }}
24
+ steps:
25
+ - name: get pr details
26
+ id: pr_details
27
+ uses: gradio-app/github/actions/find-pr@main
28
+ with:
29
+ github_token: ${{ secrets.GITHUB_TOKEN }}
30
+ comment-changes-start:
31
+ uses: "./.github/workflows/comment-queue.yml"
32
+ needs: get-pr
33
+ secrets:
34
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
35
+ with:
36
+ pr_number: ${{ needs.get-pr.outputs.pr_number }}
37
+ message: changes~pending~null
38
+ version:
39
+ permissions: write-all
40
+ name: static checks
41
+ needs: get-pr
42
+ runs-on: ubuntu-22.04
43
+ if: needs.get-pr.outputs.found_pr == 'true'
44
+ outputs:
45
+ skipped: ${{ steps.version.outputs.skipped }}
46
+ comment_url: ${{ steps.version.outputs.comment_url }}
47
+ steps:
48
+ - uses: actions/checkout@v3
49
+ with:
50
+ repository: ${{ needs.get-pr.outputs.source_repo }}
51
+ ref: ${{ needs.get-pr.outputs.source_branch }}
52
+ fetch-depth: 0
53
+ token: ${{ secrets.COMMENT_TOKEN }}
54
+ - name: generate changeset
55
+ id: version
56
+ uses: "gradio-app/github/actions/generate-changeset@main"
57
+ with:
58
+ github_token: ${{ secrets.COMMENT_TOKEN }}
59
+ main_pkg: gradio
60
+ pr_number: ${{ needs.get-pr.outputs.pr_number }}
61
+ branch_name: ${{ needs.get-pr.outputs.source_branch }}
62
+ comment-changes-skipped:
63
+ uses: "./.github/workflows/comment-queue.yml"
64
+ needs: [get-pr, version]
65
+ if: needs.version.result == 'success' && needs.version.outputs.skipped == 'true'
66
+ secrets:
67
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
68
+ with:
69
+ pr_number: ${{ needs.get-pr.outputs.pr_number }}
70
+ message: changes~warning~https://github.com/gradio-app/gradio/actions/runs/${{github.run_id}}/
71
+ comment-changes-success:
72
+ uses: "./.github/workflows/comment-queue.yml"
73
+ needs: [get-pr, version]
74
+ if: needs.version.result == 'success' && needs.version.outputs.skipped == 'false'
75
+ secrets:
76
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
77
+ with:
78
+ pr_number: ${{ needs.get-pr.outputs.pr_number }}
79
+ message: changes~success~${{ needs.version.outputs.comment_url }}
80
+ comment-changes-failure:
81
+ uses: "./.github/workflows/comment-queue.yml"
82
+ needs: [get-pr, version]
83
+ if: always() && needs.version.result == 'failure'
84
+ secrets:
85
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
86
+ with:
87
+ pr_number: ${{ needs.get-pr.outputs.pr_number }}
88
+ message: changes~failure~https://github.com/gradio-app/gradio/actions/runs/${{github.run_id}}/
.github/workflows/large-files.yml ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Check for large files
2
+
3
+ on:
4
+ pull_request:
5
+
6
+ jobs:
7
+ check-files:
8
+ runs-on: ubuntu-latest
9
+ if: github.event.pull_request.head.repo.full_name == github.event.pull_request.base.repo.full_name
10
+
11
+ steps:
12
+ - name: Checkout
13
+ uses: actions/checkout@v3
14
+ with:
15
+ ref: ${{ github.event.pull_request.head.ref }}
16
+ repository: ${{ github.event.pull_request.head.repo.full_name }}
17
+ - name: Check for large files
18
+ uses: actionsdesk/lfs-warning@v3.2
19
+ with:
20
+ filesizelimit: 5MB
21
+
.github/workflows/publish-npm.yml ADDED
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: Changesets
2
+ on:
3
+ push:
4
+ branches:
5
+ - main
6
+
7
+ env:
8
+ CI: true
9
+ PNPM_CACHE_FOLDER: .pnpm-store
10
+ NODE_OPTIONS: "--max-old-space-size=4096"
11
+ jobs:
12
+ version_or_publish:
13
+ runs-on: ubuntu-22.04
14
+ steps:
15
+ - name: checkout repo
16
+ uses: actions/checkout@v3
17
+ with:
18
+ fetch-depth: 0
19
+ persist-credentials: false
20
+ - name: install dependencies
21
+ uses: "./.github/actions/install-all-deps"
22
+ with:
23
+ always-install-pnpm: true
24
+ node_auth_token: ${{ secrets.NPM_TOKEN }}
25
+ npm_token: ${{ secrets.NPM_TOKEN }}
26
+ skip_build: 'true'
27
+ - name: Build packages
28
+ run: |
29
+ . venv/bin/activate
30
+ pip install build
31
+ pnpm --filter @gradio/client --filter @gradio/lite build
32
+ - name: create and publish versions
33
+ id: changesets
34
+ uses: changesets/action@v1
35
+ with:
36
+ version: pnpm ci:version
37
+ commit: "chore: update versions"
38
+ title: "chore: update versions"
39
+ publish: pnpm ci:publish
40
+ env:
41
+ NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
42
+ NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
43
+ GITHUB_TOKEN: ${{ secrets.GRADIO_PAT }}
44
+ - name: add label to skip chromatic build
45
+ if: ${{ steps.changesets.outputs.pullRequestNumber != '' && steps.changesets.outputs.pullRequestNumber != 'undefined' }}
46
+ run: gh pr edit "$PR_NUMBER" --add-label "no-visual-update"
47
+ env:
48
+ PR_NUMBER: ${{ steps.changesets.outputs.pullRequestNumber }}
49
+ GITHUB_TOKEN: ${{ secrets.GRADIO_PAT }}
50
+ - name: add label to run flaky tests
51
+ if: ${{ steps.changesets.outputs.pullRequestNumber != '' && steps.changesets.outputs.pullRequestNumber != 'undefined' }}
52
+ run: gh pr edit "$PR_NUMBER" --add-label "flaky-tests"
53
+ env:
54
+ PR_NUMBER: ${{ steps.changesets.outputs.pullRequestNumber }}
55
+ GITHUB_TOKEN: ${{ secrets.GRADIO_PAT }}
56
+ - name: add label to run backend tests on Windows
57
+ if: ${{ steps.changesets.outputs.pullRequestNumber != '' && steps.changesets.outputs.pullRequestNumber != 'undefined' }}
58
+ run: gh pr edit "$PR_NUMBER" --add-label "windows-tests"
59
+ env:
60
+ PR_NUMBER: ${{ steps.changesets.outputs.pullRequestNumber }}
61
+ GITHUB_TOKEN: ${{ secrets.GRADIO_PAT }}
62
+ - name: publish to pypi
63
+ if: steps.changesets.outputs.hasChangesets != 'true'
64
+ uses: "gradio-app/github/actions/publish-pypi@main"
65
+ env:
66
+ AWS_ACCESS_KEY_ID: ${{ secrets.AWSACCESSKEYID }}
67
+ AWS_SECRET_ACCESS_KEY: ${{ secrets.AWSSECRETKEY }}
68
+ AWS_DEFAULT_REGION: us-west-2
69
+ with:
70
+ user: __token__
71
+ passwords: |
72
+ gradio:${{ secrets.PYPI_API_TOKEN }}
73
+ gradio_client:${{ secrets.PYPI_GRADIO_CLIENT_TOKEN }}
74
+ - name: trigger spaces deploy workflow
75
+ env:
76
+ GITHUB_TOKEN: ${{ secrets.COMMENT_TOKEN }}
77
+ run: gh workflow run build-pr.yml
.github/workflows/report-notebook-status-pr.yml ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ on:
2
+ workflow_run:
3
+ workflows: [Check Demos Match Notebooks]
4
+ types: [completed]
5
+
6
+ jobs:
7
+ get-pr-number:
8
+ runs-on: ubuntu-latest
9
+ outputs:
10
+ pr_number: ${{ steps.pr_number.outputs.pr_number }}
11
+ steps:
12
+ - uses: actions/checkout@v3
13
+ - name: Install Python
14
+ uses: actions/setup-python@v5
15
+ with:
16
+ python-version: '3.9'
17
+ - name: Install pip
18
+ run: python -m pip install requests
19
+ - name: Download metadata
20
+ run: python scripts/download_artifacts.py ${{github.event.workflow_run.id }} metadata.json ${{ secrets.COMMENT_TOKEN }} --owner ${{ github.repository_owner }}
21
+ - run: unzip metadata.json.zip
22
+ - name: Pipe metadata to env
23
+ id: pr_number
24
+ run: echo "pr_number=$(python -c 'import json; print(json.load(open("metadata.json"))["pr_number"])')" >> $GITHUB_OUTPUT
25
+ comment-success:
26
+ uses: "./.github/workflows/comment-queue.yml"
27
+ if: ${{ github.event.workflow_run.conclusion == 'success' && github.event.workflow_run.name == 'Check Demos Match Notebooks'}}
28
+ needs: get-pr-number
29
+ secrets:
30
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
31
+ with:
32
+ pr_number: ${{ needs.get-pr-number.outputs.pr_number }}
33
+ message: notebooks~success~null
34
+ comment-failure:
35
+ uses: "./.github/workflows/comment-queue.yml"
36
+ if: ${{ github.event.workflow_run.conclusion == 'failure' && github.event.workflow_run.name == 'Check Demos Match Notebooks'}}
37
+ needs: get-pr-number
38
+ secrets:
39
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
40
+ with:
41
+ pr_number: ${{ needs.get-pr-number.outputs.pr_number }}
42
+ message: notebooks~failure~https://github.com/gradio-app/gradio/actions/runs/${{github.run_id}}/
43
+ additional_text: |
44
+ The demo notebooks don't match the run.py files. Please run this command from the root of the repo and then commit the changes:
45
+ ```bash
46
+ pip install nbformat && cd demo && python generate_notebooks.py
47
+ ```
.github/workflows/trigger-changeset.yml ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: trigger changeset generation
2
+ on:
3
+ pull_request:
4
+ types: [opened, synchronize, reopened, edited, labeled, unlabeled]
5
+ branches:
6
+ - main
7
+ issue_comment:
8
+ types: [edited]
9
+
10
+ jobs:
11
+ version:
12
+ permissions: write-all
13
+ name: static checks
14
+ runs-on: ubuntu-22.04
15
+ if: github.event.sender.login != 'gradio-pr-bot'
16
+ steps:
17
+ - run: echo ${{ github.event_name }}
18
+ - run: echo ${{ github.event.sender.login }}
19
+ - run: echo "Triggering changeset generation"
.github/workflows/ui.yml ADDED
@@ -0,0 +1,103 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ name: gradio-ui
2
+
3
+ on:
4
+ push:
5
+ branches:
6
+ - "main"
7
+ pull_request:
8
+
9
+ env:
10
+ CI: true
11
+ PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: "1"
12
+ NODE_OPTIONS: "--max-old-space-size=4096"
13
+ VERCEL_ORG_ID: ${{ secrets.VERCEL_ORG_ID }}
14
+ VERCEL_PROJECT_ID: ${{ secrets.VERCEL_PROJECT_ID }}
15
+ concurrency:
16
+ group: deploy-${{ github.ref }}-${{ github.event_name == 'push' || github.event.inputs.fire != null }}
17
+ cancel-in-progress: true
18
+
19
+ jobs:
20
+ quick-checks:
21
+ name: static checks
22
+ runs-on: ubuntu-22.04
23
+ steps:
24
+ - uses: actions/checkout@v3
25
+ - name: install dependencies
26
+ uses: "./.github/actions/install-frontend-deps"
27
+ with:
28
+ always-install-pnpm: true
29
+ - name: build client
30
+ run: pnpm --filter @gradio/client build
31
+ - name: build the wasm module
32
+ run: pnpm --filter @gradio/wasm build
33
+ - name: format check
34
+ run: pnpm format:check
35
+ - name: lint
36
+ run: pnpm lint
37
+ - name: typecheck
38
+ run: pnpm ts:check
39
+ - name: unit tests
40
+ run: pnpm test:run
41
+ functional-test:
42
+ runs-on: ubuntu-latest
43
+ outputs:
44
+ source_branch: ${{ steps.pr_details.outputs.source_branch }}
45
+ pr_number: ${{ steps.pr_details.outputs.pr_number }}
46
+ steps:
47
+ - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4
48
+ - name: install dependencies
49
+ id: install_deps
50
+ uses: "./.github/actions/install-all-deps"
51
+ with:
52
+ always-install-pnpm: true
53
+ - name: get pr details
54
+ id: pr_details
55
+ uses: gradio-app/github/actions/find-pr@main
56
+ with:
57
+ github_token: ${{ secrets.GITHUB_TOKEN }}
58
+ - name: deploy json to aws
59
+ if: steps.pr_details.outputs.source_branch == 'changeset-release/main'
60
+ run: |
61
+ export AWS_ACCESS_KEY_ID=${{ secrets.AWSACCESSKEYID }}
62
+ export AWS_SECRET_ACCESS_KEY=${{ secrets.AWSSECRETKEY }}
63
+ export AWS_DEFAULT_REGION=us-west-2
64
+ version=$(jq -r .version js/_website/src/lib/json/version.json)
65
+ aws s3 cp ./js/_website/src/lib/json/ s3://gradio-docs-json/$version/ --recursive
66
+ - name: install outbreak_forecast dependencies
67
+ run: |
68
+ . venv/bin/activate
69
+ python -m pip install -r demo/outbreak_forecast/requirements.txt
70
+ - run: pnpm exec playwright install chromium
71
+ - name: run browser tests
72
+ run: |
73
+ . venv/bin/activate
74
+ CI=1 pnpm test:browser
75
+ - name: upload screenshots
76
+ uses: actions/upload-artifact@v4
77
+ if: always()
78
+ with:
79
+ name: playwright-screenshots
80
+ path: |
81
+ ./test-results
82
+ - name: run browser component tests
83
+ run: |
84
+ . venv/bin/activate
85
+ pnpm run test:ct
86
+ - name: save artifacts
87
+ uses: actions/upload-artifact@v4
88
+ with:
89
+ name: website-json-${{ steps.pr_details.outputs.pr_number }}
90
+ path: |
91
+ ./js/_website/src/lib/json
92
+ deploy_to_vercel:
93
+ uses: "./.github/workflows/deploy-website.yml"
94
+ needs: functional-test
95
+ if: always()
96
+ secrets:
97
+ gh_token: ${{ secrets.COMMENT_TOKEN }}
98
+ vercel_token: ${{ secrets.VERCEL_TOKEN }}
99
+ vercel_org_id: ${{ secrets.VERCEL_ORG_ID }}
100
+ vercel_project_id: ${{ secrets.VERCEL_PROJECT_ID }}
101
+ with:
102
+ branch_name: ${{ needs.functional-test.outputs.source_branch }}
103
+ pr_number: ${{ needs.functional-test.outputs.pr_number }}
.gitignore ADDED
@@ -0,0 +1,80 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Python build
2
+ .eggs/
3
+ gradio.egg-info
4
+ dist/
5
+ *.pyc
6
+ __pycache__/
7
+ *.py[cod]
8
+ *$py.class
9
+ build/
10
+ __tmp/*
11
+ *.pyi
12
+ py.typed
13
+
14
+ # JS build
15
+ gradio/templates/*
16
+ gradio/node/*
17
+ gradio/_frontend_code/*
18
+ js/gradio-preview/test/*
19
+
20
+ # Secrets
21
+ .env
22
+
23
+ # Gradio run artifacts
24
+ *.db
25
+ *.sqlite3
26
+ gradio/launches.json
27
+ flagged/
28
+ gradio_cached_examples/
29
+ tmp.zip
30
+
31
+ # Tests
32
+ .coverage
33
+ coverage.xml
34
+ test.txt
35
+ **/snapshots/**/*.png
36
+ playwright-report/
37
+
38
+ # Demos
39
+ demo/tmp.zip
40
+ demo/files/*.avi
41
+ demo/files/*.mp4
42
+ demo/all_demos/demos/*
43
+ demo/all_demos/requirements.txt
44
+ demo/*/config.json
45
+ demo/annotatedimage_component/*.png
46
+
47
+ # Etc
48
+ .idea/*
49
+ .DS_Store
50
+ *.bak
51
+ workspace.code-workspace
52
+ *.h5
53
+
54
+ # dev containers
55
+ .pnpm-store/
56
+
57
+ # log files
58
+ .pnpm-debug.log
59
+
60
+ # Local virtualenv for devs
61
+ .venv*
62
+
63
+ # FRP
64
+ gradio/frpc_*
65
+ .vercel
66
+
67
+ # js
68
+ node_modules
69
+ public/build/
70
+ test-results
71
+ client/js/test.js
72
+ .config/test.py
73
+
74
+ # storybook
75
+ storybook-static
76
+ build-storybook.log
77
+ js/storybook/theme.css
78
+
79
+ # playwright
80
+ .config/playwright/.cache
.vscode/extensions.json ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "recommendations": [
3
+ "dbaeumer.vscode-eslint",
4
+ "phoenisx.cssvar",
5
+ "esbenp.prettier-vscode",
6
+ "svelte.svelte-vscode",
7
+ "charliermarsh.ruff"
8
+ ]
9
+ }
.vscode/settings.json ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "python.formatting.provider": "none",
3
+ "cssvar.files": ["./js/node_modules/pollen-css/pollen.css"],
4
+ "cssvar.ignore": [],
5
+ "cssvar.disableSort": true,
6
+ "cssvar.extensions": ["js", "css", "html", "jsx", "tsx", "svelte"],
7
+ "python.analysis.extraPaths": ["./gradio/themes/utils"],
8
+ "svelte.plugin.svelte.format.enable": true,
9
+ "svelte.plugin.svelte.diagnostics.enable": false,
10
+ "svelte.enable-ts-plugin": true,
11
+ "prettier.configPath": ".config/.prettierrc.json",
12
+ "prettier.ignorePath": ".config/.prettierignore",
13
+ "python.analysis.typeCheckingMode": "basic",
14
+ "python.testing.pytestArgs": ["."],
15
+ "python.testing.unittestEnabled": false,
16
+ "python.testing.pytestEnabled": true,
17
+ "eslint.validate": ["javascript", "typescript", "html", "markdown", "svelte"],
18
+ "eslint.experimental.useFlatConfig": true,
19
+ "eslint.options": {
20
+ "overrideConfigFile": "./.config/eslint.config.js"
21
+ },
22
+ "typescript.tsdk": "node_modules/typescript/lib"
23
+ }
CHANGELOG.md ADDED
The diff for this file is too large to render. See raw diff
 
CITATION.cff ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ cff-version: 1.2.0
2
+ message: Please cite this project using these metadata.
3
+ title: "Gradio: Hassle-free sharing and testing of ML models in the wild"
4
+ abstract: >-
5
+ Accessibility is a major challenge of machine learning (ML).
6
+ Typical ML models are built by specialists and require
7
+ specialized hardware/software as well as ML experience to
8
+ validate. This makes it challenging for non-technical
9
+ collaborators and endpoint users (e.g. physicians) to easily
10
+ provide feedback on model development and to gain trust in
11
+ ML. The accessibility challenge also makes collaboration
12
+ more difficult and limits the ML researcher's exposure to
13
+ realistic data and scenarios that occur in the wild. To
14
+ improve accessibility and facilitate collaboration, we
15
+ developed an open-source Python package, Gradio, which
16
+ allows researchers to rapidly generate a visual interface
17
+ for their ML models. Gradio makes accessing any ML model as
18
+ easy as sharing a URL. Our development of Gradio is informed
19
+ by interviews with a number of machine learning researchers
20
+ who participate in interdisciplinary collaborations. Their
21
+ feedback identified that Gradio should support a variety of
22
+ interfaces and frameworks, allow for easy sharing of the
23
+ interface, allow for input manipulation and interactive
24
+ inference by the domain expert, as well as allow embedding
25
+ the interface in iPython notebooks. We developed these
26
+ features and carried out a case study to understand Gradio's
27
+ usefulness and usability in the setting of a machine
28
+ learning collaboration between a researcher and a
29
+ cardiologist.
30
+ authors:
31
+ - family-names: Abid
32
+ given-names: Abubakar
33
+ - family-names: Abdalla
34
+ given-names: Ali
35
+ - family-names: Abid
36
+ given-names: Ali
37
+ - family-names: Khan
38
+ given-names: Dawood
39
+ - family-names: Alfozan
40
+ given-names: Abdulrahman
41
+ - family-names: Zou
42
+ given-names: James
43
+ doi: 10.48550/arXiv.1906.02569
44
+ date-released: 2019-06-06
45
+ url: https://arxiv.org/abs/1906.02569