Killer01 commited on
Commit
e5e7619
β€’
1 Parent(s): 33219d1

Upload 15 files

Browse files
Files changed (15) hide show
  1. .env +114 -0
  2. .eslintrc.json +3 -0
  3. .gitignore +37 -0
  4. .nvmrc +1 -0
  5. CONTRIBUTORS.md +10 -0
  6. Dockerfile +65 -0
  7. LICENCE.md +170 -0
  8. README.md +189 -9
  9. components.json +16 -0
  10. next.config.js +12 -0
  11. package-lock.json +0 -0
  12. package.json +85 -0
  13. postcss.config.js +6 -0
  14. tailwind.config.js +82 -0
  15. tsconfig.json +28 -0
.env ADDED
@@ -0,0 +1,114 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Supported values:
2
+ # - VIDEOCHAIN
3
+ # - REPLICATE
4
+ # - INFERENCE_ENDPOINT
5
+ # - INFERENCE_API
6
+ # - OPENAI
7
+ RENDERING_ENGINE="INFERENCE_API"
8
+
9
+ # Supported values:
10
+ # - INFERENCE_ENDPOINT
11
+ # - INFERENCE_API
12
+ # - OPENAI
13
+ # - GROQ
14
+ # - ANTHROPIC
15
+ LLM_ENGINE="INFERENCE_API"
16
+
17
+ # set this to control the number of pages
18
+ MAX_NB_PAGES=6
19
+
20
+ # Set to "true" to create artificial delays and smooth out traffic
21
+ NEXT_PUBLIC_ENABLE_RATE_LIMITER="false"
22
+
23
+ # ------------- HUGGING FACE OAUTH -------------
24
+ ENABLE_HUGGING_FACE_OAUTH=
25
+ ENABLE_HUGGING_FACE_OAUTH_WALL=
26
+ HUGGING_FACE_OAUTH_CLIENT_ID=
27
+
28
+ # in production this should be the space's domain and/or URL
29
+ HUGGING_FACE_OAUTH_REDIRECT_URL=
30
+
31
+ # this one must be kept secret (and is unused for now)
32
+ HUGGING_FACE_OAUTH_SECRET=
33
+
34
+ # ------------- PROVIDER AUTH ------------
35
+ # You only need to configure the access token(s) for the provider(s) you want to use
36
+
37
+ # HuggingFace.co token: available for the LLM engine and the RENDERING engine
38
+ AUTH_HF_API_TOKEN=
39
+
40
+ # Replicate.com token: available for the RENDERING engine
41
+ AUTH_REPLICATE_API_TOKEN=
42
+
43
+ # OpenAI.com token: available for the LLM engine and the RENDERING engine
44
+ AUTH_OPENAI_API_KEY=
45
+
46
+ # An experimental RENDERING engine (sorry it is not very documented yet, so you can use one of the other engines)
47
+ AUTH_VIDEOCHAIN_API_TOKEN=
48
+
49
+ # Groq.com key: available for the LLM engine
50
+ AUTH_GROQ_API_KEY=
51
+
52
+ # Anthropic.com key: available for the LLM engine
53
+ AUTH_ANTHROPIC_API_KEY=
54
+
55
+ # ------------- RENDERING API CONFIG --------------
56
+
57
+ # If you decide to use Replicate for the RENDERING engine
58
+ RENDERING_REPLICATE_API_MODEL="stabilityai/sdxl"
59
+ RENDERING_REPLICATE_API_MODEL_VERSION="da77bc59ee60423279fd632efb4795ab731d9e3ca9705ef3341091fb989b7eaf"
60
+
61
+ # If you decide to use a private Hugging Face Inference Endpoint for the RENDERING engine
62
+ RENDERING_HF_INFERENCE_ENDPOINT_URL="https://XXXXXXXXXX.endpoints.huggingface.cloud"
63
+
64
+ # If you decide to use a Hugging Face Inference API model for the RENDERING engine
65
+ RENDERING_HF_INFERENCE_API_BASE_MODEL="stabilityai/stable-diffusion-xl-base-1.0"
66
+
67
+ # If you decide to use a Hugging Face Inference API model for the RENDERING engine
68
+ RENDERING_HF_INFERENCE_API_REFINER_MODEL="stabilityai/stable-diffusion-xl-refiner-1.0"
69
+
70
+ # If your model returns a different file type (eg. jpg or webp) change it here
71
+ RENDERING_HF_INFERENCE_API_FILE_TYPE="image/png"
72
+
73
+ # An experimental RENDERING engine (sorry it is not very documented yet, so you can use one of the other engines)
74
+ RENDERING_VIDEOCHAIN_API_URL="http://localhost:7860"
75
+
76
+ RENDERING_OPENAI_API_BASE_URL="https://api.openai.com/v1"
77
+ RENDERING_OPENAI_API_MODEL="dall-e-3"
78
+
79
+ # ------------- LLM API CONFIG ----------------
80
+
81
+ LLM_GROQ_API_MODEL="mixtral-8x7b-32768"
82
+
83
+ # If you decide to use OpenAI for the LLM engine
84
+ LLM_OPENAI_API_BASE_URL="https://api.openai.com/v1"
85
+ LLM_OPENAI_API_MODEL="gpt-4-turbo"
86
+
87
+ # If you decide to use Anthropic (eg. Claude) for the LLM engine
88
+ # https://docs.anthropic.com/claude/docs/models-overview
89
+ LLM_ANTHROPIC_API_MODEL="claude-3-opus-20240229"
90
+
91
+ # If you decide to use a private Hugging Face Inference Endpoint for the LLM engine
92
+ LLM_HF_INFERENCE_ENDPOINT_URL=""
93
+
94
+ # If you decide to use a Hugging Face Inference API model for the LLM engine
95
+ # LLM_HF_INFERENCE_API_MODEL="HuggingFaceH4/zephyr-7b-beta"
96
+ LLM_HF_INFERENCE_API_MODEL="HuggingFaceH4/zephyr-7b-beta"
97
+
98
+ # ----------- COMMUNITY SHARING (OPTIONAL) -----------
99
+ # You don't need those community sharing options to run the AI Comic Factory
100
+ # locally or on your own server (they are meant to be used by the Hugging Face team)
101
+ NEXT_PUBLIC_ENABLE_COMMUNITY_SHARING="false"
102
+ COMMUNITY_API_URL=
103
+ COMMUNITY_API_TOKEN=
104
+ COMMUNITY_API_ID=
105
+
106
+ # ----------- CENSORSHIP (OPTIONAL) -----------
107
+ # censorship is currently disabled, but will be required when we create a "community roll"
108
+ # (a public repository of user-generated comic strips)
109
+ ENABLE_CENSORSHIP="false"
110
+
111
+ # Due to the sensitive nature of some of the keywords we want to ban (users try all kinds of crazy illegal things)
112
+ # the words are not put in the clear in the source code, but behind an encryption key
113
+ # (I don't want the project to be flagged by an AI robot police on GitHub or something)
114
+ SECRET_FINGERPRINT=""
.eslintrc.json ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ {
2
+ "extends": "next/core-web-vitals"
3
+ }
.gitignore ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2
+
3
+ # dependencies
4
+ /node_modules
5
+ /.pnp
6
+ .pnp.js
7
+
8
+ # testing
9
+ /coverage
10
+
11
+ # next.js
12
+ /.next/
13
+ /out/
14
+
15
+ # production
16
+ /build
17
+
18
+ # misc
19
+ .DS_Store
20
+ *.pem
21
+
22
+ # debug
23
+ npm-debug.log*
24
+ yarn-debug.log*
25
+ yarn-error.log*
26
+
27
+ # local env files
28
+ .env*.local
29
+
30
+ # vercel
31
+ .vercel
32
+
33
+ # typescript
34
+ *.tsbuildinfo
35
+ next-env.d.ts
36
+
37
+ pnpm-lock.yaml
.nvmrc ADDED
@@ -0,0 +1 @@
 
 
1
+ v20.9.0
CONTRIBUTORS.md ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ This project was developed by Julian Bilcke (@jbilcke-hf), as part of his work at Hugging Face.
2
+
3
+ ------------------------------------------
4
+
5
+ A huge thanks to external developers for their contributions!
6
+
7
+ 艾逗笔 (@idoubi):
8
+ - [feature] Added support for OpenAI: https://github.com/jbilcke-hf/ai-comic-factory/pull/6
9
+ - [bug] predict import error (use dynamic imports for the LLM provider): https://github.com/jbilcke-hf/ai-comic-factory/pull/9
10
+
Dockerfile ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ FROM node:20-alpine AS base
2
+
3
+ # Install dependencies only when needed
4
+ FROM base AS deps
5
+ # Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
6
+ RUN apk add --no-cache libc6-compat
7
+ WORKDIR /app
8
+
9
+ # Install dependencies based on the preferred package manager
10
+ COPY package.json yarn.lock* package-lock.json* pnpm-lock.yaml* ./
11
+ RUN \
12
+ if [ -f yarn.lock ]; then yarn --frozen-lockfile; \
13
+ elif [ -f package-lock.json ]; then npm ci; \
14
+ elif [ -f pnpm-lock.yaml ]; then yarn global add pnpm && pnpm i --frozen-lockfile; \
15
+ else echo "Lockfile not found." && exit 1; \
16
+ fi
17
+
18
+ # Uncomment the following lines if you want to use a secret at buildtime,
19
+ # for example to access your private npm packages
20
+ # RUN --mount=type=secret,id=HF_EXAMPLE_SECRET,mode=0444,required=true \
21
+ # $(cat /run/secrets/HF_EXAMPLE_SECRET)
22
+
23
+ # Rebuild the source code only when needed
24
+ FROM base AS builder
25
+ WORKDIR /app
26
+ COPY --from=deps /app/node_modules ./node_modules
27
+ COPY . .
28
+
29
+ # Next.js collects completely anonymous telemetry data about general usage.
30
+ # Learn more here: https://nextjs.org/telemetry
31
+ # Uncomment the following line in case you want to disable telemetry during the build.
32
+ # ENV NEXT_TELEMETRY_DISABLED 1
33
+
34
+ # RUN yarn build
35
+
36
+ # If you use yarn, comment out this line and use the line above
37
+ RUN npm run build
38
+
39
+ # Production image, copy all the files and run next
40
+ FROM base AS runner
41
+ WORKDIR /app
42
+
43
+ ENV NODE_ENV production
44
+ # Uncomment the following line in case you want to disable telemetry during runtime.
45
+ # ENV NEXT_TELEMETRY_DISABLED 1
46
+
47
+ RUN addgroup --system --gid 1001 nodejs
48
+ RUN adduser --system --uid 1001 nextjs
49
+
50
+ COPY --from=builder /app/public ./public
51
+
52
+ # Automatically leverage output traces to reduce image size
53
+ # https://nextjs.org/docs/advanced-features/output-file-tracing
54
+ COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
55
+ COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
56
+ COPY --from=builder --chown=nextjs:nodejs /app/.next/cache ./.next/cache
57
+ # COPY --from=builder --chown=nextjs:nodejs /app/.next/cache/fetch-cache ./.next/cache/fetch-cache
58
+
59
+ USER nextjs
60
+
61
+ EXPOSE 3000
62
+
63
+ ENV PORT 3000
64
+
65
+ CMD ["node", "server.js"]
LICENCE.md ADDED
@@ -0,0 +1,170 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Apache License
2
+ ==============
3
+
4
+ _Version 2.0, January 2004_
5
+ _&lt;<http://www.apache.org/licenses/>&gt;_
6
+
7
+ ### Terms and Conditions for use, reproduction, and distribution
8
+
9
+ #### 1. Definitions
10
+
11
+ “License” shall mean the terms and conditions for use, reproduction, and
12
+ distribution as defined by Sections 1 through 9 of this document.
13
+
14
+ “Licensor” shall mean the copyright owner or entity authorized by the copyright
15
+ owner that is granting the License.
16
+
17
+ “Legal Entity” shall mean the union of the acting entity and all other entities
18
+ that control, are controlled by, or are under common control with that entity.
19
+ For the purposes of this definition, “control” means **(i)** the power, direct or
20
+ indirect, to cause the direction or management of such entity, whether by
21
+ contract or otherwise, or **(ii)** ownership of fifty percent (50%) or more of the
22
+ outstanding shares, or **(iii)** beneficial ownership of such entity.
23
+
24
+ “You” (or “Your”) shall mean an individual or Legal Entity exercising
25
+ permissions granted by this License.
26
+
27
+ “Source” form shall mean the preferred form for making modifications, including
28
+ but not limited to software source code, documentation source, and configuration
29
+ files.
30
+
31
+ “Object” form shall mean any form resulting from mechanical transformation or
32
+ translation of a Source form, including but not limited to compiled object code,
33
+ generated documentation, and conversions to other media types.
34
+
35
+ “Work” shall mean the work of authorship, whether in Source or Object form, made
36
+ available under the License, as indicated by a copyright notice that is included
37
+ in or attached to the work (an example is provided in the Appendix below).
38
+
39
+ “Derivative Works” shall mean any work, whether in Source or Object form, that
40
+ is based on (or derived from) the Work and for which the editorial revisions,
41
+ annotations, elaborations, or other modifications represent, as a whole, an
42
+ original work of authorship. For the purposes of this License, Derivative Works
43
+ shall not include works that remain separable from, or merely link (or bind by
44
+ name) to the interfaces of, the Work and Derivative Works thereof.
45
+
46
+ “Contribution” shall mean any work of authorship, including the original version
47
+ of the Work and any modifications or additions to that Work or Derivative Works
48
+ thereof, that is intentionally submitted to Licensor for inclusion in the Work
49
+ by the copyright owner or by an individual or Legal Entity authorized to submit
50
+ on behalf of the copyright owner. For the purposes of this definition,
51
+ “submitted” means any form of electronic, verbal, or written communication sent
52
+ to the Licensor or its representatives, including but not limited to
53
+ communication on electronic mailing lists, source code control systems, and
54
+ issue tracking systems that are managed by, or on behalf of, the Licensor for
55
+ the purpose of discussing and improving the Work, but excluding communication
56
+ that is conspicuously marked or otherwise designated in writing by the copyright
57
+ owner as “Not a Contribution.”
58
+
59
+ “Contributor” shall mean Licensor and any individual or Legal Entity on behalf
60
+ of whom a Contribution has been received by Licensor and subsequently
61
+ incorporated within the Work.
62
+
63
+ #### 2. Grant of Copyright License
64
+
65
+ Subject to the terms and conditions of this License, each Contributor hereby
66
+ grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
67
+ irrevocable copyright license to reproduce, prepare Derivative Works of,
68
+ publicly display, publicly perform, sublicense, and distribute the Work and such
69
+ Derivative Works in Source or Object form.
70
+
71
+ #### 3. Grant of Patent License
72
+
73
+ Subject to the terms and conditions of this License, each Contributor hereby
74
+ grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free,
75
+ irrevocable (except as stated in this section) patent license to make, have
76
+ made, use, offer to sell, sell, import, and otherwise transfer the Work, where
77
+ such license applies only to those patent claims licensable by such Contributor
78
+ that are necessarily infringed by their Contribution(s) alone or by combination
79
+ of their Contribution(s) with the Work to which such Contribution(s) was
80
+ submitted. If You institute patent litigation against any entity (including a
81
+ cross-claim or counterclaim in a lawsuit) alleging that the Work or a
82
+ Contribution incorporated within the Work constitutes direct or contributory
83
+ patent infringement, then any patent licenses granted to You under this License
84
+ for that Work shall terminate as of the date such litigation is filed.
85
+
86
+ #### 4. Redistribution
87
+
88
+ You may reproduce and distribute copies of the Work or Derivative Works thereof
89
+ in any medium, with or without modifications, and in Source or Object form,
90
+ provided that You meet the following conditions:
91
+
92
+ * **(a)** You must give any other recipients of the Work or Derivative Works a copy of
93
+ this License; and
94
+ * **(b)** You must cause any modified files to carry prominent notices stating that You
95
+ changed the files; and
96
+ * **(c)** You must retain, in the Source form of any Derivative Works that You distribute,
97
+ all copyright, patent, trademark, and attribution notices from the Source form
98
+ of the Work, excluding those notices that do not pertain to any part of the
99
+ Derivative Works; and
100
+ * **(d)** If the Work includes a “NOTICE” text file as part of its distribution, then any
101
+ Derivative Works that You distribute must include a readable copy of the
102
+ attribution notices contained within such NOTICE file, excluding those notices
103
+ that do not pertain to any part of the Derivative Works, in at least one of the
104
+ following places: within a NOTICE text file distributed as part of the
105
+ Derivative Works; within the Source form or documentation, if provided along
106
+ with the Derivative Works; or, within a display generated by the Derivative
107
+ Works, if and wherever such third-party notices normally appear. The contents of
108
+ the NOTICE file are for informational purposes only and do not modify the
109
+ License. You may add Your own attribution notices within Derivative Works that
110
+ You distribute, alongside or as an addendum to the NOTICE text from the Work,
111
+ provided that such additional attribution notices cannot be construed as
112
+ modifying the License.
113
+
114
+ You may add Your own copyright statement to Your modifications and may provide
115
+ additional or different license terms and conditions for use, reproduction, or
116
+ distribution of Your modifications, or for any such Derivative Works as a whole,
117
+ provided Your use, reproduction, and distribution of the Work otherwise complies
118
+ with the conditions stated in this License.
119
+
120
+ #### 5. Submission of Contributions
121
+
122
+ Unless You explicitly state otherwise, any Contribution intentionally submitted
123
+ for inclusion in the Work by You to the Licensor shall be under the terms and
124
+ conditions of this License, without any additional terms or conditions.
125
+ Notwithstanding the above, nothing herein shall supersede or modify the terms of
126
+ any separate license agreement you may have executed with Licensor regarding
127
+ such Contributions.
128
+
129
+ #### 6. Trademarks
130
+
131
+ This License does not grant permission to use the trade names, trademarks,
132
+ service marks, or product names of the Licensor, except as required for
133
+ reasonable and customary use in describing the origin of the Work and
134
+ reproducing the content of the NOTICE file.
135
+
136
+ #### 7. Disclaimer of Warranty
137
+
138
+ Unless required by applicable law or agreed to in writing, Licensor provides the
139
+ Work (and each Contributor provides its Contributions) on an “AS IS” BASIS,
140
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied,
141
+ including, without limitation, any warranties or conditions of TITLE,
142
+ NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are
143
+ solely responsible for determining the appropriateness of using or
144
+ redistributing the Work and assume any risks associated with Your exercise of
145
+ permissions under this License.
146
+
147
+ #### 8. Limitation of Liability
148
+
149
+ In no event and under no legal theory, whether in tort (including negligence),
150
+ contract, or otherwise, unless required by applicable law (such as deliberate
151
+ and grossly negligent acts) or agreed to in writing, shall any Contributor be
152
+ liable to You for damages, including any direct, indirect, special, incidental,
153
+ or consequential damages of any character arising as a result of this License or
154
+ out of the use or inability to use the Work (including but not limited to
155
+ damages for loss of goodwill, work stoppage, computer failure or malfunction, or
156
+ any and all other commercial damages or losses), even if such Contributor has
157
+ been advised of the possibility of such damages.
158
+
159
+ #### 9. Accepting Warranty or Additional Liability
160
+
161
+ While redistributing the Work or Derivative Works thereof, You may choose to
162
+ offer, and charge a fee for, acceptance of support, warranty, indemnity, or
163
+ other liability obligations and/or rights consistent with this License. However,
164
+ in accepting such obligations, You may act only on Your own behalf and on Your
165
+ sole responsibility, not on behalf of any other Contributor, and only if You
166
+ agree to indemnify, defend, and hold each Contributor harmless for any liability
167
+ incurred by, or claims asserted against, such Contributor by reason of your
168
+ accepting any such warranty or additional liability.
169
+
170
+ _END OF TERMS AND CONDITIONS_
README.md CHANGED
@@ -1,12 +1,192 @@
1
  ---
2
- title: ComicGenerator
3
- emoji: πŸ“š
4
- colorFrom: purple
5
- colorTo: green
6
- sdk: streamlit
7
- sdk_version: 1.36.0
8
- app_file: app.py
9
- pinned: false
 
 
 
 
10
  ---
11
 
12
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  ---
2
+ title: AI Comic Factory
3
+ emoji: πŸ‘©β€πŸŽ¨
4
+ colorFrom: red
5
+ colorTo: yellow
6
+ sdk: docker
7
+ pinned: true
8
+ app_port: 3000
9
+ disable_embedding: true
10
+ short_description: Create your own AI comic with a single prompt
11
+ hf_oauth: true
12
+ hf_oauth_expiration_minutes: 43200
13
+ hf_oauth_scopes: [inference-api]
14
  ---
15
 
16
+ # AI Comic Factory
17
+
18
+ Last release: AI Comic Factory 1.2
19
+
20
+ The AI Comic Factory will soon have an official website: [aicomicfactory.app](https://aicomicfactory.app)
21
+
22
+ For more information about my other projects please check [linktr.ee/FLNGR](https://linktr.ee/FLNGR).
23
+
24
+ ## Running the project at home
25
+
26
+ First, I would like to highlight that everything is open-source (see [here](https://huggingface.co/spaces/jbilcke-hf/ai-comic-factory/tree/main), [here](https://huggingface.co/spaces/jbilcke-hf/VideoChain-API/tree/main), [here](https://huggingface.co/spaces/hysts/SD-XL/tree/main), [here](https://github.com/huggingface/text-generation-inference)).
27
+
28
+ However, the project isn't a monolithic Space that can be duplicated and run immediately:
29
+ it requires various components to run for the frontend, backend, LLM, SDXL etc.
30
+
31
+ If you try to duplicate the project and open the `.env`, you will see it requires some variables.
32
+
33
+ Provider config:
34
+ - `LLM_ENGINE`: can be one of `INFERENCE_API`, `INFERENCE_ENDPOINT`, `OPENAI`, `GROQ`, `ANTHROPIC`
35
+ - `RENDERING_ENGINE`: can be one of: "INFERENCE_API", "INFERENCE_ENDPOINT", "REPLICATE", "VIDEOCHAIN", "OPENAI" for now, unless you code your custom solution
36
+
37
+ Auth config:
38
+ - `AUTH_HF_API_TOKEN`: if you decide to use Hugging Face for the LLM engine (inference api model or a custom inference endpoint)
39
+ - `AUTH_OPENAI_API_KEY`: to use OpenAI for the LLM engine
40
+ - `AUTH_GROQ_API_KEY`: to use Groq for the LLM engine
41
+ - `AUTH_ANTHROPIC_API_KEY`: to use Anthropic (Claude) for the LLM engine
42
+ - `AUTH_VIDEOCHAIN_API_TOKEN`: secret token to access the VideoChain API server
43
+ - `AUTH_REPLICATE_API_TOKEN`: in case you want to use Replicate.com
44
+
45
+ Rendering config:
46
+ - `RENDERING_HF_INFERENCE_ENDPOINT_URL`: necessary if you decide to use a custom inference endpoint
47
+ - `RENDERING_VIDEOCHAIN_API_URL`: URL to the VideoChain API server
48
+ - `RENDERING_HF_INFERENCE_ENDPOINT_URL`: optional, defaults to nothing
49
+ - `RENDERING_HF_INFERENCE_API_BASE_MODEL`: optional, defaults to "stabilityai/stable-diffusion-xl-base-1.0"
50
+ - `RENDERING_HF_INFERENCE_API_REFINER_MODEL`: optional, defaults to "stabilityai/stable-diffusion-xl-refiner-1.0"
51
+ - `RENDERING_REPLICATE_API_MODEL`: optional, defaults to "stabilityai/sdxl"
52
+ - `RENDERING_REPLICATE_API_MODEL_VERSION`: optional, in case you want to change the version
53
+
54
+ Language model config (depending on the LLM engine you decide to use):
55
+ - `LLM_HF_INFERENCE_ENDPOINT_URL`: "<use your own>"
56
+ - `LLM_HF_INFERENCE_API_MODEL`: "HuggingFaceH4/zephyr-7b-beta"
57
+ - `LLM_OPENAI_API_BASE_URL`: "https://api.openai.com/v1"
58
+ - `LLM_OPENAI_API_MODEL`: "gpt-4-turbo"
59
+ - `LLM_GROQ_API_MODEL`: "mixtral-8x7b-32768"
60
+ - `LLM_ANTHROPIC_API_MODEL`: "claude-3-opus-20240229"
61
+
62
+ In addition, there are some community sharing variables that you can just ignore.
63
+ Those variables are not required to run the AI Comic Factory on your own website or computer
64
+ (they are meant to create a connection with the Hugging Face community,
65
+ and thus only make sense for official Hugging Face apps):
66
+ - `NEXT_PUBLIC_ENABLE_COMMUNITY_SHARING`: you don't need this
67
+ - `COMMUNITY_API_URL`: you don't need this
68
+ - `COMMUNITY_API_TOKEN`: you don't need this
69
+ - `COMMUNITY_API_ID`: you don't need this
70
+
71
+ Please read the `.env` default config file for more information.
72
+ To customise a variable locally, you should create a `.env.local`
73
+ (do not commit this file as it will contain your secrets).
74
+
75
+ -> If you intend to run it with local, cloud-hosted and/or proprietary models **you are going to need to code 👨‍💻**.
76
+
77
+ ## The LLM API (Large Language Model)
78
+
79
+ Currently the AI Comic Factory uses [zephyr-7b-beta](https://huggingface.co/HuggingFaceH4/zephyr-7b-beta) through an [Inference Endpoint](https://huggingface.co/docs/inference-endpoints/index).
80
+
81
+ You have multiple options:
82
+
83
+ ### Option 1: Use an Inference API model
84
+
85
+ This is a new option added recently, where you can use one of the models from the Hugging Face Hub. By default we suggest to use [zephyr-7b-beta](https://huggingface.co/HuggingFaceH4/zephyr-7b-beta) as it will provide better results than the 7b model.
86
+
87
+ To activate it, create a `.env.local` configuration file:
88
+
89
+ ```bash
90
+ LLM_ENGINE="INFERENCE_API"
91
+
92
+ HF_API_TOKEN="Your Hugging Face token"
93
+
94
+ # "HuggingFaceH4/zephyr-7b-beta" is used by default, but you can change this
95
+ # note: You should use a model able to generate JSON responses,
96
+ # so it is strongly suggested to use at least the 34b model
97
+ HF_INFERENCE_API_MODEL="HuggingFaceH4/zephyr-7b-beta"
98
+ ```
99
+
100
+ ### Option 2: Use an Inference Endpoint URL
101
+
102
+ If you would like to run the AI Comic Factory on a private LLM running on the Hugging Face Inference Endpoint service, create a `.env.local` configuration file:
103
+
104
+ ```bash
105
+ LLM_ENGINE="INFERENCE_ENDPOINT"
106
+
107
+ HF_API_TOKEN="Your Hugging Face token"
108
+
109
+ HF_INFERENCE_ENDPOINT_URL="path to your inference endpoint url"
110
+ ```
111
+
112
+ To run this kind of LLM locally, you can use [TGI](https://github.com/huggingface/text-generation-inference) (Please read [this post](https://github.com/huggingface/text-generation-inference/issues/726) for more information about the licensing).
113
+
114
+ ### Option 3: Use an OpenAI API Key
115
+
116
+ This is a new option added recently, where you can use OpenAI API with an OpenAI API Key.
117
+
118
+ To activate it, create a `.env.local` configuration file:
119
+
120
+ ```bash
121
+ LLM_ENGINE="OPENAI"
122
+
123
+ # default openai api base url is: https://api.openai.com/v1
124
+ LLM_OPENAI_API_BASE_URL="A custom OpenAI API Base URL if you have some special privileges"
125
+
126
+ LLM_OPENAI_API_MODEL="gpt-4-turbo"
127
+
128
+ AUTH_OPENAI_API_KEY="Your own OpenAI API Key"
129
+ ```
130
+ ### Option 4: (new, experimental) use Groq
131
+
132
+ ```bash
133
+ LLM_ENGINE="GROQ"
134
+
135
+ LLM_GROQ_API_MODEL="mixtral-8x7b-32768"
136
+
137
+ AUTH_GROQ_API_KEY="Your own GROQ API Key"
138
+ ```
139
+ ### Option 5: (new, experimental) use Anthropic (Claude)
140
+
141
+ ```bash
142
+ LLM_ENGINE="ANTHROPIC"
143
+
144
+ LLM_ANTHROPIC_API_MODEL="claude-3-opus-20240229"
145
+
146
+ AUTH_ANTHROPIC_API_KEY="Your own ANTHROPIC API Key"
147
+ ```
148
+
149
+ ### Option 6: Fork and modify the code to use a different LLM system
150
+
151
+ Another option could be to disable the LLM completely and replace it with another LLM protocol and/or provider (eg. Claude, Replicate), or a human-generated story instead (by returning mock or static data).
152
+
153
+ ### Notes
154
+
155
+ It is possible that I modify the AI Comic Factory to make it easier in the future (eg. add support for Claude or Replicate)
156
+
157
+ ## The Rendering API
158
+
159
+ This API is used to generate the panel images. This is an API I created for my various projects at Hugging Face.
160
+
161
+ I haven't written documentation for it yet, but basically it is "just a wrapper ™" around other existing APIs:
162
+
163
+ - The [hysts/SD-XL](https://huggingface.co/spaces/hysts/SD-XL?duplicate=true) Space by [@hysts](https://huggingface.co/hysts)
164
+ - And other APIs for making videos, adding audio etc.. but you won't need them for the AI Comic Factory
165
+
166
+ ### Option 1: Deploy VideoChain yourself
167
+
168
+ You will have to [clone](https://huggingface.co/spaces/jbilcke-hf/VideoChain-API?duplicate=true) the [source-code](https://huggingface.co/spaces/jbilcke-hf/VideoChain-API/tree/main)
169
+
170
+ Unfortunately, I haven't had the time to write the documentation for VideoChain yet.
171
+ (When I do I will update this document to point to the VideoChain's README)
172
+
173
+
174
+ ### Option 2: Use Replicate
175
+
176
+ To use Replicate, create a `.env.local` configuration file:
177
+
178
+ ```bash
179
+ RENDERING_ENGINE="REPLICATE"
180
+
181
+ RENDERING_REPLICATE_API_MODEL="stabilityai/sdxl"
182
+
183
+ RENDERING_REPLICATE_API_MODEL_VERSION="da77bc59ee60423279fd632efb4795ab731d9e3ca9705ef3341091fb989b7eaf"
184
+
185
+ AUTH_REPLICATE_API_TOKEN="Your Replicate token"
186
+ ```
187
+
188
+ ### Option 3: Use another SDXL API
189
+
190
+ If you fork the project you will be able to modify the code to use the Stable Diffusion technology of your choice (local, open-source, proprietary, your custom HF Space etc).
191
+
192
+ It would even be something else, such as Dall-E.
components.json ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "$schema": "https://ui.shadcn.com/schema.json",
3
+ "style": "default",
4
+ "rsc": true,
5
+ "tsx": true,
6
+ "tailwind": {
7
+ "config": "tailwind.config.js",
8
+ "css": "app/globals.css",
9
+ "baseColor": "stone",
10
+ "cssVariables": false
11
+ },
12
+ "aliases": {
13
+ "components": "@/components",
14
+ "utils": "@/lib/utils"
15
+ }
16
+ }
next.config.js ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /** @type {import('next').NextConfig} */
2
+ const nextConfig = {
3
+ output: 'standalone',
4
+
5
+ experimental: {
6
+ serverActions: {
7
+ bodySizeLimit: '8mb',
8
+ },
9
+ }
10
+ }
11
+
12
+ module.exports = nextConfig
package-lock.json ADDED
The diff for this file is too large to render. See raw diff
 
package.json ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "@jbilcke/comic-factory",
3
+ "version": "1.2.1",
4
+ "private": true,
5
+ "scripts": {
6
+ "dev": "next dev",
7
+ "build": "next build",
8
+ "start": "next start",
9
+ "lint": "next lint"
10
+ },
11
+ "dependencies": {
12
+ "@aitube/clap": "0.0.27",
13
+ "@anthropic-ai/sdk": "^0.19.1",
14
+ "@huggingface/hub": "^0.15.1",
15
+ "@huggingface/inference": "^2.6.1",
16
+ "@radix-ui/react-accordion": "^1.1.2",
17
+ "@radix-ui/react-avatar": "^1.0.3",
18
+ "@radix-ui/react-checkbox": "^1.0.4",
19
+ "@radix-ui/react-collapsible": "^1.0.3",
20
+ "@radix-ui/react-dialog": "^1.0.4",
21
+ "@radix-ui/react-dropdown-menu": "^2.0.5",
22
+ "@radix-ui/react-icons": "^1.3.0",
23
+ "@radix-ui/react-label": "^2.0.2",
24
+ "@radix-ui/react-menubar": "^1.0.3",
25
+ "@radix-ui/react-popover": "^1.0.6",
26
+ "@radix-ui/react-select": "^1.2.2",
27
+ "@radix-ui/react-separator": "^1.0.3",
28
+ "@radix-ui/react-slider": "^1.1.2",
29
+ "@radix-ui/react-slot": "^1.0.2",
30
+ "@radix-ui/react-switch": "^1.0.3",
31
+ "@radix-ui/react-toast": "^1.1.4",
32
+ "@radix-ui/react-tooltip": "^1.0.6",
33
+ "@types/node": "20.4.2",
34
+ "@types/react": "18.3.0",
35
+ "@types/react-dom": "18.3.0",
36
+ "@types/uuid": "^9.0.2",
37
+ "autoprefixer": "10.4.18",
38
+ "class-variance-authority": "^0.6.1",
39
+ "clsx": "^2.1.0",
40
+ "cmdk": "^0.2.0",
41
+ "cookies-next": "^2.1.2",
42
+ "date-fns": "^2.30.0",
43
+ "encoding": "^0.1.13",
44
+ "eslint": "8.45.0",
45
+ "eslint-config-next": "13.4.10",
46
+ "groq-sdk": "^0.3.1",
47
+ "html2canvas": "^1.4.1",
48
+ "i": "^0.3.7",
49
+ "konva": "^9.2.2",
50
+ "lucide-react": "^0.260.0",
51
+ "next": "14.2.3",
52
+ "npm": "^10.7.0",
53
+ "openai": "^4.29.2",
54
+ "pick": "^0.0.1",
55
+ "postcss": "8.4.37",
56
+ "query-string": "^9.0.0",
57
+ "react": "18.3.1",
58
+ "react-circular-progressbar": "^2.1.0",
59
+ "react-contenteditable": "^3.3.7",
60
+ "react-dom": "18.3.1",
61
+ "react-draggable": "^4.4.6",
62
+ "react-hook-consent": "^3.5.3",
63
+ "react-icons": "^4.11.0",
64
+ "react-konva": "^18.2.10",
65
+ "react-virtualized-auto-sizer": "^1.0.20",
66
+ "replicate": "^0.29.0",
67
+ "sbd": "^1.0.19",
68
+ "sharp": "^0.33.4",
69
+ "tailwind-merge": "^2.2.2",
70
+ "tailwindcss": "3.4.1",
71
+ "tailwindcss-animate": "^1.0.6",
72
+ "ts-node": "^10.9.1",
73
+ "typescript": "^5.4.5",
74
+ "use-file-picker": "^2.1.2",
75
+ "usehooks-ts": "2.9.1",
76
+ "uuid": "^9.0.0",
77
+ "yaml": "^2.4.5",
78
+ "zustand": "^4.5.1"
79
+ },
80
+ "devDependencies": {
81
+ "@types/qs": "^6.9.7",
82
+ "@types/react-virtualized": "^9.21.22",
83
+ "@types/sbd": "^1.0.3"
84
+ }
85
+ }
postcss.config.js ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
/**
 * PostCSS pipeline configuration.
 * Plugin order matters: Tailwind generates the utility CSS first,
 * then Autoprefixer adds vendor prefixes to the result.
 */
const config = {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
};

module.exports = config;
tailwind.config.js ADDED
@@ -0,0 +1,82 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /** @type {import('tailwindcss').Config} */
2
+ module.exports = {
3
+ darkMode: ["class"],
4
+ content: [
5
+ './pages/**/*.{ts,tsx}',
6
+ './components/**/*.{ts,tsx}',
7
+ './app/**/*.{ts,tsx}',
8
+ './src/**/*.{ts,tsx}',
9
+ './src/lib/fonts.ts'
10
+ ],
11
+ theme: {
12
+ container: {
13
+ center: true,
14
+ padding: "2rem",
15
+ screens: {
16
+ "2xl": "1400px",
17
+ },
18
+ },
19
+ extend: {
20
+ spacing: {
21
+ 17: '4.25rem', // 68px
22
+ 18: '4.5rem', // 72px
23
+ 19: '4.75rem', // 76px
24
+ 20: '5rem', // 80px
25
+ 21: '5.25rem', // 84px
26
+ 22: '5.5rem', // 88px
27
+ 22: '5.5rem', // 88px
28
+ 26: '6.5rem', // 104px
29
+ },
30
+ fontFamily: {
31
+ indieflower: ['var(--font-indieflower)'],
32
+ thegirlnextdoor: ['var(--font-the-girl-next-door)'],
33
+ komika: ['var(--font-komika)'],
34
+ actionman: ['var(--font-action-man)'],
35
+ karantula: ['var(--font-karantula)'],
36
+ manoskope: ['var(--font-manoskope)'],
37
+ paeteround: ['var(--font-paete-round)'],
38
+ qarmic: ['var(--font-qarmic-sans)'],
39
+ archrival: ['var(--font-sf-arch-rival)'],
40
+ cartoonist: ['var(--font-sf-cartoonist-hand)'],
41
+ toontime: ['var(--font-sf-toontime)'],
42
+ vtc: ['var(--font-vtc-letterer-pro)'],
43
+ digitalstrip: ['var(--font-digital-strip-bb)'],
44
+ },
45
+ fontSize: {
46
+ "7xs": "5px",
47
+ "7xs": "6px",
48
+ "6xs": "7px",
49
+ "5xs": "8px",
50
+ "4xs": "9px",
51
+ "3xs": "10px",
52
+ "2xs": "11px"
53
+ },
54
+ keyframes: {
55
+ "accordion-down": {
56
+ from: { height: 0 },
57
+ to: { height: "var(--radix-accordion-content-height)" },
58
+ },
59
+ "accordion-up": {
60
+ from: { height: "var(--radix-accordion-content-height)" },
61
+ to: { height: 0 },
62
+ },
63
+ },
64
+ animation: {
65
+ "accordion-down": "accordion-down 0.2s ease-out",
66
+ "accordion-up": "accordion-up 0.2s ease-out",
67
+ },
68
+ screens: {
69
+ 'print': { 'raw': 'print' },
70
+ },
71
+ gridTemplateColumns: {
72
+ '12': 'repeat(12, minmax(0, 1fr))',
73
+ '16': 'repeat(16, minmax(0, 1fr))',
74
+ },
75
+ gridTemplateRows: {
76
+ '12': 'repeat(12, minmax(0, 1fr))',
77
+ '16': 'repeat(16, minmax(0, 1fr))',
78
+ }
79
+ },
80
+ },
81
+ plugins: [require("tailwindcss-animate")],
82
+ }
tsconfig.json ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "compilerOptions": {
3
+ "target": "ES2022",
4
+ "lib": ["dom", "dom.iterable", "esnext"],
5
+ "allowJs": true,
6
+ "skipLibCheck": true,
7
+ "strict": true,
8
+ "forceConsistentCasingInFileNames": true,
9
+ "noEmit": true,
10
+ "esModuleInterop": true,
11
+ "module": "esnext",
12
+ "moduleResolution": "node",
13
+ "resolveJsonModule": true,
14
+ "isolatedModules": true,
15
+ "jsx": "preserve",
16
+ "incremental": true,
17
+ "plugins": [
18
+ {
19
+ "name": "next"
20
+ }
21
+ ],
22
+ "paths": {
23
+ "@/*": ["./src/*"]
24
+ }
25
+ },
26
+ "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
27
+ "exclude": ["node_modules"]
28
+ }