lunarflu HF Staff committed
Commit 8a0bc2e Β· 1 Parent(s): bf85dbe

[app.py] 1.1 PR

Files changed (1)
  1. app.py +59 -472
app.py CHANGED
@@ -1,503 +1,90 @@
 import discord
-import os
-import threading
 import gradio as gr
-import requests
-import json
-import random
-import time
-import re
-from discord import Embed, Color
-from discord.ext import commands
-# aa
-from gradio_client import Client
-from PIL import Image
-#from ratelimiter import RateLimiter
-
-import asyncio
-import concurrent.futures
-import multiprocessing
-
-import shutil # for doing image movement magic
-
-#import tempfile
-#import glob
-
-import uuid
-import glob

-#todo
-# experiment with animeGANv2
-#βœ… tasks for concurrent coroutines (now jobs)
-# ratelimits

-# enlarge each of 4 images?
-# Error: [Errno 104] Connection reset by peer?

-# clean up old threads
-# safety for on_reaction_add?
-# could use one channel, use threads to organize it. Otherwise may be too split and harder to keep track of
-# lock generation after ~120s, can change
-# restructure using slash commands? generate -> deepfloydif -> prompt -> thread -> combined -> upscale -> thread

-HF_TOKEN = os.getenv('HF_TOKEN')
-DISCORD_TOKEN = os.environ.get("DISCORD_TOKEN", None)

-df = Client("huggingface-projects/IF", HF_TOKEN)
-#jojogan = Client("akhaliq/JoJoGAN", HF_TOKEN)
-falconclient = Client("HuggingFaceH4/falcon-chat", HF_TOKEN)

-intents = discord.Intents.default()
-intents.message_content = True

-bot = commands.Bot(command_prefix='!', intents=intents)

-#---------------------------------------------------------------------------------------------------------------------------------------------
-@bot.event
 async def on_ready():
-    print('Logged on as', bot.user)
-    bot.log_channel = bot.get_channel(1100458786826747945) # 1100458786826747945 = bot-test, 1107006391547342910 = lunarbot server
-#---------------------------------------------------------------------------------------------------------------------------------------------
-async def safetychecks(ctx):
-    failure_emoji = '<:disagree:1098628957521313892>'
-    try:
-        if ctx.author.bot:
-            print(f"Error: The bot is not allowed to use its own commands.")
-            await ctx.message.add_reaction(failure_emoji)
-            return False
-
-        #βœ…βœ… check if the bot is offline
-        offline_bot_role_id = 1103676632667017266
-        bot_member = ctx.guild.get_member(bot.user.id)
-        if any(role.id == offline_bot_role_id for role in bot_member.roles):
-            print(f"Error: {ctx.author} The bot is offline or under maintenance. (Remove the offline-bot role to bring it online)")
-            thread = await ctx.message.create_thread(name=f'Offline Error')
-            await thread.send(f"Error: {ctx.author.mention} The bot is offline or under maintenance. (Remove the offline-bot role to bring it online)")
-            await ctx.message.add_reaction(failure_emoji)
-            return False
-
-        '''
-        # review this, may be able to remove
-        #βœ…βœ… check if the command is in the allowed channel(s)
-        bot_test = 1100458786826747945
-        deepfloydif_channel = 1119313215675973714
-        falcon_channel = 1119313248056004729
-
-        channel_ids = [bot_test, deepfloydif_channel, jojo_channel, spidey_channel, sketch_channel, falcon_channel]
-        if ctx.channel.id not in channel_ids:
-            print(f"{ctx.author}, commands are not permitted in {ctx.channel}")
-            thread = await ctx.message.create_thread(name=f'Channel Error')
-            await thread.send(f"Error: {ctx.author.mention} commands are not permitted in {ctx.channel}")
-            await ctx.message.add_reaction(failure_emoji)
-            return False
-        '''
-
-        '''
-        #βœ…βœ… check if the user has the required role(s)
-        guild_id = 879548962464493619
-        verified_role_id = 900063512829755413 # @verified = 900063512829755413, HF = 897376942817419265, fellows = 963431900825919498
-        huggingfolks_role_id = 897376942817419265
-        fellows_role_id = 963431900825919498
-        contentcreator_role_id = 928589475968323636
-        betatester_role_id = 1113511652990668893
-
-        allowed_role_ids = [huggingfolks_role_id, fellows_role_id, contentcreator_role_id, betatester_role_id]
-        guild = bot.get_guild(guild_id)
-        user_roles = ctx.author.roles
-        has_allowed_role = any(role.id in allowed_role_ids for role in user_roles)
-        if not has_allowed_role:
-            print(f"Error: {ctx.author} does not have any of the required roles to use that command.")
-            thread = await ctx.message.create_thread(name=f'Perms Error')
-            await thread.send(f"Error: {ctx.author.mention} does not have any of the required roles to use that command.")
-            await ctx.message.add_reaction(failure_emoji)
-            return False
-        '''
-
-        return True

-    # ping lunarflu if any safety check ever fails
-    except Exception as e:
-        print(f"Error: safetychecks failed somewhere, command will not continue, {e}")
-        await ctx.message.reply(f"❌ <@811235357663297546> SC failed somewhere ❌ {e}") # this will always ping, as long as the bot has access to the channel
-        await ctx.message.add_reaction(failure_emoji)
-#----------------------------------------------------------------------------------------------------------------------------------------------
-async def fullqueue(e, thread): # can improve this using jobs from gradio
-    error_message = str(e)
-    if "Error: Expecting value: line 1 column 1 (char 0)" in error_message:
-        await thread.send("Queue is full! Please try again.")
-    elif "Error: Queue is full! Please try again." in error_message:
-        await thread.send("Queue is full! Please try again.")
-    # raised when the space we depend on needs to build after some inactivity (~10h)
-    elif "local variable 'stage_1_results' referenced before assignment" in error_message:
-        await thread.send("Space is building! Please try again after a few minutes.")

-#----------------------------------------------------------------------------------------------------------------------------------------------
-# deepfloydif stage 1 generation βœ…
-def inference(prompt):
-    negative_prompt = ''
-    seed = random.randint(0, 1000)
-    #seed = 1
-    number_of_images = 4
-    guidance_scale = 7
-    custom_timesteps_1 = 'smart50'
-    number_of_inference_steps = 50
-
-    stage_1_results, stage_1_param_path, stage_1_result_path = df.predict(
-        prompt, negative_prompt, seed, number_of_images, guidance_scale, custom_timesteps_1, number_of_inference_steps, api_name='/generate64')
-
-    return [stage_1_results, stage_1_param_path, stage_1_result_path]
-#----------------------------------------------------------------------------------------------------------------------------------------------
-# deepfloydif stage 2 upscaling βœ…
-def inference2(index, stage_1_result_path):
-    selected_index_for_stage_2 = index
-    seed_2 = 0
-    guidance_scale_2 = 4
-    custom_timesteps_2 = 'smart50'
-    number_of_inference_steps_2 = 50
-    result_path = df.predict(stage_1_result_path, selected_index_for_stage_2, seed_2,
-        guidance_scale_2, custom_timesteps_2, number_of_inference_steps_2, api_name='/upscale256')
-
-    return result_path
-#----------------------------------------------------------------------------------------------------------------------------------------------
-# βœ…
-async def react1234(reaction_emojis, combined_image_dfif):
-    for emoji in reaction_emojis:
-        await combined_image_dfif.add_reaction(emoji)
-#----------------------------------------------------------------------------------------------------------------------------------------------
-# Stage 1 βœ…
-@bot.command()
-async def deepfloydif(ctx, *, prompt: str):
-    thread = None
     try:
-        try:
-            if await safetychecks(ctx): #βœ…
-                if ctx.channel.id == 1119313215675973714:
-                    await ctx.message.add_reaction('<a:loading:1114111677990981692>')
-                    dfif_command_message_id = ctx.message.id # we will use this in some magic later on
-                    thread = await ctx.message.create_thread(name=f'DeepfloydIF | {prompt}', auto_archive_duration=60) # could also just use prompt, no deepfloydif
-                    # create thread -> send new message inside thread + combined_image -> add reactions -> dfif2
-
-                    #current_time = int(time.time())
-                    #random.seed(current_time)
-
-                    negative_prompt = ''
-                    seed = random.randint(0, 1000)
-                    #seed = 1
-                    number_of_images = 4
-                    guidance_scale = 7
-                    custom_timesteps_1 = 'smart50'
-                    number_of_inference_steps = 50
-                    api_name = '/generate64'
-                    await thread.send(f'{ctx.author.mention}Generating images in thread, can take ~1 minute...')
-
-        except Exception as e:
-            print(f"Error: {e}")
-            if thread is None:
-                thread = await ctx.message.create_thread(name=f'DFIF1 Error')
-            await thread.send(f"{ctx.author.mention} Error before stage 1 generation, {e}. If error code: 50035, upscale can still work.")
-            await fullqueue(e, thread)
-            await ctx.message.remove_reaction('<a:loading:1114111677990981692>', bot.user)
-            await ctx.message.add_reaction('<:disagree:1098628957521313892>')
-            await thread.edit(archived=True)
-        #generationβœ…-------------------------------------------------------
-        try:
-            #stage_1_results, stage_1_param_path, stage_1_result_path = df.predict(
-            #    prompt, negative_prompt, seed, number_of_images, guidance_scale, custom_timesteps_1, number_of_inference_steps, api_name='/generate64')
-
-            # run blocking function in executor
-            #await thread.send(f'βœ…running blocking function in executor')
-            loop = asyncio.get_running_loop()
-            result = await loop.run_in_executor(None, inference, prompt)
-            #await thread.send(f'{ctx.author.mention}after executor')
-            #await thread.send(f'βœ…run_in_executor ran successfully')
-            stage_1_results = result[0]
-            stage_1_result_path = result[2]
-
-            partialpath = stage_1_result_path[5:] #magic for later
-
-        except Exception as e:
-            print(f"Error: {e}")
-            if thread is None:
-                thread = await ctx.message.create_thread(name=f'Generation Error')
-            await thread.send(f"{ctx.author.mention} Error during stage 1 generation, {e}")
-            await fullqueue(e, thread)
-            await ctx.message.remove_reaction('<a:loading:1114111677990981692>', bot.user)
-            await ctx.message.add_reaction('<:disagree:1098628957521313892>')
-            await thread.edit(archived=True)
-        #posting imagesβœ…----------------------------------------------------------------
-        try:
-            #await thread.send(f'βœ…combining images...')
-            #old, see: https://huggingface.co/spaces/DeepFloyd/IF/commit/fb79844b1d0b013a28ac435a36f804d8030fba50
-            #png_files = [f for f in os.listdir(stage_1_results) if f.endswith('.png')]
-            png_files = list(glob.glob(f"{stage_1_results}/**/*.png"))
-
-            if png_files:
-                first_png = png_files[0]
-                second_png = png_files[1]
-                third_png = png_files[2]
-                fourth_png = png_files[3]
-
-                first_png_path = os.path.join(stage_1_results, first_png)
-                second_png_path = os.path.join(stage_1_results, second_png)
-                third_png_path = os.path.join(stage_1_results, third_png)
-                fourth_png_path = os.path.join(stage_1_results, fourth_png)
-
-                img1 = Image.open(first_png_path)
-                img2 = Image.open(second_png_path)
-                img3 = Image.open(third_png_path)
-                img4 = Image.open(fourth_png_path)
-
-                combined_image = Image.new('RGB', (img1.width * 2, img1.height * 2))
-
-                combined_image.paste(img1, (0, 0))
-                combined_image.paste(img2, (img1.width, 0))
-                combined_image.paste(img3, (0, img1.height))
-                combined_image.paste(img4, (img1.width, img1.height))
-
-                combined_image_path = os.path.join(stage_1_results, f'{partialpath}{dfif_command_message_id}.png')
-                combined_image.save(combined_image_path)
-
-                with open(combined_image_path, 'rb') as f:
-                    combined_image_dfif = await thread.send(f'{ctx.author.mention}React with the image number you want to upscale!', file=discord.File(
-                        f, f'{partialpath}{dfif_command_message_id}.png')) # named something like: tmpgtv4qjix1111269940599738479.png
-
-            #await thread.send(f'βœ…reacting with 1234...')
-            emoji_list = ['↖️', '↗️', '↙️', 'β†˜οΈ']
-            await react1234(emoji_list, combined_image_dfif)
-
-            await ctx.message.remove_reaction('<a:loading:1114111677990981692>', bot.user)
-            await ctx.message.add_reaction('<:agree:1098629085955113011>')
-            ''' individual images
-            if png_files:
-                for i, png_file in enumerate(png_files):
-                    png_file_path = os.path.join(stage_1_results, png_file)
-                    img = Image.open(png_file_path)
-                    image_path = os.path.join(stage_1_results, f'{i+1}{partialpath}.png')
-                    img.save(image_path)
-                    with open(image_path, 'rb') as f:
-                        await thread.send(f'{ctx.author.mention}Image {i+1}', file=discord.File(f, f'{i+1}{partialpath}.png'))
-                    await asyncio.sleep(1)
-
-            '''
-
-        except Exception as e:
-            print(f"Error: {e}")
-            if thread is None:
-                thread = await ctx.message.create_thread(name=f'Posting Error')
-            await thread.send(f"{ctx.author.mention} Encountered error while posting combined image in thread, {e}")
-            await fullqueue(e, thread)
-            await ctx.message.remove_reaction('<a:loading:1114111677990981692>', bot.user)
-            await ctx.message.add_reaction('<:disagree:1098628957521313892>')
-            await thread.edit(archived=True)
-    #deepfloydif try/except
     except Exception as e:
         print(f"Error: {e}")
-        if thread is None:
-            thread = await ctx.message.create_thread(name=f'deepfloydif Error')
-        await thread.send(f"{ctx.author.mention} Overall error with deepfloydif, {e}")
-        await fullqueue(e, thread)
-        await ctx.message.remove_reaction('<a:loading:1114111677990981692>', bot.user)
-        await ctx.message.add_reaction('<:disagree:1098628957521313892>')
-        await thread.edit(archived=True)
-#----------------------------------------------------------------------------------------------------------------------------
-# Stage 2 βœ…
-async def dfif2(index: int, stage_1_result_path, thread, dfif_command_message_id): # add safetychecks
-    try:
-        parent_channel = thread.parent
-        dfif_command_message = await parent_channel.fetch_message(dfif_command_message_id)
-        await dfif_command_message.remove_reaction('<:agree:1098629085955113011>', bot.user)
-        await dfif_command_message.add_reaction('<a:loading:1114111677990981692>')
-
-        number = index + 1
-        if number == 1:
-            position = "top left"
-        elif number == 2:
-            position = "top right"
-        elif number == 3:
-            position = "bottom left"
-        elif number == 4:
-            position = "bottom right"
-        await thread.send(f"Upscaling the {position} image...")
-
-        # run blocking function in executor
-        loop = asyncio.get_running_loop()
-        result_path = await loop.run_in_executor(None, inference2, index, stage_1_result_path)

-        #await thread.send(f"βœ…upscale done")
-        with open(result_path, 'rb') as f:
-            await thread.send(f'Here is the upscaled image! :) ', file=discord.File(f, 'result.png'))
-
-        await dfif_command_message.remove_reaction('<a:loading:1114111677990981692>', bot.user)
-        await dfif_command_message.add_reaction('<:agree:1098629085955113011>')
-        await thread.edit(archived=True)

     except Exception as e:
         print(f"Error: {e}")
-        parent_channel = thread.parent
-        dfif_command_message = await parent_channel.fetch_message(dfif_command_message_id)
-        await dfif_command_message.remove_reaction('<a:loading:1114111677990981692>', bot.user)
-        await dfif_command_message.add_reaction('<:disagree:1098628957521313892>')
-        await thread.send(f"Error during stage 2 upscaling, {e}")
-        await fullqueue(e, thread)
-        await thread.edit(archived=True)
-#----------------------------------------------------------------------------------------------------------------------------
-# react detector for stage 2 βœ…
-@bot.event
-async def on_reaction_add(reaction, user): # ctx = await bot.get_context(reaction.message)? could try later, might simplify
-    try:
-        #ctx = await bot.get_context(reaction.message)
-        # safety checks first βœ…
-
-
-        if not user.bot:
-            thread = reaction.message.channel
-            threadparentid = thread.parent.id
-            if threadparentid == 1119313215675973714: # testing-the-bot, should be whatever the deepfloydif channel is
-                # 811235357663297546 = lunarflu
-                if reaction.message.attachments:
-                    if user.id == reaction.message.mentions[0].id: # if user.id == reaction.message.mentions[0].id:
-                        # magic begins
-                        #await reaction.message.channel.send("βœ…reaction detected")
-                        attachment = reaction.message.attachments[0]
-                        image_name = attachment.filename # named something like: tmpgtv4qjix1111269940599738479.png
-                        # remove .png first
-                        partialpathmessageid = image_name[:-4] # should be tmpgtv4qjix1111269940599738479
-                        # extract partialpath, messageid
-                        partialpath = partialpathmessageid[:11] # tmpgtv4qjix
-                        messageid = partialpathmessageid[11:] # 1111269940599738479
-                        # add /tmp/ to partialpath, save as new variable
-                        fullpath = "/tmp/" + partialpath # should be /tmp/tmpgtv4qjix
-                        #await reaction.message.channel.send(f"βœ…fullpath extracted, {fullpath}")
-                        emoji = reaction.emoji
-
-                        if emoji == "↖️":
-                            index = 0
-                        elif emoji == "↗️":
-                            index = 1
-                        elif emoji == "↙️":
-                            index = 2
-                        elif emoji == "β†˜οΈ":
-                            index = 3
-
-                        #await reaction.message.channel.send(f"βœ…index extracted, {index}")
-                        index = index
-                        stage_1_result_path = fullpath
-                        thread = reaction.message.channel
-                        dfif_command_message_id = messageid
-                        ctx = await bot.get_context(reaction.message)
-                        #await reaction.message.channel.send(f"βœ…calling dfif2")
-                        await dfif2(index, stage_1_result_path, thread, dfif_command_message_id)

-    except Exception as e:
-        print(f"Error: {e} (known error, does not cause issues, fix later)")
-#----------------------------------------------------------------------------------------------------------------------------
-chathistory = None
-falcon_users = []
-#falcon_threads = []
-falcon_dictionary = {}
-falcon_userid_threadid_dictionary = {}

-@bot.command()
-async def falcon(ctx, *, prompt: str):
-    # todo: need to be careful with these, rework into something simpler
     try:
-        global falcon_users
-        #global falcon_threads # deprecated
-        global falcon_dictionary
-        global falcon_userid_threadid_dictionary
-
-        # dict[generation, authorid]
-        # dict[threadid, authorid]

-        if not ctx.author.bot:
-            if ctx.channel.id == 1119313248056004729: # initial thread creation inside #falcon
-                #if ctx.author.id not in falcon_users: # deprecated
-                if ctx.author.id not in falcon_userid_threadid_dictionary:
-                    await ctx.message.add_reaction('<a:loading:1114111677990981692>')
-                    thread = await ctx.message.create_thread(name=f'{ctx.author}')
-                    #falcon_users = [ctx.author.id] + falcon_users # deprecated
-                    #falcon_threads = [thread.id] + falcon_threads # deprecated
-                    await thread.send(f"[DISCLAIMER: HuggingBot is a **highly experimental** beta feature; The Falcon model and system prompt can be found here: https://huggingface.co/spaces/HuggingFaceH4/falcon-chat]")
-
-                    # initial generation here
-                    chathistory = falconclient.predict(
-                            fn_index=5
-                    ) # []
-                    instructions = "The following is a conversation between a highly knowledgeable and intelligent AI assistant, called Falcon, and a human user, called User. In the following interactions, User and Falcon will converse in natural language, and Falcon will answer User's questions. Falcon was built to be respectful, polite and inclusive. Falcon was built by the Technology Innovation Institute in Abu Dhabi. Falcon will never decline to answer a question, and always attempts to give an answer that User would be satisfied with. It knows a lot, and always tells the truth. The conversation begins."
-                    job = falconclient.submit(prompt, chathistory, instructions, 0.8, 0.9, fn_index=1) # This is not blocking, similar to run_in_executor (but better)
-                    while job.done() == False:
-                        status = job.status() # could be spammy, let's test anyways
-                        #print(status)
-                    else:
-                        file_paths = job.outputs()
-                        full_generation = file_paths[-1] # tmp12345678.json
-                    with open(full_generation, 'r') as file:
-                        data = json.load(file)
-                        output_text = data[-1][-1] # we output this as the bot

-                    falcon_dictionary[ctx.author.id] = full_generation # 1234567890: tmp12345678.json
-                    falcon_userid_threadid_dictionary[ctx.author.id] = thread.id

-                    print(output_text)
-                    await thread.send(f"{output_text}")
-                    await ctx.message.remove_reaction('<a:loading:1114111677990981692>', bot.user)
-                #elif ctx.author.id in falcon_users: # deprecated
-                elif ctx.author.id in falcon_userid_threadid_dictionary:
-                    # get the threadid, grab the last message in that thread, link to that message
-                    thread_id = falcon_userid_threadid_dictionary[ctx.author.id]
-                    thread_link = f"https://discord.com/channels/879548962464493619/1119313248056004729/{thread_id}"
-                    await ctx.reply(f"{ctx.author.mention}, you already have an existing conversation here {thread_link}! Use !falconclear in the #falcon channel to start a new one.")
-            #------------------------------------
-            # post all other generations here
-            #if ctx.channel.id in falcon_threads: # deprecated
-            if ctx.channel.id in falcon_userid_threadid_dictionary.values():
-                if ctx.channel.id == falcon_userid_threadid_dictionary[ctx.author.id]:
-                    await ctx.message.add_reaction('<a:loading:1114111677990981692>')
-                    chathistory = falcon_dictionary[ctx.author.id]

-                    instructions = "The following is a conversation between a highly knowledgeable and intelligent AI assistant, called Falcon, and a human user, called User. In the following interactions, User and Falcon will converse in natural language, and Falcon will answer User's questions. Falcon was built to be respectful, polite and inclusive. Falcon was built by the Technology Innovation Institute in Abu Dhabi. Falcon will never decline to answer a question, and always attempts to give an answer that User would be satisfied with. It knows a lot, and always tells the truth. The conversation begins."
-                    job = falconclient.submit(prompt, chathistory, instructions, 0.8, 0.9, fn_index=1) # This is not blocking, similar to run_in_executor (but better)
-                    while job.done() == False:
-                        status = job.status() # could be spammy, let's test anyways
-                        #print(status)
-                    else:
-                        file_paths = job.outputs()
-                        full_generation = file_paths[-1] # tmp12345678.json
-                        with open(full_generation, 'r') as file:
-                            data = json.load(file)
-                            output_text = data[-1][-1] # we output this as the bot
-                        falcon_dictionary[ctx.author.id] = full_generation
-                        print(output_text)
-                        await ctx.reply(f"{output_text}")
-                    await ctx.message.remove_reaction('<a:loading:1114111677990981692>', bot.user)
-
-    except Exception as e:
-        print(f"Error: {e}")
-        await ctx.reply(f"{e} cc <@811235357663297546> (falconprivate error)")
-        await ctx.message.remove_reaction('<a:loading:1114111677990981692>', bot.user)
-        await ctx.message.add_reaction('<:disagree:1098628957521313892>')
-#----------------------------------------------------------------------------------------------------------------------------
-@bot.command()
-async def falconclear(ctx):
-    if not ctx.author.bot:
-        if ctx.channel.id == 1119313248056004729:
-            if ctx.author.id in falcon_userid_threadid_dictionary:
-                if ctx.author.id in falcon_dictionary:
-                    del falcon_userid_threadid_dictionary[ctx.author.id]
-                    del falcon_dictionary[ctx.author.id]
-                    await ctx.reply(f"{ctx.author.mention}'s conversation has been cleared. Feel free to start a new one!")
-#----------------------------------------------------------------------------------------------------------------------------
-# hackerllama magic to run the bot in a Hugging Face Space
 def run_bot():
-    bot.run(DISCORD_TOKEN)

-threading.Thread(target=run_bot).start()
-
-def greet(name):
-    return "Hello " + name + "!"

-demo = gr.Interface(fn=greet, inputs="text", outputs="text")
-#demo.queue(concurrency_count=10)

 demo.queue(concurrency_count=20)
-demo.launch()
-#----------------------------------------------------------------------------------------------------------------------------

 import discord
+from discord import app_commands
 import gradio as gr
+import os
+import threading
+from falcon import try_falcon
+from falcon import continue_falcon
+from deepfloydif import deepfloydif_stage_1
+from deepfloydif import deepfloydif_stage_2_react_check
+
+# HF GUILD SETTINGS
+MY_GUILD_ID = 1077674588122648679 if os.getenv("TEST_ENV", False) else 879548962464493619
+MY_GUILD = discord.Object(id=MY_GUILD_ID)
+DISCORD_TOKEN = os.environ.get("DISCORD_TOKEN", None)


+class MyClient(discord.Client):
+    """This structure allows slash commands to work instantly."""

+    def __init__(self, *, intents: discord.Intents):
+        super().__init__(intents=intents)
+        self.tree = app_commands.CommandTree(self)

+    async def setup_hook(self):
+        # This copies the global commands over to our guild
+        self.tree.copy_global_to(guild=MY_GUILD)
+        await self.tree.sync(guild=MY_GUILD)


+client = MyClient(intents=discord.Intents.all())


+@client.event
 async def on_ready():
+    print(f"Logged in as {client.user} (ID: {client.user.id})")
+    print("------")


+@client.tree.command()
+@app_commands.describe(prompt="Enter some text to chat with the bot! Like this: /falcon Hello, how are you?")
+async def falcon(interaction: discord.Interaction, prompt: str):
+    """Command that begins a new conversation with Falcon"""
     try:
+        await try_falcon(interaction, prompt)
     except Exception as e:
         print(f"Error: {e}")


+@client.event
+async def on_message(message):
+    """Checks channel and continues Falcon conversation if it's the right Discord Thread"""
+    try:
+        await continue_falcon(message)
     except Exception as e:
         print(f"Error: {e}")


+@client.tree.command()
+@app_commands.describe(prompt="Enter a prompt to generate an image! Can generate realistic text, too!")
+async def deepfloydif(interaction: discord.Interaction, prompt: str):
+    """DeepfloydIF stage 1 generation"""
     try:
+        await deepfloydif_stage_1(interaction, prompt, client)
+    except Exception as e:
+        print(f"Error: {e}")


+@client.event
+async def on_reaction_add(reaction, user):
+    """Checks for a reaction in order to call dfif2"""
+    try:
+        await deepfloydif_stage_2_react_check(reaction, user)
+    except Exception as e:
+        print(f"Error: {e} (known error, does not cause issues, low priority)")


 def run_bot():
+    client.run(DISCORD_TOKEN)


+threading.Thread(target=run_bot).start()
+"""This allows us to run the Discord bot in a Python thread"""
+with gr.Blocks() as demo:
+    gr.Markdown("""
+    # Huggingbots Server
+    This space hosts the huggingbots discord bot.
+    Currently supported models are Falcon and DeepfloydIF
+    """)
 demo.queue(concurrency_count=20)
+demo.launch()
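
For context, the new app.py delegates all command logic to helpers imported from falcon.py and deepfloydif.py, which are not part of this diff. Below is a minimal sketch of the interface those imports appear to assume, inferred only from the call sites above and from the inline code this commit removes; the signatures, docstrings, and bodies here are assumptions, not the committed module files.

# Hypothetical sketch, not part of this commit.
import discord

# falcon.py (assumed interface)
async def try_falcon(interaction: discord.Interaction, prompt: str):
    """Start a new Falcon conversation (thread creation + first generation) for /falcon."""
    ...

async def continue_falcon(message: discord.Message):
    """Continue an existing Falcon conversation when a message arrives in its thread."""
    ...

# deepfloydif.py (assumed interface)
async def deepfloydif_stage_1(interaction: discord.Interaction, prompt: str, client: discord.Client):
    """Generate the stage 1 image grid and post it so users can react to pick one to upscale."""
    ...

async def deepfloydif_stage_2_react_check(reaction: discord.Reaction, user: discord.User):
    """If a valid arrow reaction is detected on a stage 1 grid, run stage 2 upscaling."""
    ...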