lunarflu (HF Staff) committed
Commit 58c201b · 1 Parent(s): 2b1e726

Synced repo using 'sync_with_huggingface' Github Action

Files changed (7)
  1. Makefile +10 -0
  2. app.py +41 -24
  3. deepfloydif.py +15 -15
  4. falcon.py +11 -15
  5. musicgen.py +76 -0
  6. pyproject.toml +7 -0
  7. requirements.txt +1 -1
Makefile ADDED
@@ -0,0 +1,10 @@
+ .PHONY: quality style
+
+ check_dirs := app.py deepfloydif.py falcon.py
+
+ quality:
+ 	black --check $(check_dirs)
+ 	ruff $(check_dirs)
+ style:
+ 	black $(check_dirs)
+ 	ruff $(check_dirs) --fix
app.py CHANGED
@@ -1,12 +1,13 @@
- import discord
- from discord import app_commands
- import gradio as gr
  import os
  import threading
- from falcon import try_falcon
- from falcon import continue_falcon
- from deepfloydif import deepfloydif_stage_1
- from deepfloydif import deepfloydif_stage_2_react_check
+
+ import discord
+ import gradio as gr
+ from deepfloydif import deepfloydif_stage_1, deepfloydif_stage_2_react_check
+ from discord import app_commands
+ from discord.ext import commands
+ from falcon import continue_falcon, try_falcon
+ from musicgen import music_create

  # HF GUILD SETTINGS
  MY_GUILD_ID = 1077674588122648679 if os.getenv("TEST_ENV", False) else 879548962464493619
@@ -14,20 +15,18 @@ MY_GUILD = discord.Object(id=MY_GUILD_ID)
  DISCORD_TOKEN = os.environ.get("DISCORD_TOKEN", None)


- class MyClient(discord.Client):
+ class Bot(commands.Bot):
  """This structure allows slash commands to work instantly."""

- def __init__(self, *, intents: discord.Intents):
- super().__init__(intents=intents)
- self.tree = app_commands.CommandTree(self)
+ def __init__(self):
+ super().__init__(command_prefix="/", intents=discord.Intents.all())

  async def setup_hook(self):
- # This copies the global commands over to our guild
- self.tree.copy_global_to(guild=MY_GUILD)
- await self.tree.sync(guild=MY_GUILD)
+ await self.tree.sync(guild=discord.Object(MY_GUILD_ID))
+ print(f"Synced slash commands for {self.user}.")


- client = MyClient(intents=discord.Intents.all())
+ client = Bot()


  @client.event
@@ -36,12 +35,16 @@ async def on_ready():
  print("------")


- @client.tree.command()
- @app_commands.describe(prompt="Enter some text to chat with the bot! Like this: /falcon Hello, how are you?")
- async def falcon(interaction: discord.Interaction, prompt: str):
+ @client.hybrid_command(
+ name="falcon",
+ with_app_command=True,
+ description="Enter some text to chat with the bot! Like this: /falcon Hello, how are you?",
+ )
+ @app_commands.guilds(MY_GUILD)
+ async def falcon(ctx, prompt: str):
  """Command that begins a new conversation with Falcon"""
  try:
- await try_falcon(interaction, prompt)
+ await try_falcon(ctx, prompt)
  except Exception as e:
  print(f"Error: {e}")

@@ -55,12 +58,26 @@ async def on_message(message):
  print(f"Error: {e}")


- @client.tree.command()
- @app_commands.describe(prompt="Enter a prompt to generate an image! Can generate realistic text, too!")
- async def deepfloydif(interaction: discord.Interaction, prompt: str):
+ @client.hybrid_command(
+ name="deepfloydif",
+ with_app_command=True,
+ description="Enter a prompt to generate an image! Can generate realistic text, too!",
+ )
+ @app_commands.guilds(MY_GUILD)
+ async def deepfloydif(ctx, prompt: str):
  """DeepfloydIF stage 1 generation"""
  try:
- await deepfloydif_stage_1(interaction, prompt, client)
+ await deepfloydif_stage_1(ctx, prompt, client)
+ except Exception as e:
+ print(f"Error: {e}")
+
+
+ @client.hybrid_command(name="musicgen", with_app_command=True, description="Enter a prompt to generate music!")
+ @app_commands.guilds(MY_GUILD)
+ async def musicgen(ctx, prompt: str):
+ """Generates music based on a prompt"""
+ try:
+ await music_create(ctx, prompt)
  except Exception as e:
  print(f"Error: {e}")

@@ -87,4 +104,4 @@ with gr.Blocks() as demo:
  Currently supported models are Falcon and DeepfloydIF
  """)
  demo.queue(concurrency_count=20)
- demo.launch()
+ demo.launch()
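The pattern app.py switches to above — a commands.Bot subclass whose hybrid commands are registered to a single guild and whose command tree is synced in setup_hook — can be shown in isolation with the minimal sketch below. The guild ID, command name, and token handling are placeholders for illustration, not values from the commit:

import os

import discord
from discord import app_commands
from discord.ext import commands

GUILD_ID = 123456789012345678  # placeholder: the target guild's ID
GUILD = discord.Object(id=GUILD_ID)


class Bot(commands.Bot):
    """Guild-scoped sync in setup_hook makes the slash commands usable right away."""

    def __init__(self):
        super().__init__(command_prefix="/", intents=discord.Intents.all())

    async def setup_hook(self):
        # Syncing against one guild avoids waiting for global command propagation.
        await self.tree.sync(guild=GUILD)


bot = Bot()


@bot.hybrid_command(name="echo", with_app_command=True, description="Repeat a prompt back")
@app_commands.guilds(GUILD)
async def echo(ctx: commands.Context, prompt: str):
    await ctx.send(prompt)


bot.run(os.environ["DISCORD_TOKEN"])

Scoping the sync to a single guild is what makes the new commands show up immediately there, which is the behavior the class docstring ("slash commands work instantly") refers to.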
deepfloydif.py CHANGED
@@ -1,11 +1,12 @@
- import discord
- from gradio_client import Client
- import os
- import random
- from PIL import Image
  import asyncio
  import glob
+ import os
  import pathlib
+ import random
+
+ import discord
+ from gradio_client import Client
+ from PIL import Image

  HF_TOKEN = os.getenv("HF_TOKEN")
  deepfloydif_client = Client("huggingface-projects/IF", HF_TOKEN)
@@ -87,23 +88,21 @@ def combine_images(png_files, stage_1_images, partial_path):
  return combined_image_path


- async def deepfloydif_stage_1(interaction, prompt, client):
+ async def deepfloydif_stage_1(ctx, prompt, client):
  """DeepfloydIF command (generate images with realistic text using slash commands)"""
  try:
- if interaction.user.id != BOT_USER_ID:
- if interaction.channel.id == DEEPFLOYDIF_CHANNEL_ID:
+ if ctx.author.id != BOT_USER_ID:
+ if ctx.channel.id == DEEPFLOYDIF_CHANNEL_ID:
  if os.environ.get("TEST_ENV") == "True":
  print("Safety checks passed for deepfloydif_stage_1")
- await interaction.response.send_message("Working on it!")
- channel = interaction.channel
  # interaction.response message can't be used to create a thread, so we create another message
- message = await channel.send("DeepfloydIF Thread")
+ message = await ctx.send(f"**{prompt}** - {ctx.author.mention}")
  thread = await message.create_thread(name=f"{prompt}", auto_archive_duration=60)
  await thread.send(
  "[DISCLAIMER: HuggingBot is a **highly experimental** beta feature; Additional information on the"
  " DeepfloydIF model can be found here: https://huggingface.co/spaces/DeepFloyd/IF"
  )
- await thread.send(f"{interaction.user.mention} Generating images in thread, can take ~1 minute...")
+ await thread.send(f"{ctx.author.mention} Generating images in thread, can take ~1 minute...")

  loop = asyncio.get_running_loop()
  result = await loop.run_in_executor(None, deepfloydif_stage_1_inference, prompt)
@@ -119,13 +118,14 @@ async def deepfloydif_stage_1(interaction, prompt, client):
  print("Images combined for deepfloydif_stage_1")
  with open(combined_image_path, "rb") as f:
  combined_image_dfif = await thread.send(
- f"{interaction.user.mention} React with the image number you want to upscale!",
+ f"{ctx.author.mention} React with the image number you want to upscale!",
  file=discord.File(f, f"{partial_path}.png"),
  )
  emoji_list = ["↖️", "↗️", "↙️", "↘️"]
  await react_1234(emoji_list, combined_image_dfif)
  else:
- await thread.send(f"{interaction.user.mention} No PNG files were found, cannot post them!")
+ await thread.send(f"{ctx.author.mention} No PNG files were found, cannot post them!")
+
  except Exception as e:
  print(f"Error: {e}")

@@ -192,4 +192,4 @@ async def deepfloydif_stage_2(index: int, path_for_stage_2_upscaling, thread):
  await thread.send("Here is the upscaled image!", file=discord.File(f, "result.png"))
  await thread.edit(archived=True)
  except Exception as e:
- print(f"Error: {e}")
+ print(f"Error: {e}")
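For context on the reaction flow above: combine_images (unchanged by this commit, so its body is not shown in the diff) stitches the four stage-1 candidates into one picture so a single message can carry the ↖️ ↗️ ↙️ ↘️ reactions used to pick an image for upscaling. A generic 2x2 paste with PIL, written purely as an illustrative sketch and not taken from the repo, looks like this:

from PIL import Image


def combine_into_grid(png_paths, output_path, tile_size=(256, 256)):
    """Illustrative 2x2 grid paste; not the repo's combine_images implementation."""
    tiles = [Image.open(path).resize(tile_size) for path in png_paths[:4]]
    width, height = tile_size
    grid = Image.new("RGB", (2 * width, 2 * height))
    # Quadrant order mirrors the reaction emojis: top-left, top-right, bottom-left, bottom-right
    positions = [(0, 0), (width, 0), (0, height), (width, height)]
    for tile, position in zip(tiles, positions):
        grid.paste(tile, position)
    grid.save(output_path)
    return output_path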
falcon.py CHANGED
@@ -1,7 +1,8 @@
- from gradio_client import Client
- import os
  import asyncio
  import json
+ import os
+
+ from gradio_client import Client

  HF_TOKEN = os.getenv("HF_TOKEN")
  falcon_userid_threadid_dictionary = {}
@@ -56,19 +57,17 @@ def falcon_initial_generation(prompt, instructions, thread):
  return output_text


- async def try_falcon(interaction, prompt):
+ async def try_falcon(ctx, prompt):
  """Generates text based on a given prompt"""
  try:
  global falcon_userid_threadid_dictionary # tracks userid-thread existence
  global threadid_conversation

- if interaction.user.id != BOT_USER_ID:
- if interaction.channel.id == FALCON_CHANNEL_ID:
+ if ctx.author.id != BOT_USER_ID:
+ if ctx.channel.id == FALCON_CHANNEL_ID:
  if os.environ.get("TEST_ENV") == "True":
  print("Safetychecks passed for try_falcon")
- await interaction.response.send_message("Working on it!")
- channel = interaction.channel
- message = await channel.send("Creating thread...")
+ message = await ctx.send(f"**{prompt}** - {ctx.author.mention}")
  thread = await message.create_thread(name=prompt, auto_archive_duration=60) # interaction.user
  await thread.send(
  "[DISCLAIMER: HuggingBot is a **highly experimental** beta feature; The Falcon model and system"
@@ -79,7 +78,7 @@ async def try_falcon(interaction, prompt):
  print("Running falcon_initial_generation...")
  loop = asyncio.get_running_loop()
  output_text = await loop.run_in_executor(None, falcon_initial_generation, prompt, instructions, thread)
- falcon_userid_threadid_dictionary[thread.id] = interaction.user.id
+ falcon_userid_threadid_dictionary[thread.id] = ctx.author.id

  await thread.send(output_text)
  except Exception as e:
@@ -92,11 +91,8 @@ async def continue_falcon(message):
  if not message.author.bot:
  global falcon_userid_threadid_dictionary # tracks userid-thread existence
  if message.channel.id in falcon_userid_threadid_dictionary: # is this a valid thread?
- if (
- falcon_userid_threadid_dictionary[message.channel.id] == message.author.id
- ): # more than that - is this specifically the right user for this thread?
- if os.environ.get("TEST_ENV") == "True":
- print("Safetychecks passed for continue_falcon")
+ if falcon_userid_threadid_dictionary[message.channel.id] == message.author.id:
+ print("Safetychecks passed for continue_falcon")
  global instructions
  global threadid_conversation
  await message.add_reaction("🔁")
@@ -133,4 +129,4 @@ async def continue_falcon(message):
  await message.reply(output_text)
  except Exception as e:
  print(f"continue_falcon Error: {e}")
- await message.reply(f"Error: {e} <@811235357663297546> (continue_falcon error)")
+ await message.reply(f"Error: {e} <@811235357663297546> (continue_falcon error)")
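try_falcon above, like deepfloydif_stage_1 and music_create, leans on one pattern: the synchronous gradio_client call is pushed onto the default executor so the Discord event loop keeps serving heartbeats and other commands while the Space runs inference. A stripped-down sketch of that pattern (the Space name and api_name below are placeholders, not the repo's real endpoints):

import asyncio

from gradio_client import Client

# Placeholder Space and api_name purely for illustration; the real endpoints
# live in falcon.py, deepfloydif.py, and musicgen.py.
space_client = Client("some-org/some-space")


def blocking_predict(prompt: str) -> str:
    # gradio_client calls are synchronous and can take a while.
    return space_client.predict(prompt, api_name="/predict")


async def handle_prompt(prompt: str) -> str:
    loop = asyncio.get_running_loop()
    # None selects the default ThreadPoolExecutor; the coroutine suspends here,
    # so the bot's event loop is never blocked by the model call.
    return await loop.run_in_executor(None, blocking_predict, prompt)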
musicgen.py ADDED
@@ -0,0 +1,76 @@
+ import asyncio
+ import os
+
+ import discord
+ from gradio_client import Client
+
+ BOT_USER_ID = 1102236653545861151 # real
+ MUSIC_CHANNEL_ID = 1140990231730987058 # real
+
+ musicgen = Client("huggingface-projects/transformers-musicgen", hf_token=os.getenv("HF_TOKEN"))
+
+
+ def music_create_job(prompt):
+ """Generates music based on a given prompt"""
+ try:
+ job = musicgen.submit(prompt, api_name="/predict")
+ while not job.done():
+ pass
+ files = job.outputs()
+ files = files[0]
+
+ return files
+
+ except Exception as e:
+ print(f"music_create_job Error: {e}")
+
+
+ async def music_create(ctx, prompt):
+ """Runs music_create_job in executor"""
+ try:
+ if ctx.author.id != BOT_USER_ID:
+ if ctx.channel.id == MUSIC_CHANNEL_ID:
+ if os.environ.get("TEST_ENV") == "True":
+ print("Safetychecks passed for music_create")
+
+ message = await ctx.send(f"**{prompt}** - {ctx.author.mention}")
+ thread = await message.create_thread(name=prompt, auto_archive_duration=60)
+
+ await thread.send(
+ "[DISCLAIMER: HuggingBot is a beta feature; The MusicGen"
+ " model can be found here: https://huggingface.co/spaces/facebook/MusicGen]"
+ )
+ if os.environ.get("TEST_ENV") == "True":
+ print("Running music_create_job...")
+
+ loop = asyncio.get_running_loop()
+ files = await loop.run_in_executor(None, music_create_job, prompt)
+
+ audio, video = files[0], files[1]
+
+ with open(audio, "rb") as file:
+ discord_file = discord.File(file)
+ await thread.send(file=discord_file)
+
+ with open(video, "rb") as file:
+ discord_file = discord.File(file)
+ await thread.send(file=discord_file)
+
+ embed = discord.Embed()
+ embed.set_thumbnail(url="https://abs.twimg.com/icons/apple-touch-icon-192x192.png")
+ tweet1 = "https://twitter.com/intent/tweet?text="
+ tweet2 = "I%20generated%20this%20audio%20using%20MusicGen"
+ tweet3 = "%20in%20the%20%F0%9F%A4%97%20@huggingface%20Discord!"
+ tweet4 = "%0Ahf.co/join/discord%0A%0APrompt:%20"
+ prompt = prompt.replace(" ", "%20")
+ intent_link = f"{tweet1}{tweet2}{tweet3}{tweet4}{prompt}"
+ embed.add_field(
+ name="Twitter",
+ value=f"[Share it!]({intent_link})",
+ inline=True,
+ )
+
+ await thread.send(embed=embed)
+
+ except Exception as e:
+ print(f"music_create Error: {e}")
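In music_create_job above, gradio_client's submit() returns a Job handle that is polled with job.done() and then read with job.outputs(); outputs()[0] holds the (audio, video) pair that music_create posts to the thread. An equivalent sketch that sleeps between polls instead of spinning, illustrative only and not the committed code, would look like:

import os
import time

from gradio_client import Client

# Same Space and token handling as musicgen.py above.
musicgen_client = Client("huggingface-projects/transformers-musicgen", hf_token=os.getenv("HF_TOKEN"))


def music_create_job_sketch(prompt: str):
    """Variant of music_create_job that yields the worker thread between polls."""
    job = musicgen_client.submit(prompt, api_name="/predict")
    while not job.done():
        time.sleep(1)  # avoid busy-waiting while the Space renders the audio/video
    outputs = job.outputs()
    return outputs[0]  # (audio_path, video_path), as unpacked by music_create above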
pyproject.toml ADDED
@@ -0,0 +1,7 @@
+ [tool.black]
+ line-length = 119
+ target_version = ['py37', 'py38', 'py39', 'py310']
+ preview = true
+
+ [tool.ruff]
+ line-length = 119
requirements.txt CHANGED
@@ -1,2 +1,2 @@
  discord.py
- gradio
+ gradio