broadfield-dev committed on
Commit df534fc · verified · 1 Parent(s): 40e3c5a

Create app.py

Files changed (1)
  1. app.py +299 -0
app.py ADDED
@@ -0,0 +1,299 @@
+ from huggingface_hub import InferenceClient, HfApi, HfFileSystem
+ import gradio as gr
+ import requests
+ import random
+ import prompts  # local module that supplies the system-prompt templates
+ import uuid
+ import json
+ import re
+ import os
+ fs = HfFileSystem()
+ 
+ loc_folder = "chat_history"
+ loc_file = "chat_json"
+ user_ = "community-pool/"
+ repo_ = "test3"
+ clients = [
+     {'type': 'image', 'name': 'black-forest-labs/FLUX.1-dev', 'rank': 'op', 'max_tokens': 16384, 'schema': {'bos': '<|im_start|>', 'eos': '<|im_end|>'}},
+     {'type': 'text', 'name': 'deepseek-ai/DeepSeek-V2.5-1210', 'rank': 'op', 'max_tokens': 16384, 'schema': {'bos': '<|im_start|>', 'eos': '<|im_end|>'}},
+     {'type': 'text', 'name': 'Qwen/Qwen2.5-Coder-32B-Instruct', 'rank': 'op', 'max_tokens': 32768, 'schema': {'bos': '<|im_start|>', 'eos': '<|im_end|>'}},
+     {'type': 'text', 'name': 'meta-llama/Meta-Llama-3-8B', 'rank': 'op', 'max_tokens': 32768, 'schema': {'bos': '<|im_start|>', 'eos': '<|im_end|>'}},
+     {'type': 'text', 'name': 'Snowflake/snowflake-arctic-embed-l-v2.0', 'rank': 'op', 'max_tokens': 4096, 'schema': {'bos': '<|im_start|>', 'eos': '<|im_end|>'}},
+     {'type': 'text', 'name': 'Snowflake/snowflake-arctic-embed-m-v2.0', 'rank': 'op', 'max_tokens': 4096, 'schema': {'bos': '<|im_start|>', 'eos': '<|im_end|>'}},
+     {'type': 'text', 'name': 'HuggingFaceTB/SmolLM2-1.7B-Instruct', 'rank': 'op', 'max_tokens': 4096, 'schema': {'bos': '<|im_start|>', 'eos': '<|im_end|>'}},
+     {'type': 'text', 'name': 'Qwen/QwQ-32B-Preview', 'rank': 'op', 'max_tokens': 16384, 'schema': {'bos': '<|im_start|>', 'eos': '<|im_end|>'}},
+     {'type': 'text', 'name': 'meta-llama/Llama-3.3-70B-Instruct', 'rank': 'pro', 'max_tokens': 16384, 'schema': {'bos': '<|im_start|>', 'eos': '<|im_end|>'}},
+     {'type': 'text', 'name': 'mistralai/Mixtral-8x7B-Instruct-v0.1', 'rank': 'op', 'max_tokens': 40000, 'schema': {'bos': '<s>', 'eos': '</s>'}},
+ ]
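+ # NOTE: this list mixes an image model (FLUX.1-dev) and two Snowflake
+ # embedding models in with the chat models; presumably only the instruct-style
+ # text models work with the text_generation() calls below.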
+ def file_template(inp):
+     # Return a starter template for known filenames, "NONE" otherwise.
+     if "readme.md" in inp.lower():
+         template = prompts.README
+     else:
+         template = "NONE"
+     return template
+ def format_prompt(message, mod, system):
+     # Wrap the system prompt and the user message in the model's chat markers.
+     eos = f"{clients[int(mod)]['schema']['eos']}\n"
+     bos = f"{clients[int(mod)]['schema']['bos']}\n"
+     prompt = bos + system + eos
+     prompt += bos + f"[INST] {message} [/INST]" + eos
+     prompt += bos
+     return prompt
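+ # For illustration: with mod=9 (Mixtral: bos='<s>', eos='</s>'),
+ # format_prompt("hi", 9, "SYS") produces
+ #     <s>\nSYS</s>\n<s>\n[INST] hi [/INST]</s>\n<s>\n
+ # i.e. the per-model bos/eos markers are always combined with Mistral-style
+ # [INST] tags, whichever schema the model uses.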
+ def generate(prompt, history, mod=2, tok=4000, seed=1, role="ASSISTANT", data=None):
+     gen_images = False
+     client = InferenceClient(clients[int(mod)]['name'])
+     client_tok = clients[int(mod)]['max_tokens']
+     good_seed = [947385642222, 7482965345792, 8584806344673]
+ 
+     if not os.path.isdir(loc_folder):
+         os.mkdir(loc_folder)
+ 
+     # Load any saved chat history (read but currently unused below).
+     if os.path.isfile(f'{loc_folder}/{loc_file}.json'):
+         with open(f'{loc_folder}/{loc_file}.json', 'r') as word_dict:
+             lod = json.loads(word_dict.read())
+     else:
+         lod = []
+     if role == "MANAGER":
+         system_prompt = prompts.MANAGER.replace("**TIMELINE**", data[4]).replace("**HISTORY**", str(history))
+         formatted_prompt = format_prompt(prompt, mod, system_prompt)
+     elif role == "PATHMAKER":
+         system_prompt = prompts.PATH_MAKER.replace("**FILE_LIST**", str(data[3])).replace("**CURRENT_OR_NONE**", str(data[4])).replace("**PROMPT**", json.dumps(data[0], indent=4)).replace("**HISTORY**", str(history))
+         formatted_prompt = format_prompt(prompt, mod, system_prompt)
+     elif role == "CREATE_FILE":
+         system_prompt = prompts.CREATE_FILE.replace("**FILE_LIST**", str(data[3])).replace("**TIMELINE**", data[4]).replace("**FILENAME**", str(data[1])).replace("**TEMPLATE_OR_NONE**", str(data[2]))
+         formatted_prompt = format_prompt(prompt, mod, system_prompt)
+     elif role == "SEARCH":
+         system_prompt = prompts.SEARCH.replace("**DATA**", data)
+         formatted_prompt = format_prompt(f'USER:{prompt}', mod, system_prompt)
+     else:
+         system_prompt = ""
+         formatted_prompt = format_prompt(f'USER:{prompt}', mod, system_prompt)
+ 
+     if tok is None:
+         # Rough budget: max_tokens minus prompt length. len() counts
+         # characters, not tokens, so this is only an approximation.
+         tok = client_tok - len(formatted_prompt) + 10
+     print("tok", tok)
+     generate_kwargs = dict(
+         temperature=0.9,
+         max_new_tokens=tok,  # total tokens minus input tokens
+         top_p=0.99,
+         repetition_penalty=1.0,
+         do_sample=True,
+         seed=seed,
+     )
+     output = ""
+     # Each branch streams the completion, then yields the full text first;
+     # agent() reads list(generate(...))[0] as the result.
+     if role == "MANAGER":
+         print("Running Manager")
+         stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=True)
+         for response in stream:
+             output += response.token.text
+         yield output
+         yield history
+         yield prompt
+ 
+     elif role == "PATHMAKER":
+         print("Running ", role)
+         stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=True)
+         for response in stream:
+             output += response.token.text
+         print(output)
+         yield output
+         yield history
+         yield prompt
+ 
+     elif role == "CREATE_FILE":
+         print("Running Create File")
+         stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=True)
+         for response in stream:
+             output += response.token.text
+         print(output)
+         # Callers read the third yielded value as the generated file content.
+         yield 'test1'
+         yield data[1]
+         yield output
+ 
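+ # parse_json pulls a {"filename": ..., "filecontent": ...} payload out of the
+ # CREATE_FILE output. Expected shape (assumed example):
+ #     ```json
+ #     {"filename": "README.md", "filecontent": "# My Space"}
+ #     ```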
+ def parse_json(inp):
+     print("PARSE INPUT")
+     print(inp)
+     if isinstance(inp, str):
+         if "```" in inp:
+             # Extract the body of a ```json fenced block, if one is present.
+             start = inp.find("```json")
+             end = inp.find("```", start + 7)
+             if start > -1 and end > -1:
+                 inp = inp[start + 7:end]
+             else:
+                 inp = "NONE"
+         print("Extracted Lines")
+         print(inp)
+         try:
+             # json.loads rather than eval: the payload is expected to be JSON.
+             out_json = json.loads(inp)
+             out1 = str(out_json['filename'])
+             out2 = str(out_json['filecontent'])
+             return out1, out2
+         except Exception as e:
+             print(e)
+             return "None", "None"
+     if isinstance(inp, dict):
+         out1 = str(inp['filename'])
+         out2 = str(inp['filecontent'])
+         return out1, out2
+     return "None", "None"
+ def build_space(repo_name, file_name, file_content, access_token=""):
+     # repo_name is currently unused; the module-level repo_ is used instead.
+     try:
+         repo_path = user_ + str(repo_)
+         if not access_token:
+             #access_token=os.environ['HF_TOKEN']
+             return [{'role': 'assistant', 'content': 'ENTER A HUGGINGFACE TOKEN'}]
+         api = HfApi(endpoint="https://huggingface.co", token=access_token)
+         repo_url = api.create_repo(
+             repo_id=repo_path,
+             repo_type="space",
+             space_sdk="gradio",
+             exist_ok=True,
+             private=False,
+         )
+ 
+         # Write the content to a uniquely named local file, then upload it.
+         local_file_path = str(uuid.uuid4())
+         with open(local_file_path, 'w') as f:
+             f.write(str(file_content))
+         commit_message = "Adding file test: " + str(file_name)
+         api.upload_file(path_or_fileobj=local_file_path, path_in_repo=file_name, repo_id=repo_path, repo_type='space', commit_message=commit_message)
+         print("File uploaded successfully.")
+         commit_message += "\nInitial commit to the repository." + f'{repo_path}/{file_name}'
+         #api.commit_repo(space_id, message=commit_message)
+         return [{'role': 'assistant', 'content': commit_message + '\nCommit Success'}]
+     except Exception as e:
+         print("ERROR ", e)
+         return [{'role': 'assistant', 'content': 'There was an Error: ' + str(e)}]
+ 
+ 
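+ # agent() drives the tool loop: PATHMAKER drafts a plan, MANAGER picks a tool,
+ # and each MANAGER reply is scanned for a line of the (assumed) form
+ #     action: TOOL_NAME action_input=VALUE
+ # where TOOL_NAME is CREATE_FILE, READ_FILE, IMAGE, SEARCH or COMPLETE.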
+ def agent(prompt_in, history, mod=2, tok_in=""):
+     # prompt_in comes from a MultimodalTextbox, i.e. a dict with 'text'/'files'.
+     print(prompt_in)
+     print('mod ', mod)
+     # in_data slots: [0] prompt, [1] filename, [2] template, [3] file list, [4] plan
+     in_data = [None, None, None, None, None]
+     #in_data[0]=prompt_in['text']
+     in_data[0] = prompt_in
+     prompt = prompt_in
+     fn = ""
+     com = ""
+     go = True
+     MAX_DATA = int(clients[int(mod)]['max_tokens']) * 2
+     if not history:
+         history = [{'role': 'user', 'content': prompt_in['text']}]
+     while go:
+         try:
+             file_list = fs.ls(f'spaces/{user_}{repo_}', detail=False)
+         except Exception as e:
+             print(e)
+             file_list = ["NO FILES YET"]
+         print('file list\n', file_list)
+ 
+         seed = random.randint(1, 9999999999999)
+         c = 0
+         # Trim the history when it outgrows a rough character budget.
+         if len(str(history)) > MAX_DATA * 4:
+             history = history[-2:]
+         print('history', history)
+         role = "PATHMAKER"
+         in_data[3] = file_list
+         outph = list(generate(prompt, history, mod, 2400, seed, role, in_data))[0]
+ 
+         in_data[4] = outph
+         print(outph)
+         history.extend([{'role': 'assistant', 'content': str(outph)}])
+         yield history
+         role = "MANAGER"
+         outp = generate(prompt, history, mod, 128, seed, role, in_data)
+         outp0 = list(outp)[0].split('<|im_end|>')[0]
+         history.extend([{'role': 'assistant', 'content': str(outp0)}])
+         yield history
+         # Scan the manager's reply for a tool call.
+         for line in outp0.split("\n"):
+             if "action:" in line:
+                 try:
+                     com_line = line.split('action:')[1]
+                     fn = com_line.split('action_input=')[0]
+                     com = com_line.split('action_input=')[1].split('<|im_end|>')[0]
+                     print(com)
+                 except Exception as e:
+                     print(e)
+                     fn = "NONE"
+ 
+         if 'CREATE_FILE' in fn:
+             print('CREATE_FILE called')
+             in_data[1] = com
+             in_data[2] = file_template(com)
+             in_data[3] = file_list
+             out_o = generate(prompt, history, mod=mod, tok=10000, seed=seed, role="CREATE_FILE", data=in_data)
+             out_w = list(out_o)
+             ret1, ret2 = parse_json(out_w[2].split('<|im_end|>')[0])
+             print('ret1', ret1)
+             print('ret2', ret2)
+             build_out = build_space(repo_, ret1, ret2, access_token=tok_in)
+             if build_out[0]["content"] == "ENTER A HUGGINGFACE TOKEN":
+                 yield [{'role': 'assistant', 'content': "ENTER A HUGGINGFACE TOKEN"}]
+                 go = False
+                 break
+             history += [{'role': 'system', 'content': f'observation:{build_out}'}]
+             yield history
+ 
+         elif 'READ_FILE' in fn:
+             try:
+                 file_read = fs.read_text(f'spaces/{user_}{repo_}/{com}')
+             except Exception as e:
+                 print(e)
+                 file_read = "FILE HAS NO CONTENT"
+             print('file read\n', file_read)
+             history += [{'role': 'system', 'content': f'RETURNED FILE CONTENT: NAME: spaces/{user_}{repo_}/{com} CONTENT:{file_read}'}]
+             yield history
+         elif 'IMAGE' in fn:
+             print('IMAGE called')
+             #out_im=gen_im(prompt,seed)
+             #yield [{'role':'assistant','content': out_im}]
+         elif 'SEARCH' in fn:
+             print('SEARCH called')
+         elif 'COMPLETE' in fn:
+             print('COMPLETE')
+             go = False
+             break
+         elif 'NONE' in fn:
+             print('ERROR ACTION NOT FOUND')
+             history += [{'role': 'system', 'content': 'observation:The last thing we attempted resulted in an error; check the formatting of the tool call'}]
+         else:
+             seed = random.randint(1, 9999999999999)
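+ 
+ # Gradio UI: a chat panel wired to agent(); the Stop button cancels the
+ # in-flight submit events via cancels=[sub_b, sub_p].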
+ with gr.Blocks() as ux:
+     with gr.Row():
+         with gr.Column():
+             gr.HTML("""<center><div style='font-size:xx-large;font-weight:900;'>Chatbo</div><br>
+             <div style='font-size:large;font-weight:700;'>This will make changes to your Huggingface File System</div><br>
+             <div style='font-size:large;font-weight:900;'>Use at your own risk!</div><br>
+             """)
+             chatbot = gr.Chatbot(type='messages', show_label=False, show_share_button=False, show_copy_button=True, layout="panel")
+             prompt = gr.MultimodalTextbox(label="Prompt", file_count="multiple", file_types=["image"])
+             mod_c = gr.Dropdown(choices=[n['name'] for n in clients], value='Qwen/Qwen2.5-Coder-32B-Instruct', type='index')
+             tok_in = gr.Textbox(label='HF TOKEN')
+             with gr.Row():
+                 submit_b = gr.Button()
+                 stop_b = gr.Button("Stop")
+                 clear = gr.ClearButton([chatbot, prompt])
+             with gr.Row(visible=False):
+                 stt = gr.Textbox()
+         with gr.Column():
+             gr.HTML()
+             #html_view=gr.HTML("""<iframe src='https://huggingface.co/spaces/community-pool/test1/tree/main' height='1000' width='200'>Viewer Space</iframe>""")
+     sub_b = submit_b.click(agent, [prompt, chatbot, mod_c, tok_in], chatbot)
+     sub_p = prompt.submit(agent, [prompt, chatbot, mod_c, tok_in], chatbot)
+     stop_b.click(None, None, None, cancels=[sub_b, sub_p])
+ ux.queue(default_concurrency_limit=20).launch(max_threads=40)
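+ 
+ # NOTE: this Space assumes a local prompts.py defining the README, MANAGER,
+ # PATH_MAKER, CREATE_FILE and SEARCH template strings referenced above.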