lunarflu HF Staff committed on
Commit
66e4610
·
1 Parent(s): 35d7ae2

[falcon] memory update

Browse files
Files changed (1) hide show
  1. app.py +33 -55
app.py CHANGED
@@ -563,69 +563,47 @@ async def on_reaction_add(reaction, user): # ctx = await bot.get_context(reac
563
 
564
 
565
  #----------------------------------------------------------------------------------------------------------------------------
 
 
566
@bot.command()
async def falcon(ctx, *, prompt: str):
    """Answer *prompt* with the Falcon model hosted on a Gradio Space.

    The prompt is prefixed with a random UUID so the matching generation can
    be located afterwards among the JSON chat-history files the Space writes
    to /tmp. On success the bot replies with the generation and reacts with
    :agree:; if no history file carries the UUID it reacts with :disagree:.
    All exceptions are caught and logged, never raised back to Discord.
    """
    import asyncio  # local import: polling must yield to the event loop

    try:
        if await safetychecks(ctx):
            if ctx.channel.id == 1116089829147557999:  # falcon channel only
                # fn_index=5 creates a fresh, empty chat-history file on the
                # Space, e.g. /tmp/tmpjmz7f_9u.json containing [].
                result = falconspace.predict(fn_index=5)

                instructions = "The following is a conversation between a highly knowledgeable and intelligent AI assistant, called Falcon, and a human user, called User. The conversation may begin with a unique UUID, and if it does, the UUID can be ignored. In the following interactions, User and Falcon will converse in natural language, and Falcon will answer User's questions. Falcon was built to be respectful, polite and inclusive. Falcon was built by the Technology Innovation Institute in Abu Dhabi. Falcon will never decline to answer a question, and always attempts to give an answer that User would be satisfied with. It knows a lot, and always tells the truth. The conversation begins."
                randomuuid = str(uuid.uuid4())
                prompt = randomuuid + " " + prompt

                # fn_index=1 appends a (prompt, generation) pair to the history
                # file, e.g. [["What day is today?", "Today"]]; submit() does
                # not block.
                job = falconspace.submit(prompt, result, instructions, 0.8, 0.9, fn_index=1)
                while not job.done():
                    print(job.status())
                    # BUGFIX: time.sleep(0.1) here stalled the whole Discord
                    # event loop while polling; yield control instead.
                    await asyncio.sleep(0.1)
                print(job.result())

                # The Space's history files all land in /tmp; find the one
                # whose entries carry our UUID and keep the longest generation.
                directory = '/tmp/'
                max_length = 0
                max_length_file = None
                outputtext = ''  # BUGFIX: bind up front so the reply below can never hit an unbound name

                for filename in os.listdir(directory):
                    if filename.endswith('.json'):
                        filepath = os.path.join(directory, filename)
                        with open(filepath, 'r') as file:
                            data = json.load(file)
                        for item in data:
                            if randomuuid in item[0]:  # UUID was prepended to the prompt
                                text = item[1] if len(item) > 1 else ''  # some entries have no generation yet
                                if len(text) > max_length:
                                    outputtext = text
                                    max_length = len(text)
                                    max_length_file = filepath

                if max_length_file is not None:
                    print(f"The JSON file '{max_length_file}' contains the largest amount of text after the UUID.")
                    await ctx.reply(f"{outputtext}")
                    await ctx.message.add_reaction('<:agree:1098629085955113011>')
                else:
                    print("No JSON file containing the UUID was found.")
                    await ctx.message.add_reaction('<:disagree:1098628957521313892>')
    except Exception as e:
        print(f"Error: {e}")
 
563
 
564
 
565
  #----------------------------------------------------------------------------------------------------------------------------
566
# Path of the Gradio chat-history JSON file for the ongoing conversation;
# None until the first prompt. Reusing it across invocations is what gives
# the model its memory.
chathistory = None

@bot.command()
async def falcon(ctx, *, prompt: str):
    """Send *prompt* to the Falcon Space and reply with its generation.

    A single module-level chat-history file is threaded through every call so
    follow-up prompts keep the conversation context. All exceptions are caught
    and logged, never raised back to Discord.
    """
    global chathistory
    import asyncio  # local import: polling must yield to the event loop

    try:
        if await safetychecks(ctx):
            if ctx.channel.id == 1116089829147557999:  # falcon channel only
                if chathistory is None:
                    # First prompt of a conversation: fn_index=5 creates an
                    # empty chat-history JSON file on the Space.
                    chathistory = client.predict(fn_index=5)

                instructions = "The following is a conversation between a highly knowledgeable and intelligent AI assistant, called Falcon, and a human user, called User. In the following interactions, User and Falcon will converse in natural language, and Falcon will answer User's questions. Falcon was built to be respectful, polite and inclusive. Falcon was built by the Technology Innovation Institute in Abu Dhabi. Falcon will never decline to answer a question, and always attempts to give an answer that User would be satisfied with. It knows a lot, and always tells the truth. The conversation begins."

                # BUGFIX: the old follow-up branch replaced the user's prompt
                # with a hard-coded "Nice, thanks!" placeholder; always submit
                # the real prompt. fn_index=1 appends a (prompt, generation)
                # pair to the history file; submit() does not block.
                job = client.submit(prompt, chathistory, instructions, 0.8, 0.9, fn_index=1)
                # BUGFIX: `while job.done() == False: status = job.status()`
                # was a tight busy-wait that starved the Discord event loop.
                while not job.done():
                    await asyncio.sleep(0.1)

                file_paths = job.outputs()
                full_generation = file_paths[-1]
                chathistory = full_generation  # carry memory into the next call
                with open(full_generation, 'r') as file:
                    data = json.load(file)
                output_text = data[-1][-1]  # last generation appended to the history
                print(output_text)
                await ctx.reply(f"{output_text}")
    except Exception as e:
        print(f"Error: {e}")