leandrocarneiro committed on
Commit
51e1c58
1 Parent(s): bd5a0eb

Upload 2 files

Files changed (2)
  1. app.py +3 -3
  2. rag.py +9 -2
app.py CHANGED
@@ -56,16 +56,16 @@ with gr.Blocks(title='BotNews') as page:
     with gr.Row():
         output1 = gr.Textbox(label="Notícia gerada por IA", lines=25)
     gr.Markdown("<hr>")
-    gr.Markdown("## Prompt para a Notícia")
+    gr.Markdown("## Ajustar a notícia com IA")
     gr.Markdown(" ")
     gr.Markdown("<b>Instrução:</b> Preencha abaixo com um comando para ser executado sobre a notícia (Por exemplo: 'Resuma em tópicos' ou 'Adicione um tom sarcástico').")
     gr.Markdown(" ")
     with gr.Row():
         input6 = gr.Textbox(label="Ajustar a notícia com IA")
     with gr.Row():
-        button2 = gr.Button("Gerar resposta")
+        button2 = gr.Button("Ajustar notícia")
     with gr.Row():
-        output2 = gr.Textbox(label="Resposta gerada por IA", lines=25)
+        output2 = gr.Textbox(label="Notícia ajustada por IA", lines=25)

     button1.click(call_generate_news, inputs=[input1, input2, input3, input4, input5], outputs=[output1])
     button2.click(call_invoke_llm, inputs=[output1, input6, input5], outputs=[output2])
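The wiring at the bottom of the hunk is what ties the renamed controls together: button2 feeds the AI-generated news (output1) plus the user's instruction (input6) back into call_invoke_llm, so the second textbox always operates on the first one's result. A minimal, self-contained sketch of that pattern, with hypothetical placeholder callbacks standing in for the app's call_generate_news / call_invoke_llm (which also receive the other form inputs, omitted here):

import gradio as gr

# Placeholder callbacks; the real app calls an LLM in both steps.
def generate_news(topic):
    return f"Notícia gerada sobre: {topic}"

def adjust_news(news, instruction):
    return f"{news}\n\n[ajustada conforme: {instruction}]"

with gr.Blocks(title='BotNews') as page:
    topic = gr.Textbox(label="Assunto")
    btn1 = gr.Button("Gerar notícia")
    news = gr.Textbox(label="Notícia gerada por IA", lines=10)

    instruction = gr.Textbox(label="Ajustar a notícia com IA")
    btn2 = gr.Button("Ajustar notícia")
    adjusted = gr.Textbox(label="Notícia ajustada por IA", lines=10)

    # The generated news is an output of the first click and an input of the second.
    btn1.click(generate_news, inputs=[topic], outputs=[news])
    btn2.click(adjust_news, inputs=[news, instruction], outputs=[adjusted])

page.launch()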
rag.py CHANGED
@@ -4,6 +4,7 @@
 #from langchain.embeddings import OpenAIEmbeddings
 from langchain_openai import OpenAIEmbeddings
 from langchain_community.embeddings import HuggingFaceEmbeddings
+from langchain_together.embeddings import TogetherEmbeddings
 from langchain_community.vectorstores import Chroma
 from langchain_community.document_loaders import DirectoryLoader
 from langchain.text_splitter import RecursiveCharacterTextSplitter
@@ -14,6 +15,7 @@ from langchain.memory import ConversationBufferMemory
 from langchain.chains import ConversationalRetrievalChain
 import os
 import csv
+import time

 def read_csv_to_dict(filename):
     data_dict = {}
@@ -45,9 +47,14 @@ def generate_embeddings_and_vectorstore(path, model):
     #print(docs)
     if model == 'openai':
         fc_embeddings = OpenAIEmbeddings(openai_api_key=os.environ['OPENAI_KEY'])
+        vectorstore = Chroma.from_documents(docs, fc_embeddings)
     else:
-        fc_embeddings = HuggingFaceEmbeddings(model_name = 'intfloat/multilingual-e5-large-instruct')
-    vectorstore = Chroma.from_documents(docs, fc_embeddings)
+        #fc_embeddings = HuggingFaceEmbeddings(model_name = 'intfloat/multilingual-e5-large-instruct')
+        #vectorstore = Chroma.from_documents(docs, fc_embeddings)
+        fc_embeddings = TogetherEmbeddings(model = 'togethercomputer/m2-bert-80M-8k-retrieval', together_api_key = os.environ['TOGETHER_KEY'])
+        for doc in docs:
+            vectorstore = Chroma.from_documents(documents=[doc], embedding=fc_embeddings)
+            time.sleep(0.5)
     print('total de docs no vectorstore=',len(vectorstore.get()['documents']))

     return vectorstore
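One caveat about the new else branch (an observation on the committed code, not part of the diff): each loop iteration calls Chroma.from_documents on a single-document list, and with no client or persist_directory passed that builds a fresh in-memory Chroma instance each time, so vectorstore ends up holding only the last document and the printed total reflects that. If the per-document loop is only there to throttle calls to the Together embeddings endpoint, a hedged sketch that keeps one store and appends to it (assuming the same docs list, TOGETHER_KEY environment variable, and model name as in the diff; build_vectorstore_throttled is a hypothetical helper) could look like this:

import os
import time

from langchain_community.vectorstores import Chroma
from langchain_together.embeddings import TogetherEmbeddings

def build_vectorstore_throttled(docs, delay_seconds=0.5):
    """Hypothetical helper: embed documents one at a time into a single Chroma store."""
    # Same Together model and API-key variable as in the commit.
    fc_embeddings = TogetherEmbeddings(
        model='togethercomputer/m2-bert-80M-8k-retrieval',
        together_api_key=os.environ['TOGETHER_KEY'],
    )
    # Create the store once, seeded with the first document...
    vectorstore = Chroma.from_documents(documents=[docs[0]], embedding=fc_embeddings)
    # ...then append the remaining documents, pausing between calls so the
    # embedding endpoint is not hit too quickly.
    for doc in docs[1:]:
        vectorstore.add_documents([doc])
        time.sleep(delay_seconds)
    return vectorstore

The sleep serves the same purpose as in the commit (spacing out requests to the embeddings API); the difference is that add_documents accumulates everything in one collection instead of recreating the store per document.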