davidoneilai committed on
Commit
6c6d1fa
1 Parent(s): 5308686

add difficulty selection (`difficultie` field)

Browse files
server/app.py CHANGED
@@ -2,10 +2,13 @@ from fastapi import FastAPI
2
  from fastapi.middleware.cors import CORSMiddleware
3
  from inference import rag_chain
4
  from pydantic import BaseModel
5
- from fastapi.staticfiles import StaticFiles
 
 
6
 
7
  class Body(BaseModel):
8
  subject: str
 
9
 
10
 
11
  app = FastAPI()
@@ -17,15 +20,21 @@ app.add_middleware(
17
  allow_headers=["*"],
18
  )
19
 
 
20
  @app.post("/generate_questions")
21
  async def generate_questions(body: Body):
22
  subject = body.subject
23
- query = f"Quero que você gere questões de biologia, sendo do assunto: {subject}."
 
24
  res = rag_chain.invoke(f"""{query}""")
25
- return res
 
 
26
 
27
- # app.mount("/", StaticFiles(directory="static", html=True), name="static")
 
28
 
29
  if __name__ == "__main__":
30
  import uvicorn
31
- uvicorn.run("app:app", host="0.0.0.0", port=8000)
 
 
2
  from fastapi.middleware.cors import CORSMiddleware
3
  from inference import rag_chain
4
  from pydantic import BaseModel
5
+ from fastapi.staticfiles import StaticFiles
6
+ from data.load_data import retriever_pre
7
+
8
 
9
class Body(BaseModel):
    """Request body for the /generate_questions endpoint."""

    # NOTE(review): "difficultie" is a misspelling of "difficulty", but the
    # frontend sends this exact JSON key — renaming it would break the API.
    # Frontend sends values like "facil" / "intermediaria" / "dificil" for
    # difficultie and e.g. "biofisica" for subject (see web/src/App.jsx).
    subject: str
    difficultie: str
12
 
13
 
14
  app = FastAPI()
 
20
  allow_headers=["*"],
21
  )
22
 
23
+
24
@app.post("/generate_questions")
async def generate_questions(body: Body):
    """Generate questions for the requested subject and difficulty.

    Builds a Portuguese prompt ("generate biology questions on subject X
    with difficulty Y") from the request body, runs it through the RAG
    chain, and returns the raw chain output under the "res" key.
    """
    subject = body.subject
    difficultie = body.difficultie
    query = f"Quero que você gere questões de biologia, sendo do assunto: {subject} e sendo da dificuldade: {difficultie}."
    # `query` is already a str — wrapping it again as f"""{query}""" was a no-op.
    res = rag_chain.invoke(query)
    return {
        "res": res,
    }
33
 
34
+
35
+ app.mount("/", StaticFiles(directory="static", html=True), name="static")
36
 
37
if __name__ == "__main__":
    # Dev entry point: serve the ASGI app on all interfaces, port 8000.
    import uvicorn

    uvicorn.run("app:app", host="0.0.0.0", port=8000)
server/data/load_data.py CHANGED
@@ -6,8 +6,8 @@ from langchain.retrievers.self_query.base import SelfQueryRetriever
6
  from llm.gemini import gemini_embeddings, llm
7
  from utils.questions_parser import parse_question
8
 
 
9
  try:
10
-
11
  vectorstore = Chroma(
12
  persist_directory="./chroma_db", embedding_function=gemini_embeddings
13
  )
@@ -44,6 +44,7 @@ except Exception as e:
44
  persist_directory="./chroma_db", embedding_function=gemini_embeddings
45
  )
46
 
 
47
  metadata_field_info = [
48
  AttributeInfo(
49
  name="topico",
@@ -67,7 +68,7 @@ metadata_field_info = [
67
  ),
68
  ]
69
 
70
- document_content_description = "Questões de biologia"
71
 
72
  retriever = SelfQueryRetriever.from_llm(
73
  llm, vectorstore, document_content_description, metadata_field_info, verbose=True
 
6
  from llm.gemini import gemini_embeddings, llm
7
  from utils.questions_parser import parse_question
8
 
9
+
10
  try:
 
11
  vectorstore = Chroma(
12
  persist_directory="./chroma_db", embedding_function=gemini_embeddings
13
  )
 
44
  persist_directory="./chroma_db", embedding_function=gemini_embeddings
45
  )
46
 
47
+
48
  metadata_field_info = [
49
  AttributeInfo(
50
  name="topico",
 
68
  ),
69
  ]
70
 
71
+ document_content_description = "Questões de matérias do ensino médio."
72
 
73
  retriever = SelfQueryRetriever.from_llm(
74
  llm, vectorstore, document_content_description, metadata_field_info, verbose=True
server/inference.py CHANGED
@@ -2,9 +2,14 @@ from langchain.schema.runnable import RunnablePassthrough
2
  from langchain_google_genai import ChatGoogleGenerativeAI
3
  from langchain_core.runnables import RunnableLambda
4
 
5
- from llm.gemini import questions_template, format_questions_instructions, questions_parser
 
 
 
 
6
  from data.load_data import retriever
7
 
 
8
  def get_questions(_dict):
9
  question = _dict["question"]
10
  context = _dict["context"]
@@ -13,9 +18,9 @@ def get_questions(_dict):
13
  question=question,
14
  format_questions_instructions=format_questions_instructions,
15
  )
16
-
17
  tries = 0
18
-
19
  while tries < 3:
20
  try:
21
  chat = ChatGoogleGenerativeAI(model="gemini-pro")
@@ -36,5 +41,3 @@ rag_chain = {
36
  "context": retriever | RunnableLambda(format_docs),
37
  "question": RunnablePassthrough(),
38
  } | RunnableLambda(get_questions)
39
-
40
-
 
2
  from langchain_google_genai import ChatGoogleGenerativeAI
3
  from langchain_core.runnables import RunnableLambda
4
 
5
+ from llm.gemini import (
6
+ questions_template,
7
+ format_questions_instructions,
8
+ questions_parser,
9
+ )
10
  from data.load_data import retriever
11
 
12
+
13
  def get_questions(_dict):
14
  question = _dict["question"]
15
  context = _dict["context"]
 
18
  question=question,
19
  format_questions_instructions=format_questions_instructions,
20
  )
21
+
22
  tries = 0
23
+
24
  while tries < 3:
25
  try:
26
  chat = ChatGoogleGenerativeAI(model="gemini-pro")
 
41
  "context": retriever | RunnableLambda(format_docs),
42
  "question": RunnablePassthrough(),
43
  } | RunnableLambda(get_questions)
 
 
server/utils.py DELETED
File without changes
web/src/App.jsx CHANGED
@@ -13,6 +13,7 @@ import 'react-toastify/dist/ReactToastify.css';
13
  function App() {
14
 
15
  const [subject, setSubject] = useState("");
 
16
 
17
  const [menuState, setMenuState] = useState(true);
18
  const [isLoading, setIsLoading] = useState(false);
@@ -30,6 +31,11 @@ function App() {
30
  { label: "Biofísica", value: "biofisica" },
31
  ]
32
 
 
 
 
 
 
33
  const handleSubmit = async (e) => {
34
  e.preventDefault();
35
  setIsLoading(true);
@@ -40,7 +46,7 @@ function App() {
40
  "Content-Type": "application/json"
41
  },
42
  body: JSON.stringify({
43
- subject
44
  })
45
  });
46
  console.log(res)
@@ -127,8 +133,19 @@ function App() {
127
  ))}
128
  </SelectContent>
129
  </Select>
 
 
 
 
 
 
 
 
 
 
 
130
  <button className="h-10 bg-purple-500 rounded px-2.5 py-1 mt-5 hover:brightness-110 transition-all flex items-center justify-center disabled:hover:brightness-75 disabled:brightness-75"
131
- disabled={isLoading || !subject}>
132
  {
133
  isLoading ?
134
  <div className="animate-spin h-5 w-5 border-2 border-white border-r-purple-500 rounded-full"></div>
 
13
  function App() {
14
 
15
  const [subject, setSubject] = useState("");
16
+ const [difficultie, setDifficultie] = useState("");
17
 
18
  const [menuState, setMenuState] = useState(true);
19
  const [isLoading, setIsLoading] = useState(false);
 
31
  { label: "Biofísica", value: "biofisica" },
32
  ]
33
 
34
+ const difficulties = [
35
+ { label: "Fácil", value: "facil" },
36
+ { label: "Intermediária", value: "intermediaria" },
37
+ { label: "Difícil", value: "dificil" },
38
+ ]
39
  const handleSubmit = async (e) => {
40
  e.preventDefault();
41
  setIsLoading(true);
 
46
  "Content-Type": "application/json"
47
  },
48
  body: JSON.stringify({
49
+ subject, difficultie
50
  })
51
  });
52
  console.log(res)
 
133
  ))}
134
  </SelectContent>
135
  </Select>
136
+ <h2>Selecione uma dificuldade:</h2>
137
+ <Select onValueChange={value => setDifficultie(value)}>
138
+ <SelectTrigger className="w-[180px]">
139
+ <SelectValue placeholder="Dificuldade" />
140
+ </SelectTrigger>
141
+ <SelectContent onChange={e => console.log(e)}>
142
+ {difficulties && difficulties.map(({ label, value }) => (
143
+ <SelectItem key={value} value={value}>{label}</SelectItem>
144
+ ))}
145
+ </SelectContent>
146
+ </Select>
147
  <button className="h-10 bg-purple-500 rounded px-2.5 py-1 mt-5 hover:brightness-110 transition-all flex items-center justify-center disabled:hover:brightness-75 disabled:brightness-75"
148
+ disabled={isLoading || !subject || !difficultie}>
149
  {
150
  isLoading ?
151
  <div className="animate-spin h-5 w-5 border-2 border-white border-r-purple-500 rounded-full"></div>
web/tsconfig.json CHANGED
@@ -1,10 +1,10 @@
1
  {
2
- "compilerOptions": {
3
- "baseUrl": ".",
4
- "paths": {
5
- "@/*": [
6
- "./src/*"
7
- ]
8
- }
9
  }
10
  }
 
 
1
  {
2
+ "compilerOptions": {
3
+ "baseUrl": ".",
4
+ "paths": {
5
+ "@/*": [
6
+ "./src/*"
7
+ ]
 
8
  }
9
  }
10
+ }