BhanuPrakashSamoju committed on
Commit f135bfe
1 Parent(s): a651f4c

Update main.py

Files changed (1)
  1. main.py +19 -42
main.py CHANGED
@@ -1,50 +1,27 @@
  from fastapi import FastAPI
- # from transformers import pipeline
- from txtai.embeddings import Embeddings
- from txtai.pipeline import Extractor
 
  # NOTE - we configure docs_url to serve the interactive Docs at the root path
  # of the app. This way, we can use the docs as a landing page for the app on Spaces.
  app = FastAPI(docs_url="/")
 
- # Create embeddings model with content support
- embeddings = Embeddings({"path": "sentence-transformers/all-MiniLM-L6-v2", "content": True})
- embeddings.load('index')
 
  # Create extractor instance
- extractor = Extractor(embeddings, "google/flan-t5-base")
-
- # pipe = pipeline("text2text-generation", model="google/flan-t5-small")
-
-
- # @app.get("/generate")
- # def generate(text: str):
- #     """
- #     Using the text2text-generation pipeline from `transformers`, generate text
- #     from the given input text. The model used is `google/flan-t5-small`, which
- #     can be found [here](https://huggingface.co/google/flan-t5-small).
- #     """
- #     output = pipe(text)
- #     return {"output": output[0]["generated_text"]}
-
-
- def prompt(question):
-     return f"""Answer the following question using only the context below. Say 'no answer' when the question can't be answered.
- Question: {question}
- Context: """
-
-
- def search(query, question=None):
-     # Default question to query if empty
-     if not question:
-         question = query
-
-     return extractor([("answer", query, prompt(question), False)])[0][1]
-
-
- @app.get("/rag")
- def rag(question: str):
-     # question = "what is the document about?"
-     answer = search(question)
-     # print(question, answer)
-     return {answer}
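For context on what is being removed: the old /rag route answered a GET request by running the txtai Extractor over the loaded index. A minimal client sketch is below; the requests dependency and the http://localhost:7860 base URL are illustrative assumptions, not part of this commit.

import requests  # assumed client-side dependency, for illustration only

# Hypothetical call against the removed /rag endpoint of a locally running Space.
resp = requests.get(
    "http://localhost:7860/rag",
    params={"question": "What is the document about?"},
)

# The old handler returned a one-element set ({answer}), which FastAPI encodes as a
# JSON list, so the generated answer would arrive as the first element of that list.
print(resp.json()[0])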
 
  from fastapi import FastAPI
+ from pydantic import BaseModel
 
  # NOTE - we configure docs_url to serve the interactive Docs at the root path
  # of the app. This way, we can use the docs as a landing page for the app on Spaces.
  app = FastAPI(docs_url="/")
 
+ class ModelOutputEvaluate(BaseModel):
+     question: str
+     answer: str
+     context: str | None = None
+     prompt: str
+
 
  # Create extractor instance
+ @app.post("/evaluate/")
+ async def create_evaluation_scenario(item: ModelOutputEvaluate):
+     output = {
+         "input": item,
+         "score" : "0"
+     }
+     return output
+ # def evaluate(question: str):
+ #     # question = "what is the document about?"
+ #     answer = search(question)
+ #     # print(question, answer)
+ #     return {answer}
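The new /evaluate/ route accepts a JSON body matching ModelOutputEvaluate and, for now, echoes the input alongside a hard-coded score. A rough usage sketch, again assuming requests and a placeholder base URL:

import requests  # assumed client-side dependency, for illustration only

payload = {
    "question": "What is the capital of France?",
    "answer": "Paris",
    "context": "France is a country in Europe. Its capital is Paris.",  # optional field
    "prompt": "Answer using only the provided context.",
}

# Hypothetical POST against the new endpoint of a locally running Space.
resp = requests.post("http://localhost:7860/evaluate/", json=payload)

# The current handler returns {"input": <validated payload>, "score": "0"}.
print(resp.json())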