ankurmondal committed
Commit 8205000
1 Parent(s): 51adddb

updated the files

Files changed (4)
  1. Dockerfile +9 -10
  2. README.md +3 -3
  3. main.py +50 -0
  4. requirements.txt +2 -1
Dockerfile CHANGED
@@ -1,28 +1,27 @@
 # Use the official Python 3.9 image
 FROM python:3.9
 
 # Set the working directory to /code
 WORKDIR /code
 
 # Copy the current directory contents into the container at /code
 COPY ./requirements.txt /code/requirements.txt
 
 # Install requirements.txt
 RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt
 
 # Set up a new user named "user" with user ID 1000
 RUN useradd -m -u 1000 user
 # Switch to the "user" user
 USER user
 # Set home to the user's home directory
 ENV HOME=/home/user \
     PATH=/home/user/.local/bin:$PATH
 
 # Set the working directory to the user's home directory
 WORKDIR $HOME/app
 
 # Copy the current directory contents into the container at $HOME/app setting the owner to the user
 COPY --chown=user . $HOME/app
 
-# Start the FastAPI app on port 7860, the default port expected by Spaces
-CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
+CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
README.md CHANGED
@@ -1,8 +1,8 @@
 ---
 title: Text Generation
-emoji: 💻
-colorFrom: purple
-colorTo: green
+emoji: 🌍
+colorFrom: green
+colorTo: yellow
 sdk: docker
 pinned: false
 license: mit
main.py ADDED
@@ -0,0 +1,50 @@
+from fastapi import FastAPI
+# from transformers import pipeline
+from txtai.embeddings import Embeddings
+from txtai.pipeline import Extractor
+
+# NOTE - we configure docs_url to serve the interactive Docs at the root path
+# of the app. This way, we can use the docs as a landing page for the app on Spaces.
+app = FastAPI(docs_url="/")
+
+# Create embeddings model with content support
+embeddings = Embeddings({"path": "sentence-transformers/all-MiniLM-L6-v2", "content": True})
+embeddings.load('index')
+
+# Create extractor instance
+extractor = Extractor(embeddings, "google/flan-t5-base")
+
+# pipe = pipeline("text2text-generation", model="google/flan-t5-small")
+
+
+# @app.get("/generate")
+# def generate(text: str):
+#     """
+#     Using the text2text-generation pipeline from `transformers`, generate text
+#     from the given input text. The model used is `google/flan-t5-small`, which
+#     can be found [here](https://huggingface.co/google/flan-t5-small).
+#     """
+#     output = pipe(text)
+#     return {"output": output[0]["generated_text"]}
+
+
+def prompt(question):
+    return f"""Answer the following question using only the context below. Say 'no answer' when the question can't be answered.
+    Question: {question}
+    Context: """
+
+
+def search(query, question=None):
+    # Default question to query if empty
+    if not question:
+        question = query
+
+    return extractor([("answer", query, prompt(question), False)])[0][1]
+
+
+@app.get("/rag")
+def rag(question: str):
+    # question = "what is the document about?"
+    answer = search(question)
+    # print(question, answer)
+    return {answer}
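main.py calls embeddings.load('index') at import time, so a prebuilt txtai index directory named "index" has to ship with the Space. A minimal sketch of how such an index could be produced offline is shown below; the build_index.py name and the documents list are hypothetical, only the model id and the "index" path come from this commit.

# build_index.py -- hypothetical offline helper, not part of this commit
from txtai.embeddings import Embeddings

# Same configuration as main.py: MiniLM vectors with content storage enabled
embeddings = Embeddings({"path": "sentence-transformers/all-MiniLM-L6-v2", "content": True})

# Placeholder corpus; in practice this is the text the /rag endpoint should answer questions about
documents = [
    "txtai builds an embeddings database for semantic search.",
    "The FastAPI app serves a retrieval-augmented /rag endpoint on port 7860.",
]

# Index (id, text, tags) tuples and save to the "index" directory that main.py loads
embeddings.index([(uid, text, None) for uid, text in enumerate(documents)])
embeddings.save("index")

If that directory is missing from the repository, embeddings.load('index') would be expected to fail when the container starts.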
requirements.txt CHANGED
@@ -3,4 +3,5 @@ requests==2.27.*
 uvicorn[standard]==0.17.*
 sentencepiece==0.1.*
 torch==1.11.*
-transformers==4.*
+transformers==4.*
+txtai==6.0.*
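With txtai pinned and the Dockerfile CMD now pointing at main:app, the Space exposes the /rag endpoint on port 7860. A quick smoke test with the already-pinned requests package might look like the following; the localhost URL assumes a local `uvicorn main:app --host 0.0.0.0 --port 7860` run, and the question string is only an example.

import requests

# Call the /rag endpoint; FastAPI maps the "question" query parameter to rag(question)
resp = requests.get(
    "http://localhost:7860/rag",
    params={"question": "What is the document about?"},
)

# rag() returns a set ({answer}), which FastAPI serializes as a one-element JSON array
print(resp.json())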