limcheekin committed
Commit ed04c7a • Parent(s): e1911b7

feat: updated docker file to use the open-text-embeddings package from pypi and clean up
Files changed:
- Dockerfile +2 -4
- index.html +1 -1
- open/__init__.py +0 -0
- open/text/embeddings/server/__main__.py +0 -37
- open/text/embeddings/server/app.py +0 -116
- server-requirements.txt +0 -5
Dockerfile
CHANGED

@@ -19,7 +19,7 @@ RUN chmod +x *.sh && \
 
 # Stage 3 - final runtime image
 # Grab a fresh copy of the Python image
-FROM python:3.
+FROM python:3.11-slim
 
 # Include global args in this stage of the build
 ARG MODEL
@@ -31,11 +31,9 @@ ENV HOST=0.0.0.0
 ENV PORT=7860
 
 COPY --from=build-image ${MODEL} ${MODEL}
-COPY open/text/embeddings ./open/text/embeddings
-COPY server-requirements.txt ./
 COPY ./start_server.sh ./
 COPY ./index.html ./
-RUN pip install --no-cache-dir -r server-requirements.txt && \
+RUN pip install --no-cache-dir open-text-embeddings[server] && \
     chmod +x ./start_server.sh
 
 # Expose a port for the server
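For context, a minimal sketch of building and running the resulting image locally, assuming the ARG MODEL and ENV PORT=7860 shown above; the model name and image tag are illustrative and not part of this commit:

```bash
# Illustrative only: pass a model via the MODEL build arg and publish port 7860,
# matching ARG MODEL and ENV PORT=7860 in the Dockerfile above.
docker build --build-arg MODEL=BAAI/bge-large-en -t open-text-embeddings-space .
docker run --rm -p 7860:7860 open-text-embeddings-space
```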
index.html
CHANGED

@@ -7,7 +7,7 @@
     <h1>BAAI/bge-large-en OpenAI API-Compatible Endpoint</h1>
     <p>
       With the utilization of the
-      <a href="https://
+      <a href="https://pypi.org/project/open-text-embeddings/"
        >open-text-embeddings</a
       >
       package, we are excited to introduce the text embeddings model hosted in
open/__init__.py
DELETED
File without changes
open/text/embeddings/server/__main__.py
DELETED

@@ -1,37 +0,0 @@
-"""FastAPI server for open-text-embeddings.
-
-To run this example:
-
-```bash
-pip install -r --no-cache-dir server-requirements.txt
-```
-
-Then run:
-```
-MODEL=intfloat/e5-large-v2 python -m open.text.embeddings.server
-```
-
-Then visit http://localhost:8000/docs to see the interactive API docs.
-
-"""
-import uvicorn
-from fastapi.responses import HTMLResponse
-from open.text.embeddings.server.app import create_app
-import os
-
-app = create_app()
-
-# Read the content of index.html once and store it in memory
-with open("index.html", "r") as f:
-    content = f.read()
-
-
-@app.get("/", response_class=HTMLResponse)
-async def read_items():
-    return content
-
-if __name__ == "__main__":
-    uvicorn.run(app,
-                host=os.environ["HOST"],
-                port=int(os.environ["PORT"])
-                )
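Since this module now ships with the PyPI package installed in the Dockerfile above, the deleted instructions translate to roughly the following. This is a sketch combining the old docstring's run command with the new pip install line, and assumes the published package exposes the same server module:

```bash
# Assumption: open-text-embeddings[server] from PyPI provides the same
# open.text.embeddings.server entry point that was deleted from this repo.
pip install "open-text-embeddings[server]"
MODEL=intfloat/e5-large-v2 python -m open.text.embeddings.server
```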
open/text/embeddings/server/app.py
DELETED

@@ -1,116 +0,0 @@
-
-from typing import List, Optional, Union
-from starlette.concurrency import run_in_threadpool
-from fastapi import FastAPI, APIRouter
-from fastapi.middleware.cors import CORSMiddleware
-from pydantic import BaseModel, Field
-from langchain.embeddings import HuggingFaceEmbeddings
-from langchain.embeddings import HuggingFaceInstructEmbeddings
-from langchain.embeddings import HuggingFaceBgeEmbeddings
-import os
-
-router = APIRouter()
-
-DEFAULT_MODEL_NAME = "intfloat/e5-large-v2"
-E5_EMBED_INSTRUCTION = "passage: "
-E5_QUERY_INSTRUCTION = "query: "
-BGE_EN_QUERY_INSTRUCTION = "Represent this sentence for searching relevant passages: "
-BGE_ZH_QUERY_INSTRUCTION = "为这个句子生成表示以用于检索相关文章:"
-
-
-def create_app():
-    app = FastAPI(
-        title="Open Text Embeddings API",
-        version="0.0.2",
-    )
-    app.add_middleware(
-        CORSMiddleware,
-        allow_origins=["*"],
-        allow_credentials=True,
-        allow_methods=["*"],
-        allow_headers=["*"],
-    )
-    app.include_router(router)
-
-    return app
-
-
-class CreateEmbeddingRequest(BaseModel):
-    model: Optional[str] = Field(
-        description="The model to use for generating embeddings.", default=None)
-    input: Union[str, List[str]] = Field(description="The input to embed.")
-    user: Optional[str] = Field(default=None)
-
-    model_config = {
-        "json_schema_extra": {
-            "examples": [
-                {
-                    "input": "The food was delicious and the waiter...",
-                }
-            ]
-        }
-    }
-
-
-class Embedding(BaseModel):
-    embedding: List[float]
-
-
-class CreateEmbeddingResponse(BaseModel):
-    data: List[Embedding]
-
-
-embeddings = None
-
-
-def _create_embedding(
-    model: Optional[str],
-    input: Union[str, List[str]]
-):
-    global embeddings
-
-    if embeddings is None:
-        if model and model != "text-embedding-ada-002":
-            model_name = model
-        else:
-            model_name = os.environ["MODEL"]
-        print("Loading model:", model_name)
-        encode_kwargs = {
-            "normalize_embeddings": bool(os.environ.get("NORMALIZE_EMBEDDINGS", ""))
-        }
-        print("encode_kwargs", encode_kwargs)
-        if "e5" in model_name:
-            embeddings = HuggingFaceInstructEmbeddings(model_name=model_name,
-                                                       embed_instruction=E5_EMBED_INSTRUCTION,
-                                                       query_instruction=E5_QUERY_INSTRUCTION,
-                                                       encode_kwargs=encode_kwargs)
-        elif model_name.startswith("BAAI/bge-") and model_name.endswith("-en"):
-            embeddings = HuggingFaceBgeEmbeddings(model_name=model_name,
-                                                  query_instruction=BGE_EN_QUERY_INSTRUCTION,
-                                                  encode_kwargs=encode_kwargs)
-        elif model_name.startswith("BAAI/bge-") and model_name.endswith("-zh"):
-            embeddings = HuggingFaceBgeEmbeddings(model_name=model_name,
-                                                  query_instruction=BGE_ZH_QUERY_INSTRUCTION,
-                                                  encode_kwargs=encode_kwargs)
-        else:
-            embeddings = HuggingFaceEmbeddings(
-                model_name=model_name, encode_kwargs=encode_kwargs)
-
-    if isinstance(input, str):
-        return CreateEmbeddingResponse(data=[Embedding(embedding=embeddings.embed_query(input))])
-    else:
-        data = [Embedding(embedding=embedding)
-                for embedding in embeddings.embed_documents(input)]
-        return CreateEmbeddingResponse(data=data)
-
-
-@router.post(
-    "/v1/embeddings",
-    response_model=CreateEmbeddingResponse,
-)
-async def create_embedding(
-    request: CreateEmbeddingRequest
-):
-    return await run_in_threadpool(
-        _create_embedding, **request.dict(exclude={"user"})
-    )
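The deleted app defined an OpenAI-compatible /v1/embeddings route that accepts a string or a list of strings as input. A minimal sketch of calling it, assuming the server is reachable on the HOST/PORT set in the Dockerfile (0.0.0.0:7860); the input text is just the example from the request schema above:

```bash
# Per CreateEmbeddingResponse, the reply is JSON shaped like
# {"data": [{"embedding": [...]}]}.
curl -s http://localhost:7860/v1/embeddings \
  -H "Content-Type: application/json" \
  -d '{"input": "The food was delicious and the waiter..."}'
```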
server-requirements.txt
DELETED

@@ -1,5 +0,0 @@
-fastapi
-sse-starlette
-sentence_transformers
-langchain
-uvicorn