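# Python 3.10 image with a Rust toolchain and llama-cpp-python built from source.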
FROM python:3.10 |
|
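# Non-interactive apt, the Rust toolchain on PATH, and Hugging Face caches under /app/cache.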
ENV DEBIAN_FRONTEND=noninteractive \
    RUSTUP_HOME=/root/.rustup \
    CARGO_HOME=/root/.cargo \
    PATH=/root/.cargo/bin:$PATH \
    TRANSFORMERS_CACHE=/app/cache \
    HF_HOME=/app/cache
|
WORKDIR /app |
|
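# Build tools, CMake/Ninja, and OpenBLAS for compiling native extensions,
# plus the stable Rust toolchain installed via rustup.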
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential cmake git curl wget ninja-build libgomp1 ca-certificates \
    gcc g++ libffi-dev libgcc-s1 libstdc++6 libopenblas-dev \
    && rm -rf /var/lib/apt/lists/* \
    && curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \
    && rustup default stable
|
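# World-writable cache directory for Hugging Face downloads, regardless of the runtime user.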
RUN mkdir -p /app/cache && chmod -R 777 /app/cache |
|
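# Install Python dependencies; llama-cpp-python is dropped here because it is built from source below.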
COPY requirements.txt . |
RUN sed -i '/llama-cpp-python/d' requirements.txt \
    && pip install --no-cache-dir -r requirements.txt
|
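# Build and install llama-cpp-python from source.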
RUN git clone --recursive https://github.com/abetlen/llama-cpp-python.git /tmp/llama-cpp-python \
    && cd /tmp/llama-cpp-python \
    && git submodule update --init --recursive \
    && python -m pip install --no-cache-dir . \
    && rm -rf /tmp/llama-cpp-python
|
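# Application code and CV data files.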
COPY app.py cv_embeddings.json cv_text.txt ./ |
|
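# Port the application listens on (matches the uvicorn --port flag below).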
EXPOSE 7860 |
|
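# Serve the ASGI app (app:app) with uvicorn, listening on all interfaces.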
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"] |