s-ish committed on
Commit
787e6dc
·
verified ·
1 Parent(s): ae521f4

Update Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +27 -9
Dockerfile CHANGED
@@ -1,14 +1,17 @@
1
- FROM ollama/ollama:latest
2
 
3
- # Install Python3, pip, and bash
4
  RUN apt-get update && apt-get install -y \
5
  python3 \
6
  python3-pip \
7
  curl \
8
  bash \
9
- supervisor \
10
  && rm -rf /var/lib/apt/lists/*
11
 
 
 
 
12
  # Set working directory
13
  WORKDIR /app
14
 
@@ -21,10 +24,6 @@ RUN pip3 install --break-system-packages --no-cache-dir -r requirements.txt
21
  # Copy application
22
  COPY app.py .
23
 
24
- # Create supervisor config to manage both services
25
- RUN mkdir -p /etc/supervisor/conf.d && \
26
- echo '[supervisord]\nnodaemon=true\n\n[program:ollama]\ncommand=/bin/ollama serve\nstartsecs=10\nautorestart=true\nstdout_logfile=/dev/stdout\nstdout_logfile_maxbytes=0\n\n[program:fastapi]\ncommand=/usr/bin/python3 /app/app.py\nstartsecs=5\nautorestart=true\nstdout_logfile=/dev/stdout\nstdout_logfile_maxbytes=0' > /etc/supervisor/conf.d/services.conf
27
-
28
  # Expose ports
29
  EXPOSE 7860 11434
30
 
@@ -32,5 +31,24 @@ EXPOSE 7860 11434
32
  ENV OLLAMA_HOST=0.0.0.0:11434
33
  ENV PORT=7860
34
 
35
- # Start supervisor (manages both ollama and fastapi)
36
- CMD ["/usr/bin/supervisord", "-c", "/etc/supervisor/supervisord.conf"]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ FROM ubuntu:24.04
2
 
3
+ # Install everything we need
4
  RUN apt-get update && apt-get install -y \
5
  python3 \
6
  python3-pip \
7
  curl \
8
  bash \
9
+ wget \
10
  && rm -rf /var/lib/apt/lists/*
11
 
12
+ # Download and install Ollama
13
+ RUN curl -fsSL https://ollama.ai/install.sh | sh
14
+
15
  # Set working directory
16
  WORKDIR /app
17
 
 
24
  # Copy application
25
  COPY app.py .
26
 
 
 
 
 
27
  # Expose ports
28
  EXPOSE 7860 11434
29
 
 
31
  ENV OLLAMA_HOST=0.0.0.0:11434
32
  ENV PORT=7860
33
 
34
+ # Simple startup script
35
+ RUN cat > /app/run.sh << 'EOF'
36
+ #!/bin/bash
37
+ set -e
38
+
39
+ echo "Starting Ollama..."
40
+ /usr/bin/ollama serve > /tmp/ollama.log 2>&1 &
41
+ OLLAMA_PID=$!
42
+
43
+ echo "Waiting for Ollama to start..."
44
+ sleep 3
45
+
46
+ echo "Starting FastAPI..."
47
+ exec python3 /app/app.py
48
+ EOF
49
+
50
+ RUN chmod +x /app/run.sh
51
+
52
+ # Override entrypoint completely
53
+ ENTRYPOINT []
54
+ CMD ["/bin/bash", "/app/run.sh"]