#!/bin/bash
#
# Minimal LLM setup: verifies Docker prerequisites, starts the Ollama +
# Streamlit stack via docker compose, and waits for both services to
# come up before printing access details.

# Abort on the first unhandled command failure.
set -e

# ANSI color codes for status output; NC resets to the terminal default.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

echo -e "${BLUE}🚀 Minimal LLM Setup${NC}"
echo "===================="
|
|
|
|
|
# Ensure Docker itself is installed before doing anything else.
if ! command -v docker &> /dev/null; then
    echo -e "${RED}❌ Docker not found. Please install Docker first.${NC}" >&2
    exit 1
fi

# The compose v2 plugin ("docker compose") is also required.
if ! docker compose version &> /dev/null; then
    echo -e "${RED}❌ Docker Compose not found. Please install Docker Compose.${NC}" >&2
    exit 1
fi
|
|
|
|
|
# Probe for GPU support by running nvidia-smi inside a throwaway CUDA
# container; on failure (no GPU, no nvidia-container-toolkit) fall back
# to CPU mode. NOTE(review): the previous tag "11.8-base-ubuntu22.04"
# is not a published nvidia/cuda tag — the full version "11.8.0-..." is.
if docker run --rm --gpus all nvidia/cuda:11.8.0-base-ubuntu22.04 nvidia-smi &> /dev/null; then
    echo -e "${GREEN}✅ NVIDIA Docker detected${NC}"
    GPU_AVAILABLE=true
else
    echo -e "${YELLOW}⚠️  No GPU detected, running on CPU${NC}"
    GPU_AVAILABLE=false
fi
|
|
|
|
|
# Create the expected project layout (idempotent thanks to mkdir -p).
echo -e "${BLUE}📁 Creating project structure...${NC}"
mkdir -p app

# Placeholder step: the Streamlit entry point is NOT generated here —
# the user is expected to copy main.py into app/ themselves.
if [ ! -f "app/main.py" ]; then
    echo -e "${BLUE}📝 Creating Streamlit app...${NC}"
    echo "# Streamlit app created. Copy the main.py content here."
fi
|
|
|
|
|
# On CPU-only hosts, comment out the GPU reservation stanza in
# docker-compose.yml so "docker compose up" does not fail on the
# nvidia device request.
if [ "$GPU_AVAILABLE" = false ]; then
    echo -e "${YELLOW}🔧 Configuring for CPU mode...${NC}"
    if [ -f docker-compose.yml ]; then
        # Single sed invocation with all edits instead of seven processes;
        # "|| true" keeps set -e from aborting if the file is read-only.
        sed -i \
            -e 's/deploy:/# deploy:/g' \
            -e 's/resources:/# resources:/g' \
            -e 's/reservations:/# reservations:/g' \
            -e 's/devices:/# devices:/g' \
            -e 's/- driver: nvidia/# - driver: nvidia/g' \
            -e 's/count: 1/# count: 1/g' \
            -e 's/capabilities: \[gpu\]/# capabilities: [gpu]/g' \
            docker-compose.yml || true
    else
        echo -e "${YELLOW}docker-compose.yml not found; skipping CPU rewrite${NC}" >&2
    fi
fi
|
|
|
|
|
# Build images (if needed) and start all services detached.
echo -e "${BLUE}🔨 Building and starting services...${NC}"
docker compose up --build -d

echo -e "${BLUE}⏳ Waiting for services to start...${NC}"
|
|
|
|
|
# Poll the Ollama API until it answers, up to 30 tries x 2 s = 60 s.
echo -n "Waiting for Ollama"
ollama_ready=false
for i in {1..30}; do
    if curl -s http://localhost:11434/api/tags > /dev/null 2>&1; then
        echo -e "${GREEN} ✅${NC}"
        ollama_ready=true
        break
    fi
    echo -n "."
    sleep 2
done
# Previously the loop exhausted silently; now warn on timeout.
if [ "$ollama_ready" = false ]; then
    echo -e "${YELLOW} timed out — check 'docker compose logs ollama'${NC}"
fi
|
|
|
|
|
# Poll the Streamlit health endpoint, up to 30 tries x 2 s = 60 s.
echo -n "Waiting for Streamlit"
streamlit_ready=false
for i in {1..30}; do
    if curl -s http://localhost:8501/_stcore/health > /dev/null 2>&1; then
        echo -e "${GREEN} ✅${NC}"
        streamlit_ready=true
        break
    fi
    echo -n "."
    sleep 2
done
# Previously the loop exhausted silently; now warn on timeout.
if [ "$streamlit_ready" = false ]; then
    echo -e "${YELLOW} timed out — check 'docker compose logs' for the UI service${NC}"
fi
|
|
|
|
|
# Show the tail of the model-download container's log. "|| true" because
# the container may not exist yet (or at all) and that must not abort
# the script under set -e; docker's own error is silenced to stderr-null.
echo -e "${BLUE}📥 Checking model download...${NC}"
docker logs model-setup 2>/dev/null | tail -5 || true
|
|
|
# Final summary: endpoints, handy commands, and a copy-pasteable API test.
echo
echo -e "${GREEN}🎉 Setup completed!${NC}"
echo "==================="
echo
echo -e "${BLUE}🌐 Access points:${NC}"
echo "  • Streamlit UI: http://localhost:8501"
echo "  • Ollama API: http://localhost:11434"
echo
echo -e "${BLUE}📋 Useful commands:${NC}"
echo "  • Check logs: docker compose logs -f"
echo "  • Stop services: docker compose down"
echo "  • Restart: docker compose restart"
echo "  • Shell access: docker exec -it ollama-engine bash"
echo
echo -e "${BLUE}🧪 Test API:${NC}"
# Quoted heredoc: nothing inside is expanded, so the quotes and the
# trailing backslashes of the curl example print verbatim (replaces the
# fragile '"'"' quoting dance of the original echo lines).
cat <<'EOF'
  curl -X POST http://localhost:11434/api/generate \
    -H "Content-Type: application/json" \
    -d '{"model": "mistral:7b-instruct", "prompt": "Hello!"}'
EOF
|
|
|
# Auto-open browser (optional). Pick the platform opener once instead of
# duplicating the whole prompt for Linux (xdg-open) and macOS (open);
# silently skipped when neither command is available.
opener=""
if command -v xdg-open &> /dev/null; then
    opener=xdg-open
elif command -v open &> /dev/null; then
    opener=open
fi

if [ -n "$opener" ]; then
    echo
    # "|| true": with stdin at EOF (non-interactive run) read returns
    # non-zero, which would otherwise kill the script under set -e.
    read -p "Open browser automatically? (y/N): " -n 1 -r || true
    echo
    if [[ $REPLY =~ ^[Yy]$ ]]; then
        "$opener" http://localhost:8501
    fi
fi

echo
echo -e "${GREEN}Happy chatting! 🤖${NC}"