# Install Python from a pre-downloaded offline installer
# (the download step is kept commented out for reference).
Write-Host "Installing Python..."
# $pythonInstaller = "https://www.python.org/ftp/python/3.10.6/python-3.10.6-amd64.exe"
$pythonInstallerPath = "$PWD\build\windows\python-3.10.6-amd64.exe"
# Invoke-WebRequest -Uri $pythonInstaller -OutFile $pythonInstallerPath
# Fail fast with a clear message instead of letting Start-Process error out
# on a missing file.
if (-not (Test-Path $pythonInstallerPath)) {
    throw "Python installer not found at $pythonInstallerPath"
}
Start-Process -FilePath $pythonInstallerPath -ArgumentList "/quiet InstallAllUsers=1 PrependPath=1" -Wait

# Install Git from a pre-downloaded offline installer
# (the download step is kept commented out for reference).
Write-Host "Installing Git..."
# $gitInstaller = "https://github.com/git-for-windows/git/releases/download/v2.34.1.windows.1/Git-2.34.1-64-bit.exe"
$gitInstallerPath = "$PWD\build\windows\Git-2.34.1-64-bit.exe"
# Invoke-WebRequest -Uri $gitInstaller -OutFile $gitInstallerPath
# Fail fast with a clear message instead of letting Start-Process error out
# on a missing file.
if (-not (Test-Path $gitInstallerPath)) {
    throw "Git installer not found at $gitInstallerPath"
}
Start-Process -FilePath $gitInstallerPath -ArgumentList "/VERYSILENT /NORESTART" -Wait

# Install stable-diffusion-webui dependencies (repo assumed pre-cloned;
# the clone step is kept commented out for reference).
Write-Host "Installing stable-diffusion-webui..."
# $repoUrl = "https://github.com/AUTOMATIC1111/stable-diffusion-webui.git"
$repoPath = "$PWD\build\windows\stable-diffusion-webui"
# git clone $repoUrl $repoPath
# -ErrorAction Stop: by default a failed Set-Location raises a non-terminating
# error and the script would keep running pip in the wrong directory.
Set-Location $repoPath -ErrorAction Stop
# requirements.txt sits at the repo root — $repoPath IS the webui repo,
# so the original "$repoPath\webui\requirements.txt" pointed at a
# non-existent path.
pip install -i https://pypi.tuna.tsinghua.edu.cn/simple -r "$repoPath\requirements.txt"

# Install core ML dependencies (torch/torchvision) from the TUNA mirror.
# Status message restored for consistency with every other section.
Write-Host "Installing dependencies..."
pip install -i https://pypi.tuna.tsinghua.edu.cn/simple torch torchvision

# Start stable-diffusion-webui.
Write-Host "Starting stable-diffusion-webui..."
# launch.py sits at the repo root (not under a "webui" subfolder, which does
# not exist in the clone). Launch it as a background process: a blocking
# foreground call would prevent the rest of this script (the ollama setup
# below) from ever running.
Start-Process -FilePath "python" -ArgumentList "`"$repoPath\launch.py`"", "--skip-torch-cuda-test"
# Start-Process -FilePath "$repoPath\update.bat" -Wait
# NOTE(review): the original also ran webui-user.bat here, which would have
# started the UI a second time after launch.py exited — dropped as redundant.


# Install ollama (binary assumed pre-fetched; the clone step is kept
# commented out for reference).
Write-Host "Installing ollama..."
# $ollamaRepoUrl = "https://github.com/ollama/ollama.git"
# The Set-Location earlier moved the working directory INTO the
# stable-diffusion-webui repo, so the original "$PWD\build\windows" resolved
# to a nested path inside that repo. The intended directory
# (<script root>\build\windows) is the parent of the current location.
$ollamaRepoPath = Split-Path -Parent (Get-Location).Path
# git clone $ollamaRepoUrl $ollamaRepoPath
Set-Location $ollamaRepoPath
# pip install -r requirements.txt

# Install ollama dependencies (currently a no-op: the pip step below is
# commented out, so this section only prints a status message).
Write-Host "Installing ollama dependencies..."
# pip install -r requirements-dev.txt

# Start ollama (the Python entry point is commented out; the server is
# actually launched via ollama.exe further below).
Write-Host "Starting ollama..."
# python ollama.py

# Launch the ollama server in the background. No -Wait: "ollama serve" runs
# indefinitely, so waiting on it would keep the completion message below
# from ever printing.
Start-Process -FilePath "$ollamaRepoPath\ollama.exe" -ArgumentList "serve"


# ollama's default API port is 11434 — the original message pointed users
# at 8080, where nothing listens.
Write-Host "Installation complete! Access stable-diffusion-webui at http://localhost:7860 and ollama at http://localhost:11434"
