Update prebuilt-binaries file-dl naming according to repo - gguf-imat.py

#7
Files changed (1)
  1. gguf-imat.py +3 -3
gguf-imat.py CHANGED
@@ -30,12 +30,12 @@ def download_llama_release():
     response = requests.get(latest_release_url)
     if response.status_code == 200:
         latest_release_tag = response.url.split("/")[-1]
-        download_url = f"https://github.com/ggerganov/llama.cpp/releases/download/{latest_release_tag}/llama-{latest_release_tag}-bin-win-cublas-cu12.2.0-x64.zip"
+        download_url = f"https://github.com/ggerganov/llama.cpp/releases/download/{latest_release_tag}/llama-{latest_release_tag}-bin-win-cuda-cu12.2.0-x64.zip"
         response = requests.get(download_url)
         if response.status_code == 200:
-            with open(f"llama-{latest_release_tag}-bin-win-cublas-cu12.2.0-x64.zip", "wb") as f:
+            with open(f"llama-{latest_release_tag}-bin-win-cuda-cu12.2.0-x64.zip", "wb") as f:
                 f.write(response.content)
-            with zipfile.ZipFile(f"llama-{latest_release_tag}-bin-win-cublas-cu12.2.0-x64.zip", "r") as zip_ref:
+            with zipfile.ZipFile(f"llama-{latest_release_tag}-bin-win-cuda-cu12.2.0-x64.zip", "r") as zip_ref:
                 zip_ref.extractall(os.path.join(base_dir, "bin"))
             print("Downloading latest 'llama.cpp' prebuilt Windows binaries...")
             print("Download and extraction completed successfully.")