Upload llama.cpp/pyproject.toml with huggingface_hub
llama.cpp/pyproject.toml +44 -0
llama.cpp/pyproject.toml
ADDED
@@ -0,0 +1,44 @@
+[tool.poetry]
+name = "llama-cpp-scripts"
+version = "0.0.0"
+description = "Scripts that ship with llama.cpp"
+authors = ["GGML <ggml@ggml.ai>"]
+readme = "README.md"
+homepage = "https://ggml.ai"
+repository = "https://github.com/ggerganov/llama.cpp"
+keywords = ["ggml", "gguf", "llama.cpp"]
+packages = [{ include = "*.py", from = "." }]
+classifiers = [
+    "Programming Language :: Python :: 3",
+    "License :: OSI Approved :: MIT License",
+    "Operating System :: OS Independent",
+]
+
+[tool.poetry.dependencies]
+python = ">=3.9"
+numpy = "^1.25.0"
+sentencepiece = ">=0.1.98,<=0.2.0"
+transformers = ">=4.35.2,<5.0.0"
+protobuf = ">=4.21.0,<5.0.0"
+gguf = { path = "./gguf-py" }
+torch = { version = "^2.2.0", source = "pytorch" }
+
+[tool.poetry.dev-dependencies]
+pytest = "^5.2"
+
+
+# Force wheel + cpu
+# For discussion and context see https://github.com/python-poetry/poetry#6409
+[[tool.poetry.source]]
+name = "pytorch"
+url = "https://download.pytorch.org/whl/cpu"
+priority = "explicit"
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
+
+[tool.poetry.scripts]
+llama-convert-hf-to-gguf = "convert_hf_to_gguf:main"
+llama-convert-llama-ggml-to-gguf = "convert_llama_ggml_to_gguf:main"
+llama-ggml-vk-generate-shaders = "ggml_vk_generate_shaders:main"
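The [tool.poetry.scripts] table maps console commands to "module:function" entry points, so installing this package (for example with `poetry install` or `pip install .`) puts `llama-convert-hf-to-gguf` and the other commands on PATH, each invoking the `main()` of a top-level script picked up by the `packages` glob. As a rough sketch of the convention such an entry-point module follows, here is a hypothetical `example_script.py` (not part of llama.cpp; the real modules are `convert_hf_to_gguf.py` and friends in the repository):

    # example_script.py -- hypothetical module, for illustration only.
    # A Poetry entry like  llama-example = "example_script:main"  would
    # generate a console command that imports this module and calls main().
    import argparse
    import sys


    def main() -> None:
        # The generated console-script wrapper calls main() with no arguments;
        # CLI options are read from sys.argv via argparse as usual.
        parser = argparse.ArgumentParser(description="Example llama.cpp helper script")
        parser.add_argument("model", help="path to the input model")
        args = parser.parse_args()
        print(f"processing {args.model}")


    if __name__ == "__main__":
        # Also allow running the module directly, not only via the console script.
        sys.exit(main())

The same layout explains the other pieces of this file: the pinned "pytorch" source with priority = "explicit" is only used for the `torch` dependency that names it, forcing CPU-only wheels from download.pytorch.org as noted in the in-file comment.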