curr working version with ReadME update

Files changed:
- README.md +18 -0
- Tester.py +0 -32
- app.py +4 -3
- 新建 Text Document.txt +0 -0
README.md CHANGED

@@ -10,4 +10,22 @@ pinned: false
 license: mit
 ---
 
+## Files
+
+Everything running right now is simply Google Gemini in app.py
+
+model.py - some models we have tried but abandoned at last
+
+app.py - current working version supported by Hugging Face Space
+
+# Code
+
+Basically:
+1. Call Gemini
+2. Add it into the interface
+3. Run the model
+4. Clear the response
+5. Output
+
+
 Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
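The five numbered steps added to the README map onto app.py further down in this commit. Below is a minimal sketch of that flow; only gradio_interface, genai.configure, and the gemini-1.5-flash model are visible in the app.py hunks, so the generate_content call and the gr.Interface wiring here are assumptions for illustration, not the exact committed code.

import google.generativeai as genai
import gradio as gr

# 1. Call Gemini: configure the client and select the model used in app.py
genai.configure(api_key="YOUR_API_KEY")  # placeholder; the committed app.py hard-codes its own key
model = genai.GenerativeModel("gemini-1.5-flash")

def gradio_interface(question):
    # 3. Run the model on the user's question (assumed generate_content call)
    answer = model.generate_content(question)
    try:
        # 4. Clear the response: pull the text out of the first candidate,
        #    the same access pattern shown in the app.py hunk below
        content = answer.candidates[0].content.parts[0].text
        cleaned_content = content.replace("\n", ", ").strip()
    except (AttributeError, IndexError):
        cleaned_content = "No usable response."
    # 5. Output the cleaned string back to the UI
    return cleaned_content

# 2. Add it into the interface: a plain text-in/text-out Gradio app (assumed wiring)
demo = gr.Interface(fn=gradio_interface, inputs="text", outputs="text")

if __name__ == "__main__":
    demo.launch()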
Tester.py DELETED

@@ -1,32 +0,0 @@
-import subprocess
-
-file_path = "model.py"
-
-# Self-defined inputs for each iteration, donno if it'll burn my computer
-inputs = [
-    {"T_o": "What can I have for lunch today? I enjoys eating human beings and metals"},
-    {"T_o": "What can I drink after exercising?"},
-    {"T_o": "What can I do to improve my health?"},
-    {"T_o": "What should I do in Chinese New Year?"},
-]
-
-def run_with_inputs():
-    for i, config in enumerate(inputs):
-        print(f"Running test {i + 1} with input: {config['T_o']}")
-
-        command = [
-            "python",
-            file_path,
-            f"--T_o", config["T_o"]
-        ]
-
-        try:
-            result = subprocess.run(command, capture_output=True, text=True, check=True)
-            print(f"Output for test {i + 1}:")
-            print(result.stdout)
-        except subprocess.CalledProcessError as e:
-            print(f"Error running test {i + 1}:")
-            print(e.stderr)
-
-if __name__ == "__main__":
-    run_with_inputs()
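The deleted Tester.py drove model.py as a subprocess and passed each prompt through a --T_o flag. model.py itself is not part of this diff, so the following is only a hypothetical sketch of the argument parsing that call would require, not the project's actual model.py.

import argparse

def main():
    # Hypothetical CLI matching the --T_o flag Tester.py passed
    parser = argparse.ArgumentParser(description="answer a single question")
    parser.add_argument("--T_o", type=str, required=True, help="user question to answer")
    args = parser.parse_args()

    # The real model.py presumably feeds args.T_o to a language model;
    # here we only echo it so the sketch stays self-contained.
    print(f"Received question: {args.T_o}")

if __name__ == "__main__":
    main()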
app.py CHANGED

@@ -22,9 +22,9 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
 print("Loading model...")
 #generator = LlamaModel() #can work, but super slow ~2min
 #generator = GPT2Model() #can work, but not showing correct output
-#generator = GPTNeoXModel()
-#generator = DistilGPT2Model()
-#generator = LLaMA2Model()
+#generator = GPTNeoXModel() #storage requirements
+#generator = DistilGPT2Model() #Can work, but not showing correct output
+#generator = LLaMA2Model() #can work, too slow though
 
 genai.configure(api_key="AIzaSyAJF6isCNu6XfGA5TBFddXu9BTfAKaPF30")
 model = genai.GenerativeModel("gemini-1.5-flash")
@@ -41,6 +41,7 @@ def gradio_interface(question):
     try:
         # Access the first candidate's content using attributes
         content = answer.candidates[0].content.parts[0].text
+
         # Remove newline characters
         cleaned_content = content.replace("\n", ", ").strip()
     except (AttributeError, IndexError) as e:
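One thing the app.py hunk makes visible is a Gemini API key committed in plain text. On a Hugging Face Space, a common alternative (assumed here, not part of this commit) is to store the key as a repository secret and read it from the environment; the secret name GEMINI_API_KEY below is an example, not something the project defines.

import os
import google.generativeai as genai

# Assumed secret name GEMINI_API_KEY, exposed to the Space as an environment variable
api_key = os.environ.get("GEMINI_API_KEY")
if not api_key:
    raise RuntimeError("GEMINI_API_KEY is not set; add it as a Space secret")

genai.configure(api_key=api_key)
model = genai.GenerativeModel("gemini-1.5-flash")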
新建 Text Document.txt DELETED

File without changes