Update use.py
Browse files
use.py
CHANGED
|
@@ -1,7 +1,7 @@
|
|
| 1 |
import torch
|
| 2 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
| 3 |
|
| 4 |
-def run_pin_inference(prompt, model_id="LH-Tech-AI/Pin-Tiny", subfolder="Pin-25M"):
|
| 5 |
# 1. Device Setup
|
| 6 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
| 7 |
print(f"Using device: {device}")
|
|
@@ -40,7 +40,7 @@ def run_pin_inference(prompt, model_id="LH-Tech-AI/Pin-Tiny", subfolder="Pin-25M
|
|
| 40 |
# --- Sample test ---
|
| 41 |
if __name__ == "__main__":
|
| 42 |
user_query = "What is the weather like today?"
|
| 43 |
-answer = run_pin_inference(user_query, model_id="LH-Tech-AI/Pin", subfolder="Pin-25M")
|
| 44 |
|
| 45 |
print(f"\nUser: {user_query}")
|
| 46 |
print(f"Pin: {answer}")
|
|
|
|
| 1 |
import torch
|
| 2 |
from transformers import AutoModelForCausalLM, AutoTokenizer
|
| 3 |
|
| 4 |
+def run_pin_inference(prompt, model_id="LH-Tech-AI/Pin-Tiny", subfolder="Pin-Ultra-25M"):
|
| 5 |
# 1. Device Setup
|
| 6 |
device = "cuda" if torch.cuda.is_available() else "cpu"
|
| 7 |
print(f"Using device: {device}")
|
|
|
|
| 40 |
# --- Sample test ---
|
| 41 |
if __name__ == "__main__":
|
| 42 |
user_query = "What is the weather like today?"
|
| 43 |
+answer = run_pin_inference(user_query, model_id="LH-Tech-AI/Pin", subfolder="Pin-Ultra-25M")
|
| 44 |
|
| 45 |
print(f"\nUser: {user_query}")
|
| 46 |
print(f"Pin: {answer}")
|