import torch
from transformers import AutoTokenizer, AutoConfig, AutoModelForCausalLM


class RemoteModelProxy:
    def __init__(self, model_id):
        self.model_id = model_id
        self.tokenizer = AutoTokenizer.from_pretrained(model_id, trust_remote_code=True)
        # Load the configuration and drop any quantization settings so the model
        # can be loaded without a quantization backend.
        config = AutoConfig.from_pretrained(model_id, trust_remote_code=True)
        if hasattr(config, "quantization_config"):
            del config.quantization_config
        self.config = config
        self.model = AutoModelForCausalLM.from_pretrained(
            model_id, config=self.config, trust_remote_code=True
        )

    def classify_text(self, text):
        inputs = self.tokenizer(text, return_tensors="pt", padding=True, truncation=True)
        # The forward pass returns a model output object, not raw logits; use the
        # logits of the final token so softmax/argmax operate on a single distribution.
        with torch.no_grad():
            outputs = self.model(**inputs)
        logits = outputs.logits[:, -1, :]
        probabilities = torch.softmax(logits, dim=-1).tolist()[0]
        predicted_class = torch.argmax(logits, dim=-1).item()
        return {
            "Predicted Class": predicted_class,
            "Probabilities": probabilities,
        }


if __name__ == "__main__":
    model_id = "deepseek-ai/DeepSeek-V3"
    proxy = RemoteModelProxy(model_id)
    result = proxy.classify_text("Your input text here")
    print(result)