K00B404 committed
Commit: b409549
Parent: 4b706ef

Update app.py

Files changed (1):
  1. app.py +8 -13
app.py CHANGED
@@ -4,24 +4,19 @@ import gradio as gr
 #client = InferenceClient("""K00B404/BagOMistral_14X_Coders-ties-7B""")
 from transformers import AutoTokenizer, AutoModelForSequenceClassification
 import torch
+from transformers import AutoModelForCausalLM, AutoTokenizer
 
-# Replace 'your-username' and 'your-model-name' with your actual username and model name
-tokenizer = AutoTokenizer.from_pretrained('K00B404/Merged_Beowolx-CodePro_Medusa2-7B-Mistral-I-v0-2')
-model = AutoModelForSequenceClassification.from_pretrained('K00B404/Merged_Beowolx-CodePro_Medusa2-7B-Mistral-I-v0-2')
+model_id = 'K00B404/Merged_Beowolx-CodePro_Medusa2-7B-Mistral-I-v0-2'
+tokenizer = AutoTokenizer.from_pretrained(model_id)
 
-# Example input sequence
-input_sequence = "This is an example sentence."
+model = AutoModelForCausalLM.from_pretrained(model_id)
 
-# Tokenize the input sequence
-inputs = tokenizer(input_sequence, return_tensors="pt")
+text = "Hello my name is"
+inputs = tokenizer(text, return_tensors="pt")
 
-# Run the input through the model
-outputs = model(**inputs)
+outputs = model.generate(**inputs, max_new_tokens=20)
+print(tokenizer.decode(outputs[0], skip_special_tokens=True))
 
-# Get the predicted class label
-predicted_class = outputs[0].argmax(-1).item()
-
-print("Predicted class:", predicted_class)
 
 """
 def format_prompt(message, history):
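For context, the commit replaces a sequence-classification call path (loading the 7B merge with AutoModelForSequenceClassification and arg-maxing its logits) with the standard Transformers causal-LM generation flow: load the checkpoint with AutoModelForCausalLM, tokenize a prompt, call generate(), and decode the result. Below is a minimal self-contained sketch of that flow, mirroring the committed lines but dropping the now-unused AutoModelForSequenceClassification import; dtype and device handling are omitted, so loading assumes enough memory for full-precision 7B weights.

from transformers import AutoModelForCausalLM, AutoTokenizer

# Model repo from the commit.
model_id = 'K00B404/Merged_Beowolx-CodePro_Medusa2-7B-Mistral-I-v0-2'

# Load the tokenizer and the causal-LM head for the checkpoint.
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

# Tokenize a prompt and generate a short continuation.
text = "Hello my name is"
inputs = tokenizer(text, return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=20)

# Decode the full sequence (prompt plus up to 20 new tokens) back to text.
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

Note that in both the old and new versions the snippet sits just above a closing """ line before format_prompt, so in the committed file it may live inside a string literal rather than executing at import time.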