Detsutut committed on
Commit
4a92b37
1 Parent(s): c8ba4b0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -5,8 +5,8 @@ import torch
5
  import re
6
 
7
  # Initialize the model
8
- model = AutoModelForCausalLM.from_pretrained("Detsutut/Igea-1B-v0.0.1-Q4_K_M-GGUF")
9
- tokenizer = AutoTokenizer.from_pretrained( "Detsutut/Igea-350M-v0.0.1")
10
 
11
 
12
  gen_pipeline = pipeline(
@@ -16,7 +16,7 @@ gen_pipeline = pipeline(
16
  )
17
 
18
  # Define the function to generate text
19
- def generate_text(input_text, max_new_tokens, temperature, top_p, split_output):
20
  if split_output:
21
  max_new_tokens=30
22
  top_p=0.95
 
5
  import re
6
 
7
  # Initialize the model
8
+ model = AutoModelForCausalLM.from_pretrained("Detsutut/Igea-1B-v0.0.1-Q4_K_M-GGUF", model_file="igea-1b-v0.0.1-q4_k_m.gguf", model_type="mistral", hf=True)
9
+ tokenizer = AutoTokenizer.from_pretrained( "Detsutut/Igea-1B-v0.0.1")
10
 
11
 
12
  gen_pipeline = pipeline(
 
16
  )
17
 
18
  # Define the function to generate text
19
+ def generate_text(input_text, max_new_tokens=30, temperature=1, top_p=0.95, split_output=False):
20
  if split_output:
21
  max_new_tokens=30
22
  top_p=0.95