louisbrulenaudet committed on
Commit
e04bd7d
1 Parent(s): c6c25d5

Upload app.py

Files changed (1)
  1. app.py +14 -15
app.py CHANGED
@@ -49,7 +49,6 @@ def setup(
         return None, None, description
 
     try:
-        # Load the model and tokenizer
         model = AutoModelForCausalLM.from_pretrained(
             model_id,
             torch_dtype=torch.bfloat16,
@@ -73,6 +72,20 @@ def setup(
 
         return None, None, description
 
+DESCRIPTION = """\
+# Pearl-7B-0211-ties, an xtraordinary 7B model
+
+This space showcases the <a style='color:white;' href='https://huggingface.co/louisbrulenaudet/Pearl-7B-0211-ties'>Pearl-7B-0211-ties</a>
+model by Louis Brulé Naudet, a language model with 7.24 billion parameters that achieves a score exceeding 75.10 on the Open LLM Leaderboard
+(average).
+"""
+
+model, tokenizer, description = setup(
+    model_id="louisbrulenaudet/Pearl-7B-0211-ties",
+    description=DESCRIPTION
+)
+
+print(model, tokenizer)
 
 def preprocess_conversation(
     message: str,
@@ -264,20 +277,6 @@ def generate(
 
     return "".join(outputs)
 
-
-DESCRIPTION = """\
-# Pearl-7B-0211-ties, an xtraordinary 7B model
-
-This space showcases the <a style='color:white;' href='https://huggingface.co/louisbrulenaudet/Pearl-7B-0211-ties'>Pearl-7B-0211-ties</a>
-model by Louis Brulé Naudet, a language model with 7.24 billion parameters that achieves a score exceeding 75.10 on the Open LLM Leaderboard
-(average).
-"""
-
-model, tokenizer, description = setup(
-    model_id="louisbrulenaudet/Pearl-7B-0211-ties",
-    description=DESCRIPTION
-)
-
 chat_interface = gr.ChatInterface(
     fn=generate,
     additional_inputs=[
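The two setup() hunks above only show the fallback return and the start of the model-loading call. For orientation, here is a minimal, self-contained sketch of a helper with that shape; the early-exit condition, the device_map argument, and the tokenizer loading are assumptions, since the commit does not show the full function body.

# Minimal sketch (not the commit's actual code) of a setup() helper with the
# shape implied by the hunks above: try to load the model and tokenizer,
# otherwise fall back to (None, None, description).
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

def setup(model_id: str, description: str):
    if not torch.cuda.is_available():
        # Assumed early exit; the diff only shows that setup() can return
        # (None, None, description) before the try block.
        return None, None, description

    try:
        model = AutoModelForCausalLM.from_pretrained(
            model_id,
            torch_dtype=torch.bfloat16,
            device_map="auto",  # assumed; the hunk is truncated after torch_dtype
        )
        tokenizer = AutoTokenizer.from_pretrained(model_id)
        return model, tokenizer, description
    except Exception:
        # Mirrors the fallback return shown at the end of the second hunk.
        return None, None, description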
 
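The diff ends with the chat_interface = gr.ChatInterface(...) call truncated after additional_inputs=[. As a hypothetical illustration only (the sliders, the stub generate(), the stand-in DESCRIPTION, and the launch code below are not part of this commit), a Gradio Space usually completes that wiring along these lines:

# Hypothetical wiring sketch; everything here beyond the gr.ChatInterface /
# fn=generate / additional_inputs structure visible in the diff is assumed.
import gradio as gr

DESCRIPTION = "# Pearl-7B-0211-ties"  # stand-in for the DESCRIPTION string this commit moves

def generate(message, history, max_new_tokens=1024, temperature=0.7):
    # Stub standing in for the real generate() defined earlier in app.py.
    return f"(echo) {message}"

chat_interface = gr.ChatInterface(
    fn=generate,
    additional_inputs=[
        # The real list is truncated in the diff; these sliders are illustrative.
        gr.Slider(minimum=1, maximum=2048, value=1024, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=2.0, value=0.7, step=0.1, label="Temperature"),
    ],
)

with gr.Blocks() as demo:
    gr.Markdown(DESCRIPTION)
    chat_interface.render()

if __name__ == "__main__":
    demo.queue().launch()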