enacimie committed on
Commit defda42 (verified)
1 Parent(s): 444344b

Create app.py

Files changed (1)
  1. app.py +59 -0
app.py ADDED
@@ -0,0 +1,59 @@
+ import gradio as gr
+ from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
+ import torch
+
+ # Load the model and tokenizer (only once, at startup)
+ model_name = "LiquidAI/LFM2-350M"
+ tokenizer = AutoTokenizer.from_pretrained(model_name)
+ model = AutoModelForCausalLM.from_pretrained(model_name)
+
+ # Create the text-generation pipeline
+ pipe = pipeline(
+     "text-generation",
+     model=model,
+     tokenizer=tokenizer,
+     device=0 if torch.cuda.is_available() else -1,  # use the GPU if one is available
+     max_new_tokens=128,
+     do_sample=True,
+     temperature=0.7,
+     top_p=0.9,
+ )
+
+ def chat_function(message, history):
+     # Format the history as a single prompt (simple concatenation)
+     conversation = ""
+     for human, assistant in history:
+         conversation += f"User: {human}\nAssistant: {assistant}\n"
+     conversation += f"User: {message}\nAssistant:"
+
+     # Generate a response
+     outputs = pipe(conversation, return_full_text=False)
+     response = outputs[0]['generated_text'].strip()
+
+     # Trim repetitions or spill-over turns the model may generate
+     if "User:" in response:
+         response = response.split("User:")[0].strip()
+     if "Assistant:" in response:
+         response = response.split("Assistant:")[0].strip()
+
+     return response
+
+ # Gradio interface
+ with gr.Blocks(title="LFM2-350M Chat") as demo:
+     gr.Markdown("# LFM2-350M Chat (Simple)")
+     gr.Markdown("A simple chat using the `LiquidAI/LFM2-350M` model. Suited to testing on free Hugging Face Spaces hardware.")
+
+     chatbot = gr.Chatbot(height=400)
+     msg = gr.Textbox(label="Type your message", placeholder="Write something...")
+     clear = gr.Button("Clear")
+
+     def respond(message, chat_history):
+         bot_message = chat_function(message, chat_history)
+         chat_history.append((message, bot_message))
+         return "", chat_history
+
+     msg.submit(respond, [msg, chatbot], [msg, chatbot])
+     clear.click(lambda: None, None, chatbot, queue=False)
+
+ if __name__ == "__main__":
+     demo.launch()
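
For a quick sanity check outside the Gradio UI, the chat logic can be imported and called directly. The snippet below is a minimal sketch and not part of the commit: it assumes app.py is on the import path, that the LiquidAI/LFM2-350M weights can be downloaded in the current environment, and that the (user, assistant) tuple history format used by this app is kept.

# hypothetical smoke test for the chat logic in app.py (not committed)
from app import chat_function  # importing app.py loads the model but does not call demo.launch()

# history uses the same (user, assistant) tuple format as gr.Chatbot above
history = [("Hello!", "Hi! How can I help you?")]
print(chat_function("What can you do?", history))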