cristinaimprota committed on
Commit ca49620 · verified · 1 Parent(s): 54f963e

Create app.py

Files changed (1)
  1. app.py +136 -0
app.py ADDED
@@ -0,0 +1,136 @@
from transformers import AutoTokenizer, AutoModelForCausalLM
import gradio as gr
import torch

# ==========================
# Load your model
# ==========================

MODEL_ID = "OSS-forge/DeepSeek-Coder-1.3B-cleaned"

device = "cuda" if torch.cuda.is_available() else "cpu"

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token

model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID,
    torch_dtype=torch.bfloat16 if torch.cuda.is_available() else torch.float32,
    device_map="auto" if torch.cuda.is_available() else None,
)
if not torch.cuda.is_available():
    # device_map="auto" already places the model on GPU; only move it manually on CPU.
    model.to(device)
model.eval()


# ==========================
# Prompt builder
# ==========================

def build_instruction_prompt(instruction: str) -> str:
    return '''
You are an AI programming assistant, utilizing the DeepSeek Coder model, developed by DeepSeek Company, and you only answer questions related to computer science.
### Instruction:
{}
### Response:
'''.format(instruction.strip()).lstrip()


# ==========================
# Gradio logic
# ==========================

def generate_code(instruction, chat_history, is_first_time):
    if chat_history is None or is_first_time:
        chat_history = []

    instruction = instruction.strip()
    if not instruction:
        return chat_history, gr.update(value=instruction), False

    prompt = build_instruction_prompt(instruction)

    inputs = tokenizer(
        prompt,
        return_tensors="pt",
        padding=True,
        truncation=True,
        max_length=512,
    ).to(device)

    try:
        stop_id = tokenizer.convert_tokens_to_ids("<|EOT|>")
    except Exception:
        stop_id = tokenizer.eos_token_id

    with torch.no_grad():
        outputs = model.generate(
            **inputs,
            max_new_tokens=512,
            do_sample=False,
            pad_token_id=stop_id,
            eos_token_id=stop_id,
        )

    # Decode only the newly generated tokens, not the prompt.
    input_len = inputs["input_ids"].shape[1]
    generated_tokens = outputs[0, input_len:]
    code = tokenizer.decode(generated_tokens, skip_special_tokens=True).strip()

    user_message = f"**Instruction**:\n{instruction}"
    ai_message = f"**Generated code**:\n```python\n{code}\n```"

    chat_history = chat_history + [
        {"role": "user", "content": user_message},
        {"role": "assistant", "content": ai_message},
    ]

    return chat_history, gr.update(value=""), False


def reset_interface():
    return [], gr.update(value=""), True


# ==========================
# Gradio UI
# ==========================

with gr.Blocks(title="Python Code Generator") as demo:
    gr.Markdown("# 🧠 Python Code Generator")
    gr.Markdown(
        "Generate Python code from natural language instructions using your Hugging Face model."
    )

    with gr.Row():
        with gr.Column(scale=2):
            instruction_input = gr.Textbox(
                label="Instruction",
                placeholder="Describe the code you want. E.g., 'Write a Python function that checks if a number is prime.'",
                lines=4,
            )

            is_first = gr.State(True)

            submit_btn = gr.Button("Generate Code")
            reset_btn = gr.Button("Start Over")

        with gr.Column(scale=3):
            chat_output = gr.Chatbot(
                label="Conversation",
                height=500,
                type="messages",  # history entries are {"role": ..., "content": ...} dicts
            )

    submit_btn.click(
        fn=generate_code,
        inputs=[instruction_input, chat_output, is_first],
        outputs=[chat_output, instruction_input, is_first],
    )

    reset_btn.click(
        fn=reset_interface,
        outputs=[chat_output, instruction_input, is_first],
    )

if __name__ == "__main__":
    print("Launching Gradio interface...")
    demo.queue(max_size=10).launch()
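
For a quick check of the generation path without launching the UI, one could import the module and call generate_code directly. This is a minimal sketch, assuming app.py is importable as `app`; importing it will download and load the model, and the interface is built but not launched since __name__ is not "__main__".

# Hypothetical smoke test: run one instruction through the same code path
# the "Generate Code" button uses and print the assistant message it appends.
from app import generate_code

history, _, _ = generate_code(
    "Write a Python function that checks if a number is prime.",
    chat_history=[],
    is_first_time=True,
)
print(history[-1]["content"])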