File size: 1,149 Bytes
bf29a63
 
 
 
 
df36ae5
bf29a63
8ef5965
bf29a63
df36ae5
bf29a63
df36ae5
8ef5965
 
df36ae5
 
 
 
 
8ef5965
bf29a63
8ef5965
bf29a63
 
 
 
 
 
df36ae5
 
bf29a63
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

# Load the tokenizer and model once at import time
# (downloads from the Hugging Face hub on first run, then uses the local cache).
model_name = "deepseek-ai/deepseek-coder-1.3b-base"
tokenizer = AutoTokenizer.from_pretrained(model_name)
# Explicitly pin to CPU — this app assumes no GPU is available.
model = AutoModelForCausalLM.from_pretrained(model_name).to("cpu")

# Code-generation helper used as the Gradio callback.
def generate_code(prompt):
    """Generate code from a natural-language ``prompt``.

    Uses the module-level ``tokenizer``/``model`` (DeepSeek Coder 1.3B) and
    returns only the newly generated continuation, without the prompt.
    """
    inputs = tokenizer(prompt, return_tensors="pt").to("cpu")
    outputs = model.generate(
        **inputs,
        max_new_tokens=200,        # length cap on the generated code
        temperature=0.7,           # sampling "creativity"
        do_sample=True,
        top_k=50,
        top_p=0.95,
        pad_token_id=tokenizer.eos_token_id,  # silence open-ended-generation warning
    )
    # Decode only the tokens produced after the prompt. Slicing by token count
    # is robust where the previous string-prefix strip (code[len(prompt):])
    # silently failed whenever decode() did not reproduce the prompt
    # byte-for-byte (whitespace normalization, special-token handling).
    prompt_len = inputs["input_ids"].shape[1]
    return tokenizer.decode(outputs[0][prompt_len:], skip_special_tokens=True)

# Assemble the Gradio UI and start serving it.
demo = gr.Interface(
    fn=generate_code,
    inputs=gr.Textbox(lines=5, placeholder="Describe what code you want...", label="Prompt"),
    outputs=gr.Textbox(label="Generated Code"),
    title="Code Generator - DeepSeek Model",
    description="Generate Python code from a text description using DeepSeek Coder 1.3B Base model",
)
demo.launch()