blackhole33 committed
Commit 67643a3
1 Parent(s): 57989ae

Update README.md

Files changed (1): README.md (+62, -0)
README.md CHANGED
@@ -13,6 +13,68 @@ base_model: llama-3-8b-bnb-4bit
 
  # Uploaded model
 
+ # Usage
+
+ ```python
+ import gradio as gr
+ from unsloth import FastLanguageModel
+
+ # Loading settings
+ max_seq_length = 2048
+ dtype = None          # auto-detect the compute dtype
+ load_in_4bit = True   # load the 4-bit quantized weights
+
+ # Load the fine-tuned model and its tokenizer
+ model, tokenizer = FastLanguageModel.from_pretrained(
+     model_name="blackhole33/llama-3-8b-bnb-4bit",
+     max_seq_length=max_seq_length,
+     dtype=dtype,
+     load_in_4bit=load_in_4bit,
+ )
+
+ FastLanguageModel.for_inference(model)  # Enable native 2x faster inference
+
+ # Alpaca-style prompt template. The Uzbek preamble reads: "Below is an instruction
+ # that describes a task, paired with an input that provides further context.
+ # Write a response that appropriately completes the request."
+ alpaca_prompt = """Quyida vazifani tavsiflovchi ko'rsatma mavjud bo'lib, u qo'shimcha kontekstni ta'minlaydigan kiritish bilan bog'langan. So'rovni to'g'ri to'ldiradigan javob yozing.
+
+ ### Instruction:
+ {}
+
+ ### Response:
+ {}"""
+
+ # Generate a response for a given instruction
+ def generate_response(instruction):
+     inputs = tokenizer(
+         [
+             alpaca_prompt.format(
+                 instruction,  # instruction
+                 "",           # output - leave this blank for generation!
+             )
+         ],
+         return_tensors="pt",
+     ).to("cuda")
+
+     outputs = model.generate(**inputs, max_new_tokens=250, use_cache=True)
+     res = tokenizer.batch_decode(outputs, skip_special_tokens=True)
+     return res[0]
+
+ # Gradio interface
+ interface = gr.Interface(
+     fn=generate_response,
+     inputs=[
+         gr.Textbox(lines=2, placeholder="Question"),
+     ],
+     outputs="text",
+     title="Uzbek Language Model Interface",
+     description="Enter an instruction to get a response from the model.",
+ )
+
+ # Launch the interface
+ interface.launch(share=True)
+ ```
+
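+ For a quick check without launching the Gradio UI, here is a minimal sketch; it reuses the `generate_response` helper defined above, and the example instruction is illustrative only:
+
+ ```python
+ # Assumes the setup code above has already been run on a CUDA-capable machine
+ answer = generate_response("O'zbekiston haqida qisqacha ma'lumot bering.")  # "Give a brief overview of Uzbekistan."
+ print(answer)
+ ```
+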
  - **Developed by:** blackhole33
  - **License:** apache-2.0
  - **Finetuned from model:** llama-3-8b-bnb-4bit