MrOvkill committed on
Commit 952e6c6
1 Parent(s): 81e885b

Update README.md

Files changed (1)
  README.md +28 -14
README.md CHANGED
@@ -45,24 +45,38 @@ dtype: bfloat16
  ## 💻 Usage

  ```python
+ # Installation
  !pip install -qU transformers accelerate

- from transformers import AutoTokenizer
- import transformers
+ # Imports
+ from transformers import AutoTokenizer, AutoModelForCausalLM
  import torch

- model = "MrOvkill/Phi-3-Instruct-Bloated"
- messages = [{"role": "user", "content": "What is a large language model?"}]
+ # Loading
+ tokenizer = AutoTokenizer.from_pretrained("./merge/")
+ model = AutoModelForCausalLM.from_pretrained("./merge/")

- tokenizer = AutoTokenizer.from_pretrained(model)
- prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
- pipeline = transformers.pipeline(
-     "text-generation",
-     model=model,
-     torch_dtype=torch.float16,
-     device_map="auto",
- )
+ # Completion function
+ def infer(prompt, **kwargs):
+     inputs = tokenizer(prompt, return_tensors="pt")
+     with torch.no_grad():
+         outputs = model.generate(**inputs, **kwargs)
+     return tokenizer.decode(outputs[0], skip_special_tokens=True)
+
+ # Some silliness
+ infer("<|user|>\nBen is going to the store for some Ice Cream. So is Jerry. They mix up the ice cream at the store. Is the ice cream: (a. Ben's (b. Jerry's (c. Ben and Jerry's <|end|>\n<|assistant|>\nMy answer is (", max_new_tokens=1024)

- outputs = pipeline(prompt, max_new_tokens=256, do_sample=True, temperature=0.7, top_k=50, top_p=0.95)
- print(outputs[0]["generated_text"])
+ # A proper test
+ infer(
+     """
+     <|user|>
+     Explain what a Mixture of Experts is in less than 100 words.
+     <|assistant|>
+     """,
+     max_new_tokens=1024,
+     do_sample=False,
+     temperature=0.0,
+     top_k=50,
+     top_p=0.89,
+ )
  ```
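The new snippet writes Phi-3's chat tokens (`<|user|>`, `<|end|>`, `<|assistant|>`) by hand. Below is a minimal sketch of the same greedy-decoded call built with `tokenizer.apply_chat_template` instead (as the removed snippet did), assuming the tokenizer saved under `./merge/` carries Phi-3's chat template. Note that when `do_sample=False`, `generate` ignores `temperature`, `top_k`, and `top_p`, so the sketch omits them:

```python
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

# Assumes the merged model and its tokenizer (with a Phi-3 chat template)
# were saved to ./merge/ as in the snippet above.
tokenizer = AutoTokenizer.from_pretrained("./merge/")
model = AutoModelForCausalLM.from_pretrained("./merge/")

messages = [{"role": "user", "content": "Explain what a Mixture of Experts is in less than 100 words."}]

# tokenize=False returns the formatted prompt string; add_generation_prompt=True
# appends the assistant header so the model answers instead of continuing the user turn.
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)

inputs = tokenizer(prompt, return_tensors="pt")
with torch.no_grad():
    # Greedy decoding: sampling knobs (temperature/top_k/top_p) would have no effect here.
    outputs = model.generate(**inputs, max_new_tokens=1024, do_sample=False)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```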