Isaak Carter Augustus committed on
Commit 012e753
1 Parent(s): abd9499

Update README.md

Files changed (1)
  1. README.md +17 -3
README.md CHANGED

@@ -6,8 +6,22 @@ tags:
 - MiniCPM
 - ModelBest
 - THUNLP
+- mlx
+library_name: mlx
 ---
 
+# Important:
+Made a separate repo because the original creators didn't merge the safetensors variant commit from the Hugging Face team, and because the `model_type` property was missing from the config (needed for my MiniCPM implementation in mlx-examples). This is all fixed here.
+
+To use in mlx:
+
+```sh
+python -m mlx_lm.generate --model Isaak-Carter/MiniCPM-2B-sft-fp32-safetensors --prompt "hello"
+```
+
+
+
+
 
 <div align="center">
 <h1>
@@ -95,9 +109,9 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
 import torch
 torch.manual_seed(0)
 
-path = 'openbmb/MiniCPM-2B-dpo-bf16'
+path = 'openbmb/MiniCPM-2B-dpo-fp32'
 tokenizer = AutoTokenizer.from_pretrained(path)
-model = AutoModelForCausalLM.from_pretrained(path, torch_dtype=torch.bfloat16, device_map='cuda', trust_remote_code=True)
+model = AutoModelForCausalLM.from_pretrained(path, torch_dtype=torch.float32, device_map='cuda', trust_remote_code=True)
 
 responds, history = model.chat(tokenizer, "山东省最高的山是哪座山, 它比黄山高还是矮?差距多少?", temperature=0.8, top_p=0.8)
 print(responds)
@@ -148,4 +162,4 @@ print(responds)
 booktitle={OpenBMB Blog},
 year={2024}
 }
-```
+```
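As context for the `model_type` fix mentioned in the note above, one way to verify that this repo's config now carries the field is to read `config.json` directly. This is an illustrative sketch and not part of the commit; it assumes only the standard `huggingface_hub` download helper, and the expected field value is an assumption rather than something the commit states.

```python
import json

from huggingface_hub import hf_hub_download

# Illustrative check (not part of this commit): download the repo's config.json
# and confirm it declares a model_type, the field the note above says was
# missing from the original repo's config.
config_path = hf_hub_download(
    repo_id="Isaak-Carter/MiniCPM-2B-sft-fp32-safetensors",
    filename="config.json",
)
with open(config_path) as f:
    config = json.load(f)

# Expected to print a non-empty value (presumably "minicpm"); the exact string
# is an assumption, not confirmed by the commit itself.
print(config.get("model_type"))
```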
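The `mlx_lm.generate` CLI call added to the README can also be run from Python. The sketch below is an illustration rather than content from the commit; it assumes the `mlx-lm` package is installed and follows its documented `load`/`generate` helpers.

```python
from mlx_lm import load, generate

# Sketch of the Python equivalent of the CLI command in the README
# (assumes `pip install mlx-lm`; load() fetches the model and tokenizer
# from the Hugging Face repo named in the commit).
model, tokenizer = load("Isaak-Carter/MiniCPM-2B-sft-fp32-safetensors")

# Generate a completion for the same prompt used in the README example.
text = generate(model, tokenizer, prompt="hello", verbose=True)
print(text)
```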