yujiepan committed on
Commit cda8a0f
1 Parent(s): f68714c

Update README.md

Files changed (1): README.md (+7 -4)
README.md CHANGED
@@ -13,6 +13,7 @@ Note the model is in float16.
 
 Codes:
 ```python
+ from transformers import pipeline
 from huggingface_hub import create_repo, upload_folder
 import torch
 import transformers
@@ -23,14 +24,18 @@ save_path = '/tmp/yujiepan/gptj-tiny-random'
 repo_id = 'yujiepan/gptj-tiny-random'
 
 config = transformers.AutoConfig.from_pretrained(model_id)
- config.hidden_size = 8
- config.n_embd = 8
+ config.hidden_size = 16
+ config.n_embd = 16
 config.num_attention_heads = 2
 config.n_head = 2
+ config.rotary_dim = 4
+ config.num_hidden_layers = 2
 config.n_layer = 2
+ config.torch_dtype = torch.float16
 print(config)
 
 model = transformers.AutoModelForCausalLM.from_config(config, torch_dtype=torch.float16)
+ model = model.half()
 model.save_pretrained(save_path)
 
 tokenizer = transformers.AutoTokenizer.from_pretrained(model_id)
@@ -38,11 +43,9 @@ tokenizer.save_pretrained(save_path)
 
 # from optimum.intel.openvino import OVModelForCausalLM
 # ovmodel = OVModelForCausalLM.from_pretrained(save_path, export=True)
- # ovmodel = ovmodel.half()
 # ovmodel.save_pretrained(save_path)
 
 os.system(f'ls -alh {save_path}')
-
 create_repo(repo_id, exist_ok=True)
 upload_folder(repo_id=repo_id, folder_path=save_path)
 ```
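
For reference, a minimal smoke test of the uploaded checkpoint (a sketch, not part of the commit: it assumes the repo `yujiepan/gptj-tiny-random` has already been pushed, and the generated text is meaningless because the weights are random):

```python
from transformers import pipeline

# Load the tiny random GPT-J from the Hub and run a short generation,
# purely to confirm that the checkpoint loads and runs end to end.
generator = pipeline('text-generation', model='yujiepan/gptj-tiny-random')
print(generator('Hello', max_new_tokens=8)[0]['generated_text'])
```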