sharpenb committed on
Commit
2f4ef14
1 Parent(s): 3036d8b

Upload folder using huggingface_hub (#2)

Browse files

- c5466ee2790bbcd473f45586c63c437070b530dd11d90c3f27f733198f730a1b (35b4ac58199e000f3a81afb6b4c1a847607647d0)
- 4d617c5f5af44765fe17a483efc26a1752c680b414336f8831fd391c95590230 (e2335283b0ea77842111c1afb8f817493d6a2d6b)

Files changed (2) hide show
  1. config.json +1 -1
  2. smash_config.json +1 -1
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "/covalent/.cache/models/tmplk1hrxca068hgox_",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
 
1
  {
2
+ "_name_or_path": "/covalent/.cache/models/tmpr1rufhy5hxn_itfj",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
smash_config.json CHANGED
@@ -28,7 +28,7 @@
28
  "quant_llm-int8_weight_bits": 8,
29
  "max_batch_size": 1,
30
  "device": "cuda",
31
- "cache_dir": "/covalent/.cache/models/tmplk1hrxca",
32
  "task": "",
33
  "save_load_fn": "bitsandbytes",
34
  "save_load_fn_args": {}
 
28
  "quant_llm-int8_weight_bits": 8,
29
  "max_batch_size": 1,
30
  "device": "cuda",
31
+ "cache_dir": "/covalent/.cache/models/tmpr1rufhy5",
32
  "task": "",
33
  "save_load_fn": "bitsandbytes",
34
  "save_load_fn_args": {}