step 1000
- config.json +2 -2
- log/debug_0.log +0 -0
- log/debug_1.log +0 -0
- log/debug_2.log +0 -0
- log/debug_3.log +0 -0
- log/debug_4.log +0 -0
- log/debug_5.log +0 -0
- log/debug_6.log +0 -0
- log/debug_7.log +0 -0
- pytorch_model.bin +2 -2
config.json
CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "
+  "_name_or_path": "multi-code-clippy",
   "activation_function": "gelu_new",
   "architectures": [
     "GPTNeoForCausalLM"
@@ -47,7 +47,7 @@
   "summary_type": "cls_index",
   "summary_use_proj": true,
   "torch_dtype": "float32",
-  "transformers_version": "4.
+  "transformers_version": "4.12.2",
   "use_cache": true,
   "vocab_size": 50002,
   "window_size": 256
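For reference, the config above describes a GPTNeoForCausalLM checkpoint (vocab_size 50002, window_size 256) saved with transformers 4.12.2. A minimal sketch of loading it with the transformers Auto classes, assuming the files from this commit are available locally or under a repo id; the "multi-code-clippy" path below is a placeholder taken from _name_or_path, not a confirmed repo id:

from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

# Placeholder path/repo id; point this at a local clone of this commit
# or at the actual hub repo id for the model.
checkpoint = "multi-code-clippy"

# config.json declares GPTNeoForCausalLM, so the generic causal-LM
# auto class resolves to the GPT-Neo implementation.
config = AutoConfig.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(checkpoint)
tokenizer = AutoTokenizer.from_pretrained(checkpoint)

print(config.architectures, config.vocab_size, config.window_size)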
log/debug_0.log
ADDED
The diff for this file is too large to render. See raw diff

log/debug_1.log
ADDED
File without changes

log/debug_2.log
ADDED
File without changes

log/debug_3.log
ADDED
File without changes

log/debug_4.log
ADDED
File without changes

log/debug_5.log
ADDED
File without changes

log/debug_6.log
ADDED
File without changes

log/debug_7.log
ADDED
File without changes
pytorch_model.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:5449bbd0a4bba1464003fbf1205d5a567412b214df3389802d3302e09961989f
+size 550399185
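The file tracked in git is only a Git LFS pointer; the actual ~550 MB weights are fetched separately. A small sketch, assuming the weights have been downloaded to the working directory, that checks a local pytorch_model.bin against the oid and size recorded in the pointer above:

import hashlib

# Expected values copied from the LFS pointer in this commit.
expected_oid = "5449bbd0a4bba1464003fbf1205d5a567412b214df3389802d3302e09961989f"
expected_size = 550399185

# Placeholder path to the downloaded checkpoint.
path = "pytorch_model.bin"

sha = hashlib.sha256()
size = 0
with open(path, "rb") as f:
    # Hash in 1 MiB chunks to avoid loading the whole file into memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)
        size += len(chunk)

assert size == expected_size, f"size mismatch: {size}"
assert sha.hexdigest() == expected_oid, "sha256 mismatch"
print("checkpoint matches the LFS pointer")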