minhcrafters committed
Commit d460322
1 Parent(s): c7a58dd

Upload model

Files changed (3):
  1. README.md +2 -2
  2. config.json +2 -3
  3. generation_config.json +1 -2

README.md CHANGED

```diff
@@ -1,9 +1,9 @@
 ---
+language:
+- en
 library_name: transformers
 tags:
 - conversational
-language:
-- en
 ---
 
 # Model Card for DialoGPT-large
```
config.json CHANGED

```diff
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "DialoGPT-large-rencebomba_",
+  "_name_or_path": "/content/output-large-rencebomba_",
   "activation_function": "gelu_new",
   "architectures": [
     "GPT2LMHeadModel"
@@ -8,7 +8,6 @@
   "bos_token_id": 50256,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
-  "pad_token_id": 50256,
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt2",
@@ -33,7 +32,7 @@
     }
   },
   "torch_dtype": "float32",
-  "transformers_version": "4.38.1",
+  "transformers_version": "4.38.2",
   "use_cache": true,
   "vocab_size": 50257
 }
```
generation_config.json CHANGED

```diff
@@ -2,6 +2,5 @@
   "_from_model_config": true,
   "bos_token_id": 50256,
   "eos_token_id": 50256,
-  "pad_token_id": 50256,
-  "transformers_version": "4.38.1"
+  "transformers_version": "4.38.2"
 }
```
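
Since this commit drops `"pad_token_id": 50256` from both config.json and generation_config.json, GPT-2-family models are left with no pad token pinned in the checkpoint, so it is safest to pass one explicitly at generation time. Below is a minimal usage sketch following the standard DialoGPT chat pattern; the repo id `minhcrafters/DialoGPT-large` is an assumption for illustration, not something this commit confirms.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "minhcrafters/DialoGPT-large"  # hypothetical repo id, assumed for this sketch
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

# DialoGPT convention: terminate each user turn with the EOS token.
prompt = tokenizer.encode(
    "Hello, how are you?" + tokenizer.eos_token,
    return_tensors="pt",
)

# pad_token_id is no longer set in the uploaded configs, so pass it
# explicitly (reusing EOS, as the old configs did with 50256).
reply_ids = model.generate(
    prompt,
    max_length=100,
    pad_token_id=tokenizer.eos_token_id,
)

# Decode only the newly generated tokens, skipping the prompt.
print(tokenizer.decode(reply_ids[0][prompt.shape[-1]:], skip_special_tokens=True))
```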