Files changed (3)
  1. config.json +15 -30
  2. topic_embeddings.safetensors +3 -0
  3. topics.json +0 -0
config.json CHANGED
@@ -1,32 +1,17 @@
 {
-  "_name_or_path": "EleutherAI/pythia-12b",
-  "architectures": [
-    "GPTNeoXForCausalLM"
+  "calculate_probabilities": false,
+  "language": "english",
+  "low_memory": false,
+  "min_topic_size": 10,
+  "n_gram_range": [
+    1,
+    1
   ],
-  "custom_pipelines": {
-    "text-generation": {
-      "impl": "instruct_pipeline.InstructionTextGenerationPipeline",
-      "pt": "AutoModelForCausalLM",
-      "tf": "TFAutoModelForCausalLM"
-    }
-  },
-  "bos_token_id": 0,
-  "eos_token_id": 0,
-  "hidden_act": "gelu",
-  "hidden_size": 5120,
-  "initializer_range": 0.02,
-  "intermediate_size": 20480,
-  "layer_norm_eps": 1e-05,
-  "max_position_embeddings": 2048,
-  "model_type": "gpt_neox",
-  "num_attention_heads": 40,
-  "num_hidden_layers": 36,
-  "rotary_emb_base": 10000,
-  "rotary_pct": 0.25,
-  "tie_word_embeddings": false,
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.25.1",
-  "use_cache": true,
-  "use_parallel_residual": true,
-  "vocab_size": 50280
-}
+  "nr_topics": null,
+  "seed_topic_list": null,
+  "top_n_words": 10,
+  "verbose": true,
+  "zeroshot_min_similarity": 0.7,
+  "zeroshot_topic_list": null,
+  "embedding_model": "sentence-transformers/all-MiniLM-L6-v2"
+}
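The new config.json is no longer a transformers model config but BERTopic's serialized parameter set; each key corresponds to an argument of the BERTopic constructor. A minimal sketch of the equivalent in-code setup, assuming bertopic is installed (n_gram_range is stored as a JSON list but passed as a Python tuple):

# Sketch: re-create the saved configuration in code. Assumes `bertopic`
# (which pulls in sentence-transformers) is installed.
from bertopic import BERTopic

topic_model = BERTopic(
    calculate_probabilities=False,
    language="english",
    low_memory=False,
    min_topic_size=10,
    n_gram_range=(1, 1),  # serialized as [1, 1] in config.json
    nr_topics=None,
    seed_topic_list=None,
    top_n_words=10,
    verbose=True,
    zeroshot_min_similarity=0.7,
    zeroshot_topic_list=None,
    embedding_model="sentence-transformers/all-MiniLM-L6-v2",
)

Apart from verbose=True and the explicitly pinned embedding model, these values match BERTopic's documented defaults, so this reads as a stock unsupervised configuration.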
topic_embeddings.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7cf67651711d33ab3e9e1b984bc01502d356a674378e79f7ed6ca97b3c8adac0
+size 124504
topics.json ADDED
The diff for this file is too large to render. See raw diff
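topics.json holds the serialized topic representations themselves, which is why the viewer refuses to render the diff. Together with config.json and topic_embeddings.safetensors it is what BERTopic reads back when loading. A sketch of loading the repository straight from the Hub, assuming bertopic is installed; "<user>/<repo>" is a placeholder, since the diff does not name the repository:

# Sketch: load the full serialized model from the Hub.
from bertopic import BERTopic

topic_model = BERTopic.load("<user>/<repo>")  # placeholder repo id
print(topic_model.get_topic_info().head())    # one row per discovered topic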