# Thinc/confection-style config (INI syntax, JSON-typed values, @-registry refs).
# NOTE(review): the `@models`/`@tokenizers` keys and the `null` value are valid
# in confection's config format but NOT in strict TOML. If a strict TOML parser
# is the consumer, `@models` would need quoting and `null` would need removal —
# confirm which loader actually reads this file.

[model]
# Registered factory that builds the embedding model.
@models = "glove.v1"
# Dimensionality of the learned word vectors.
vector_size = 50
# presumably the GloVe co-occurrence weighting exponent (paper default 0.75) — TODO confirm
alpha = 0.75
# Context window size, in tokens, for co-occurrence counting.
window = 15
symmetric = true
distance_weighting = true
# Number of training epochs over the co-occurrence matrix.
iter = 25
initial_learning_rate = 0.05
# Worker processes used during training.
n_jobs = 8
# presumably a memory budget in GiB for co-occurrence accumulation — verify units against the loader
memory = 4.0

[tokenizer]
# Registered factory that builds the tokenizer.
@tokenizers = "wordlevel_tokenizer.v1"
vocab_size = 30000
# min_frequency = 0 keeps every observed token; raise to prune rare tokens.
min_frequency = 0
# The four flags below mirror HuggingFace tokenizers' BertNormalizer options —
# NOTE(review): confirm they are forwarded there.
clean_text = true
handle_chinese_chars = true
# null presumably means "auto": strip accents only when lowercasing — verify
strip_accents = null
lowercase = true