from transformers import AutoTokenizer

from src.constants import MAX_ATTEMPTS, STARTING_INDEX
from src.params import ReducerParams
from src.predictions import make_predictions
from src.text import get_text

# Load the GPT-2 tokenizer and precompute the model's guesses for the game.
tokenizer = AutoTokenizer.from_pretrained("gpt2")
token_id_predictions, token_predictions = make_predictions(tokenizer)

# Load the game text and tokenize it once up front.
text = get_text()
all_tokens = tokenizer.encode(text)

# Initial game state: the prompt is the first STARTING_INDEX tokens of the text,
# both scores start at zero, and the player has the full attempt budget.
INITIAL_STATE = ReducerParams(
    prompt_text=tokenizer.decode(all_tokens[:STARTING_INDEX]),
    player_points=0,
    lm_points=0,
    current_guesses="",
    lm_guesses="",
    remaining_attempts=MAX_ATTEMPTS,
    guess_field="",
    button_label="Guess!",
    bottom_html="",
    word_number=0,
)