import numpy as np
import tensorflow as tf

# Load the external dictionary / corpus file.
def load_poems(file_path):
    """Read poems from a UTF-8 text file, one poem per line.

    Leading/trailing whitespace (including the newline) is stripped
    from every line.

    Args:
        file_path: Path to the UTF-8 encoded text file.

    Returns:
        List of poem strings, in file order.
    """
    with open(file_path, 'r', encoding='utf-8') as fh:
        return [raw_line.strip() for raw_line in fh]

# Preprocess the Tang-poem data into lookup tables.
def preprocess_poems(poems):
    """Build character/index lookup tables from a corpus of poems.

    The vocabulary is the sorted set of every distinct character that
    appears across all poems; indices are assigned in that sorted order.

    Args:
        poems: Iterable of poem strings.

    Returns:
        Tuple ``(char_to_idx, idx_to_char)`` of mutually inverse dicts.
    """
    vocab = sorted({ch for poem in poems for ch in poem})
    char_to_idx = {}
    idx_to_char = {}
    for index, ch in enumerate(vocab):
        char_to_idx[ch] = index
        idx_to_char[index] = ch
    return char_to_idx, idx_to_char

# Generator that yields (input, target) training batches indefinitely.
def data_generator(poems, char_to_idx, batch_size, max_length):
    """Yield ``(X, y)`` batches for next-character prediction, forever.

    For each poem, every prefix ``poem[:j+1]`` becomes one training
    sample: the prefix (as character indices, left-padded/truncated to
    ``max_length`` with pad value 0) is the input, and ``poem[j+1]`` is
    the target. Poems are reshuffled at the start of every pass.

    Args:
        poems: List of poem strings (not mutated).
        char_to_idx: Mapping from character to integer index.
        batch_size: Number of poems (not samples) per yielded batch.
        max_length: Fixed input sequence length.

    Yields:
        Tuple ``(X, y)`` of numpy arrays; ``X`` has shape
        ``(num_samples, max_length)`` and ``y`` shape ``(num_samples,)``.
    """
    # Shuffle a local copy so the caller's list is not mutated in place.
    poems = list(poems)
    while True:
        np.random.shuffle(poems)
        for start in range(0, len(poems), batch_size):
            X_data = []
            y_data = []
            for poem in poems[start:start + batch_size]:
                for j in range(len(poem) - 1):
                    seq = [char_to_idx[ch] for ch in poem[:j + 1]]
                    # Pre-truncate then pre-pad with 0, matching the
                    # semantics of pad_sequences(padding='pre') used
                    # during the original implementation.
                    seq = seq[-max_length:]
                    X_data.append([0] * (max_length - len(seq)) + seq)
                    y_data.append(char_to_idx[poem[j + 1]])
            yield np.array(X_data), np.array(y_data)

# Load the external dictionary/corpus file (one poem per line).
file_path = 'dictionary.txt'
poems = load_poems(file_path)

# Build the character <-> index lookup tables from the corpus.
char_to_idx, idx_to_char = preprocess_poems(poems)

# Model hyperparameters.
vocab_size = len(char_to_idx)   # one output class per distinct character
embedding_dim = 256             # size of the learned character embeddings
rnn_units = 512                 # LSTM hidden-state size
batch_size = 64                 # poems (not samples) per generator batch
max_length = 100                # fixed, left-padded input sequence length

# Build the model: embedding -> LSTM -> softmax over the vocabulary.
# NOTE(review): `input_length` is deprecated/removed in Keras 3 —
# confirm against the pinned TensorFlow version.
model = tf.keras.Sequential([
    tf.keras.layers.Embedding(vocab_size, embedding_dim, input_length=max_length),
    tf.keras.layers.LSTM(rnn_units),
    tf.keras.layers.Dense(vocab_size, activation='softmax')
])

# Compile with sparse targets (y holds integer class indices, not one-hot).
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy')

# Infinite batch generator feeding the training loop.
data_gen = data_generator(poems, char_to_idx, batch_size, max_length)

# Train. NOTE(review): steps_per_epoch uses floor division, so when
# len(poems) is not a multiple of batch_size the final partial batch of
# each shuffle cycle rolls over into the next epoch — verify intended.
model.fit(data_gen, steps_per_epoch=len(poems)//batch_size, epochs=10)


def generate_poem(model, start_char, char_to_idx, idx_to_char, num_generate=100,
                  max_length=100):
    """Generate a poem by repeatedly sampling the next character.

    Starting from ``start_char``, each step feeds the running context
    (left-padded to ``max_length``, the same shape used during
    training) through the model and samples one character from the
    predicted distribution.

    Args:
        model: Trained Keras model mapping an index sequence to a
            softmax distribution over the vocabulary.
        start_char: Seed character; must be present in ``char_to_idx``.
        char_to_idx: Mapping from character to integer index.
        idx_to_char: Inverse mapping from index to character.
        num_generate: Number of characters to generate.
        max_length: Fixed input length the model was trained with.

    Returns:
        ``start_char`` followed by ``num_generate`` sampled characters.
    """
    # Running context of character indices, starting from the seed.
    context = [char_to_idx[start_char]]
    generated_text = []

    for _ in range(num_generate):
        # Left-pad/truncate the full context to max_length so inference
        # matches the pre-padded prefixes the model was trained on
        # (the original fed only the single previous character).
        window = context[-max_length:]
        padded = [0] * (max_length - len(window)) + window
        input_eval = tf.expand_dims(padded, 0)

        predictions = model(input_eval)  # (1, vocab_size) softmax probs
        # tf.random.categorical expects unnormalized log-probabilities;
        # the model outputs softmax probabilities, so take the log
        # (epsilon guards against log(0)) to sample from the intended
        # distribution.
        logits = tf.math.log(predictions + 1e-10)
        predicted_id = int(tf.random.categorical(logits, num_samples=1)[0, 0].numpy())

        generated_text.append(idx_to_char[predicted_id])
        context.append(predicted_id)

    return start_char + ''.join(generated_text)

# Generate a Tang poem seeded by a user-supplied Chinese character.
# NOTE(review): a character absent from the training vocabulary raises
# KeyError inside generate_poem — consider validating the input first.
start_char = input("请输入一个汉字作为起始字符：")
generated_poem = generate_poem(model, start_char, char_to_idx, idx_to_char)
print(generated_poem)
