import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
import sys


def sep(label='', cnt=32):
    """Print a separator rule: `cnt` dashes, the label, then `cnt` more dashes."""
    rule = '-' * cnt
    print(f'{rule}{label}{rule}')


# Fix both RNG seeds so the random embedding initialisation is reproducible.
np.random.seed(1)
tf.random.set_random_seed(1)  # TF 1.x graph-level seed (renamed in TF 2.x)
n_embedding = 15  # dimensionality of the learned word-embedding vectors
ALPHA = 0.1  # Adam learning rate
ITERS = 500  # number of full-batch training iterations

########################################################################################################################
sep('sentence and dict')
sentence = 'I want to drink a glass of apple juice in order to cure my thirsty of now'
# sentence = 'I want to drink a glass of apple juice'
sentence = sentence.lower()

print(sentence)

# Tokenize on single spaces (the sentence is controlled, so this is enough).
words = sentence.split(' ')
words_len = len(words)
print(f'Words len: {words_len}')
print(words)

dict_set = set(words)
vocab_len = len(dict_set)
print(f'vocab_len: {vocab_len}')

# BUG FIX: iterating a set of strings has hash-randomized order across
# interpreter runs, so the word<->index mapping differed from run to run
# despite the fixed seeds above. Sorting makes the vocabulary deterministic.
dict_list = sorted(dict_set)
idx2word = {i: w for i, w in enumerate(dict_list)}
word2idx = {w: i for i, w in enumerate(dict_list)}
print(idx2word)
print(word2idx)

########################################################################################################################
sep('CBOW')
# Build (context-word index, center-word index) training pairs using a
# symmetric window of 2 words on each side of the center word.
WINDOW = 2
x_data = []
y_data = []
for center_pos, center_word in enumerate(words):
    center_idx = word2idx[center_word]
    lo = max(0, center_pos - WINDOW)
    hi = min(words_len, center_pos + WINDOW + 1)
    for ctx_pos in range(lo, hi):
        if ctx_pos == center_pos:
            continue
        x_data.append(word2idx[words[ctx_pos]])
        y_data.append(center_idx)

sep('check CBOW')
# Echo every (context, center) pair for a visual sanity check.
for ctx_idx, center_idx in zip(x_data, y_data):
    print(idx2word[ctx_idx], idx2word[center_idx])

# Column vectors of int32 indices, as expected by the placeholders below.
x_data = np.int32(x_data).reshape(-1, 1)
y_data = np.int32(y_data).reshape(-1, 1)


########################################################################################################################
sep('model')
# Placeholders hold word indices; shape (m, 1) matches the reshaped numpy data.
x = tf.placeholder(tf.int32, [None, 1], name='x')
y = tf.placeholder(tf.int32, [None, 1], name='y')

# BUG FIX: tf.one_hot on a (m, 1) tensor yields rank 3 (m, 1, vocab), which
# cannot be matmul'ed against the rank-2 embedding matrix. Flatten the index
# column to (m,) first so the encoding is (m, vocab).
x_flat = tf.reshape(x, [-1])          # (m,)
x_o = tf.one_hot(x_flat, vocab_len)   # (m, vocab)
x_o = tf.cast(x_o, dtype=tf.float32)

# Input-side embedding matrix: row i is the embedding vector of word i.
E = tf.Variable(np.random.normal(0, 1, [vocab_len, n_embedding]), dtype=tf.float32)  # (vocab, hidden)
e = tf.matmul(x_o, E)  # (m, hidden) -- one-hot matmul selects the embedding row
# Output-side projection back to vocabulary-sized logits.
E2 = tf.Variable(np.random.normal(0, 1, [n_embedding, vocab_len]), dtype=tf.float32)  # (hidden, vocab)
b = tf.Variable(np.zeros([1, vocab_len]), dtype=tf.float32)  # (1, vocab)
logits = tf.matmul(e, E2) + b  # (m, vocab)

########################################################################################################################
sep('compile model')
print(y, logits)
# BUG FIX: a leftover debugging `sys.exit(0)` here aborted the script before
# the model was ever compiled or trained. Removed.
# sparse_softmax_cross_entropy_with_logits requires rank-1 labels (m,) when
# logits are rank 2 (m, vocab), so the (m, 1) label column is flattened.
loss = tf.reduce_mean(
    tf.nn.sparse_softmax_cross_entropy_with_logits(labels=tf.reshape(y, [-1]), logits=logits)
)
optim = tf.train.AdamOptimizer(learning_rate=ALPHA).minimize(loss)

########################################################################################################################
sep('train model')
cost_his = np.zeros(ITERS)  # per-iteration loss history, plotted after training
GROUP = int(np.ceil(ITERS / 20))  # print progress roughly 20 times over the run
with tf.Session() as sess:
    # Initialize all variables, including Adam's internal slots (beta powers);
    # running the optimizer without this raises FailedPreconditionError.
    sess.run(tf.global_variables_initializer())

    for step in range(ITERS):
        # fetches must be a single list argument: sess.run([optim, loss], ...)
        _, cost = sess.run([optim, loss], feed_dict={x: x_data, y: y_data})
        cost_his[step] = cost  # BUG FIX: history was allocated but never filled

        if step % GROUP == 0:
            print(f'#{step + 1}, cost = {cost}')
    # Report the final cost if the last step was not already printed above.
    if step % GROUP != 0:
        print(f'#{step + 1}, cost = {cost}')

    print('Check and close plotting window to go on')
    plt.plot(cost_his)
    plt.show()  # BUG FIX: without show() the promised window never appeared

    # Interactive loop: pick a word index and rank the whole vocabulary by
    # cosine similarity of the learned embeddings.
    while True:
        sep()
        print(sentence)
        print(idx2word)
        print(word2idx)
        print('Which word to check? Please input index. Negative number for quit.')
        xin = input()
        try:
            xin = int(xin)
        except ValueError:
            print('Please input a number!')
            continue  # BUG FIX: falling through compared a str to int (TypeError)
        if xin < 0:
            break
        if xin >= vocab_len:
            print('Please input index in range of the dictionary!')
            continue
        idx = xin
        word = idx2word[idx]
        print('Check:', idx, word)

        Ev = sess.run(E)  # materialize the trained embedding matrix
        vec_w = Ev[idx]
        cos_arr = []
        for i, vec in enumerate(Ev):
            dot = (vec * vec_w).sum()
            # renamed from `b`, which shadowed the bias variable in the graph
            norm_prod = np.linalg.norm(vec) * np.linalg.norm(vec_w)
            cos_arr.append([i, dot / norm_prod])
        cos_arr = sorted(cos_arr, key=lambda pair: pair[1], reverse=True)
        for i, cos in cos_arr:
            print(f'{i}: "{idx2word[i]}" cos with "{word}" = {cos}')

print('Over')