import os
import sys
import jax
from jax import random
from jax import numpy as jnp
from jax.tree_util import tree_map
import flax
from flax import serialization as sr
from flax.training.train_state import TrainState
import matplotlib.pyplot as plt
import optax

# Toggle: 0 = train from scratch and save, 1 = restore saved weights and evaluate only.
is_load_and_eval = 0

# Checkpoints live under <script dir>/_save/<script name>/v1.0/weights.bin.
_script_path = os.path.abspath(__file__)
_script_dir = os.path.dirname(_script_path)
_script_name = os.path.basename(_script_path)
save_dir = os.path.join(_script_dir, '_save', _script_name, 'v1.0')
os.makedirs(save_dir, exist_ok=True)
save_path = os.path.join(save_dir, 'weights.bin')

# Problem size: n_samples observations mapping x_dim features -> y_dim targets.
n_samples = 20
x_dim = 10
y_dim = 5

# Draw the ground-truth affine map y = x @ W + b.
# BUG FIX: the original split k1 a second time after k1 had already been
# consumed by random.normal — reusing a PRNG key correlates the sampling
# streams with W. Split the root key into four independent subkeys instead.
key = random.PRNGKey(0)
k1, k2, key_sample, key_noise = random.split(key, 4)
W = random.normal(k1, (x_dim, y_dim))
b = random.normal(k2, (y_dim,))
# Package W/b in the same pytree layout flax.linen.Dense.init produces, so
# the true parameters can be fed straight into the loss as a sanity check.
true_params = flax.core.freeze({
    'params': {
        'kernel': W,
        'bias': b
    }
})

# Synthesize a noisy linear-regression dataset from the true parameters.
x_samples = random.normal(key_sample, (n_samples, x_dim))
y_samples = jnp.dot(x_samples, W) + b + 0.1 * random.normal(key_noise, (n_samples, y_dim))
print('x shape:', x_samples.shape, 'y shape:', y_samples.shape)

# Single dense layer y = x @ kernel + bias. CONSISTENCY FIX: use y_dim/x_dim
# instead of the hard-coded 5 and 10 so the model always matches the data.
model = flax.linen.Dense(features=y_dim)

# NOTE(review): PRNGKey(0) is also the data-generation seed above; harmless
# for this demo, but a distinct seed would decorrelate init from the data.
key1, key2 = random.split(random.PRNGKey(0))
x = random.normal(key1, (x_dim,))  # dummy input used only for shape inference
params = model.init(key2, x)  # flax infers kernel/bias shapes from x at init
print('params', jax.tree_util.tree_map(lambda p: p.shape, params))  # checking output shapes

# Evaluation mode: replace the freshly initialized params with the weights
# serialized by a previous training run. from_bytes uses `params` as the
# template pytree for the restored values.
if is_load_and_eval:
    with open(save_path, 'rb') as checkpoint:
        print(f'Loading from {save_path}')
        serialized = checkpoint.read()
        params = sr.from_bytes(params, serialized)
        print('Loaded')

# Plain SGD; swap in optax.adam(learning_rate=learning_rate) to compare optimizers.
learning_rate = 0.3
tx = optax.sgd(learning_rate=learning_rate)

# Bundle the model's apply fn, current params, and optimizer state together.
trainState = TrainState.create(apply_fn=model.apply, params=params, tx=tx)


@jax.jit
def mse(params, x_batched, y_batched):
    """Mean squared error of the Dense model over a batch.

    Args:
        params: pytree of model parameters (layout of model.init's output).
        x_batched: inputs, shape (batch, x_dim).
        y_batched: targets, shape (batch, y_dim).

    Returns:
        Scalar: batch mean of 0.5 * ||model(x) - y||^2.
    """

    def squared_error(x, y):
        # FIX: call model.apply directly instead of trainState.apply_fn.
        # The original closed over the mutable global `trainState` inside a
        # jitted function: apply_fn is baked in at trace time and would
        # silently go stale if trainState were recreated with a different
        # apply_fn. model.apply is the same callable, referenced stably.
        residual = model.apply(params, x) - y
        return jnp.inner(residual, residual) / 2.0

    # Vectorize the per-example loss over the leading batch axis, then average.
    res = jax.vmap(squared_error)(x_batched, y_batched)
    return jnp.mean(res, axis=0)


# Loss value and its gradient w.r.t. the parameters in a single call.
value_and_grad_fn = jax.value_and_grad(mse)

# Sanity check: the loss at the ground-truth parameters should be small
# (only the 0.1-scaled observation noise remains).
print('Loss for "true" W, b:', mse(true_params, x_samples, y_samples))
if is_load_and_eval:
    sys.exit(0)

# Full-batch gradient descent for 101 steps, logging every 10th loss.
loss_his = []
for step in range(101):
    loss_val, grads = value_and_grad_fn(trainState.params, x_samples, y_samples)
    loss_his.append(loss_val)
    trainState = trainState.apply_gradients(grads=grads)
    if step % 10 == 0:
        print(f'Loss step {step}:', loss_val)

# BUG FIX: the original serialized `params` — the *initial* parameters from
# model.init — so the trained weights were never actually saved. Serialize
# the current parameters held by the train state instead.
trained_params = trainState.params
bytes_output = sr.to_bytes(trained_params)
print('bytes output', bytes_output)
with open(save_path, 'wb') as f:
    print(f'Saving to {save_path}')
    f.write(bytes_output)
    print('Saved.')
dict_output = sr.to_state_dict(trained_params)
print('dict output', dict_output)

# Visualize the training curve.
plt.plot(loss_his)
plt.xlabel('step')
plt.ylabel('loss')
plt.show()
