import pandas as pd
import numpy as np
import time
import numpy as py
from numpy.linalg import solve
from sklearn.metrics import mean_squared_error
import matplotlib.pyplot as plt
import seaborn as sns
sns.set()
np.random.seed(1)  # fixed seed so the train/test split is reproducible

# Show all rows when printing DataFrames
pd.set_option('display.max_rows', None)
# Show all columns when printing DataFrames
pd.set_option('display.max_columns', None)

start_time = time.time()

# MovieLens-style ratings file: tab-separated user_id, item_id, rating, timestamp
names = ['user_id', 'item_id', 'rating', 'timestamp']
df = pd.read_csv('u.data', sep='\t', names=names)
df = df.sort_values(['user_id', 'item_id'], ascending=True)
df1 = pd.read_csv('tmp_all.csv')
data = df
data = data.drop(['timestamp'], axis=1)
user_list_i = []
user_list_f = []
# df = pd.read_csv('movie_ratings.csv')
# df = df.head(20000)
# Precomputed per-user similarity table; sim() parses each cell as a
# "neighbor_id,score" string — presumably produced by a separate script; verify.
sim_user_data = pd.read_csv('tmp_all.csv')
print(sim_user_data.iloc[0][0], 1111)

n_users = df.user_id.unique().shape[0]
n_items = df.item_id.unique().shape[0]
print(n_users, n_items)
# Dense user x item rating matrix; ids are assumed 1-based and contiguous
# (row[1]=user_id, row[2]=item_id, row[3]=rating from itertuples).
ratings = np.zeros((n_users, n_items))
for row in df.itertuples():
    ratings[row[1]-1, row[2]-1] = row[3]
print(ratings)

print(str(n_users) + ' users')
print(str(n_items) + ' items')
# NOTE(review): this is the fraction of *filled* cells (i.e. density),
# though it is printed under the label "Sparsity".
sparsity = float(len(ratings.nonzero()[0]))
sparsity /= (ratings.shape[0] * ratings.shape[1])
sparsity *= 100
print('Sparsity: {:4.2f}%'.format(sparsity))

# Split the rating matrix into disjoint train and test matrices.
def train_test_split(ratings, n_test=10):
    """Hold out up to ``n_test`` rated items per user as a test set.

    Params
    ======
    ratings : (2D ndarray)
        User x item rating matrix; 0 means unrated.
    n_test : (int)
        Number of rated entries per user to move into the test set
        (default 10, matching the original hard-coded size). The count
        is capped at the user's actual number of ratings, so users with
        fewer than ``n_test`` ratings no longer make
        ``np.random.choice(..., replace=False)`` raise ValueError.
        For a 7:3 split, pass ``n_test`` computed from the rating count.

    Returns
    =======
    (train, test) : two arrays shaped like ``ratings``; held-out
        entries are zeroed in ``train`` and copied into ``test``.
    """
    test = np.zeros(ratings.shape)
    train = ratings.copy()
    for user in range(ratings.shape[0]):
        rated = ratings[user, :].nonzero()[0]
        # Cap at the number of rated items so replace=False stays valid.
        held_out = np.random.choice(rated,
                                    size=min(n_test, len(rated)),
                                    replace=False)
        train[user, held_out] = 0.
        test[user, held_out] = ratings[user, held_out]

    # Test and training sets must be truly disjoint.
    assert np.all((train * test) == 0)
    return train, test
# Hold out part of each user's ratings for evaluation.
train, test = train_test_split(ratings)

# Look up the id or the similarity score of one of a user's neighbors.
def sim(user_id, sim_id, pear):
    """Read the ``sim_id``-th precomputed similarity record of ``user_id``.

    Each cell of the module-level ``sim_user_data`` table holds a
    "neighbor_id,score" string.

    :param user_id: column of the similarity table to read
    :param sim_id: rank of the similar user (row index)
    :param pear: 0 -> return the neighbor's user id as int,
                 1 -> return the similarity score as float
    """
    field = sim_user_data.iloc[sim_id][user_id].split(',')[pear]
    return int(field) if pear == 0 else float(field)

class ExplicitMF():
    def __init__(self,
                 ratings,
                 n_factors=40,
                 item_reg=0.0,
                 user_reg=0.0,
                 alpha=0.1,
                 verbose=True):
        """
        Train a matrix factorization model (ALS) to predict empty
        entries in a matrix. The terminology assumes a ratings matrix
        which is ~ user x item. The user step adds a social
        regularization term built from each user's ten precomputed
        neighbors, looked up through the module-level ``sim()``.

        Params
        ======
        ratings : (ndarray)
            User x Item matrix with corresponding ratings

        n_factors : (int)
            Number of latent factors to use in matrix
            factorization model

        item_reg : (float)
            Regularization term for item latent factors

        user_reg : (float)
            Regularization term for user latent factors

        alpha : (float)
            Weight of the similar-user regularization term.
            BUG FIX: this was previously stored but never used —
            ``als_step`` silently applied its own default of 0.5.
            It is now the default used by ``als_step``.

        verbose : (bool)
            Whether or not to printout training progress
        """
        self.ratings = ratings
        self.n_users, self.n_items = ratings.shape
        self.n_factors = n_factors
        self.item_reg = item_reg
        self.user_reg = user_reg
        self._v = verbose
        self.alpha = alpha

    def als_step(self,
                 latent_vectors,
                 fixed_vecs,
                 ratings,
                 _lambda,
                 type='user',
                 alpha=None):
        """
        One of the two ALS steps. Solve for the latent vectors
        specified by ``type`` while ``fixed_vecs`` stay constant.

        ``alpha=None`` (the default) means "use self.alpha", so the
        value passed to the constructor actually takes effect; pass a
        float to override it for a single call.
        """
        if alpha is None:
            alpha = self.alpha
        if type == 'user':
            # Precompute V^T V and the ridge term once per step.
            YTY = fixed_vecs.T.dot(fixed_vecs)  # VTV
            lambdaI = np.eye(YTY.shape[0]) * _lambda  # lambda * I
            for u in range(latent_vectors.shape[0]):
                # Neighbors ranked 0-4 enter with a negative sign and
                # 5-9 with a positive sign, matching the original
                # hand-unrolled formula — presumably dissimilar vs
                # similar users; TODO confirm against how tmp_all.csv
                # is generated.
                sim_total = 0.0
                sim_combo = np.zeros(latent_vectors.shape[1])
                for k in range(10):
                    sign = -1.0 if k < 5 else 1.0
                    weight = sign * sim(u, k, 1)        # signed similarity
                    neighbor = sim(u, k, 0) - 1         # ids are 1-based
                    sim_total += weight
                    sim_combo += weight * latent_vectors[neighbor, :]
                # alpha * (sum of signed similarities) * I
                alphaI = np.eye(YTY.shape[0]) * alpha * sim_total
                # alpha * (sum of signed similarities * neighbor vectors)
                latent_vectors[u, :] = solve(
                    (YTY + lambdaI + alphaI),
                    (ratings[u, :].dot(fixed_vecs) + alpha * sim_combo))
        elif type == 'item':
            # Plain ridge-regularized ALS step for the item factors.
            XTX = fixed_vecs.T.dot(fixed_vecs)
            lambdaI = np.eye(XTX.shape[0]) * _lambda

            for i in range(latent_vectors.shape[0]):
                latent_vectors[i, :] = solve((XTX + lambdaI),
                                             ratings[:, i].T.dot(fixed_vecs))
        return latent_vectors

    def train(self, n_iter=10):
        """ Train model for n_iter iterations from scratch."""
        # initialize latent vectors
        self.user_vecs = np.random.random((self.n_users, self.n_factors))
        self.item_vecs = np.random.random((self.n_items, self.n_factors))

        self.partial_train(n_iter)

    def partial_train(self, n_iter):
        """
        Train model for n_iter iterations. Can be
        called multiple times for further training.
        """
        ctr = 1
        while ctr <= n_iter:
            if ctr % 10 == 0 and self._v:
                print('\tcurrent iteration: {}'.format(ctr))
            # alpha is left at None so als_step uses self.alpha.
            self.user_vecs = self.als_step(self.user_vecs,
                                           self.item_vecs,
                                           self.ratings,
                                           self.user_reg,
                                           type='user')
            self.item_vecs = self.als_step(self.item_vecs,
                                           self.user_vecs,
                                           self.ratings,
                                           self.item_reg,
                                           type='item')
            ctr += 1

    def predict_all(self):
        """ Predict ratings for every user and item. """
        predictions = np.zeros((self.user_vecs.shape[0],
                                self.item_vecs.shape[0]))
        for u in range(self.user_vecs.shape[0]):
            for i in range(self.item_vecs.shape[0]):
                predictions[u, i] = self.predict(u, i)

        return predictions

    def predict(self, u, i):
        """ Single user and item prediction (dot product of factors). """
        return self.user_vecs[u, :].dot(self.item_vecs[i, :].T)

    def calculate_learning_curve(self, iter_array, test):
        """
        Keep track of MSE as a function of training iterations.

        Params
        ======
        iter_array : (list)
            List of numbers of iterations to train for each step of
            the learning curve. e.g. [1, 5, 10, 20]
        test : (2D ndarray)
            Testing dataset (assumed to be user x item).

        The function creates two new class attributes:

        train_mse : (list)
            Training data MSE values for each value of iter_array
        test_mse : (list)
            Test data MSE values for each value of iter_array
        """
        iter_array.sort()
        self.train_mse = []
        self.test_mse = []
        iter_diff = 0
        for (i, n_iter) in enumerate(iter_array):
            if self._v:
                print('Iteration: {}'.format(n_iter))
            # Only the first point trains from scratch; later points
            # continue from the current factors (incremental training).
            if i == 0:
                self.train(n_iter - iter_diff)
            else:
                self.partial_train(n_iter - iter_diff)

            predictions = self.predict_all()

            self.train_mse += [get_mse(predictions, self.ratings)]
            self.test_mse += [get_mse(predictions, test)]
            if self._v:
                print('Train mse: ' + str(self.train_mse[-1]))
                print('Test mse: ' + str(self.test_mse[-1]))
            iter_diff = n_iter

# Compute the MSE over the entries that are filled in ``actual``.
def get_mse(pred, actual):
    """Mean squared error restricted to ``actual``'s nonzero (rated) cells.

    Equivalent to sklearn's ``mean_squared_error`` on the masked
    values, but computed with numpy directly so the third-party call
    is unnecessary.
    """
    # Ignore unrated (zero) entries of the actual matrix.
    mask = actual.nonzero()
    diff = pred[mask].flatten() - actual[mask].flatten()
    return np.mean(diff ** 2)
# Fit on the training split and record train/test MSE at each checkpoint.
MF_ALS = ExplicitMF(train, n_factors=10, \
                    user_reg=0.1, item_reg=0.1, alpha=0.1)
iter_array = [1, 2, 5, 10, 25, 50, 100, 150, 200, 250]
MF_ALS.calculate_learning_curve(iter_array, test)


# Plot the learning curves.
def plot_learning_curve(iter_array, model):
    """Draw the model's training and test MSE against iteration count."""
    for series, label in ((model.train_mse, 'Training'),
                          (model.test_mse, 'Test')):
        plt.plot(iter_array, series, label=label, linewidth=5)

    plt.xticks(fontsize=16)
    plt.yticks(fontsize=16)
    plt.xlabel('iterations', fontsize=30)
    plt.ylabel('MSE', fontsize=30)
    plt.legend(loc='best', fontsize=20)
    plt.show()

plot_learning_curve(iter_array, MF_ALS)
end_time = time.time()
# Report total wall-clock runtime in seconds.
print('运行时间：', end_time - start_time, '秒')
