import tensorflow as tf
import time
import os

# Emit INFO-level TF logs so LoggingTensorHook output is visible during training.
tf.logging.set_verbosity(tf.logging.INFO)
print("Using TensorFlow version %s" % (tf.__version__))


class LR():
    """Logistic-regression model built with TF1 graph-mode APIs.

    Wires a linear model over the given feature columns, a sigmoid
    cross-entropy loss, an FTRL optimizer, and a streaming AUC metric.
    The whole graph is constructed in __init__.
    """

    def __init__(self, column=None, inputs=None):
        """Build the model graph.

        Args:
            column: list of feature columns fed to the linear model.
            inputs: (features, labels) pair, e.g. from iterator.get_next().

        Raises:
            ValueError: if `inputs` or `column` is falsy.
        """
        if not inputs:
            raise ValueError("Dataset is not defined.")
        self._feature = inputs[0]
        self._label = inputs[1]
        self._column = column
        if not column:
            raise ValueError("Column is not defined.")
        self._linear_learning_rate = 0.2
        self.is_training = True

        # Construct the full graph up front: logits, then loss/optimizer/metrics
        # grouped under the 'head' name scope.
        self._create_model()
        with tf.name_scope('head'):
            self._create_loss()
            self._create_optimizer()
            self._create_metrics()

    def _create_model(self):
        # Wide/linear part: one logit per example, sparse features combined by sum.
        # The 'linear' variable scope is relied on by _create_optimizer's var_list
        # and by the checkpoint tensor names read after training.
        with tf.variable_scope('linear') as scope:
            linear_logits = tf.feature_column.linear_model(
                units=1,
                features=self._feature,
                feature_columns=self._column,
                sparse_combiner='sum',
                weight_collections=None,
                trainable=True)

            self._add_layer_summary(linear_logits, scope.name)

        # add_n over a single tensor is an identity sum; presumably kept so more
        # towers (e.g. a deep part) can be added later.
        self._logits = tf.add_n([linear_logits])
        self.probability = tf.math.sigmoid(self._logits)
        self.output = tf.round(self.probability)

    def _create_loss(self):
        # NOTE(review): this squeeze rebinds self._logits AFTER probability/output
        # were built from the un-squeezed tensor, so those keep the trailing
        # size-1 dim while the loss sees logits matching the label rank — confirm
        # this asymmetry is intended.
        self._logits = tf.squeeze(self._logits)
        self.loss = tf.losses.sigmoid_cross_entropy(
            self._label,
            self._logits,
            scope='loss',
            reduction=tf.losses.Reduction.SUM_OVER_BATCH_SIZE)
        tf.summary.scalar('loss', self.loss)

    def _create_optimizer(self):
        # FTRL over the 'linear' scope variables only; any collected UPDATE_OPS
        # are forced to run before the minimize step.
        self.global_step = tf.train.get_or_create_global_step()
        linear_optimizer = tf.train.FtrlOptimizer(
            learning_rate=self._linear_learning_rate,
            l1_regularization_strength=0.0,
            l2_regularization_strength=0.0)
        train_ops = []
        update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
        with tf.control_dependencies(update_ops):
            train_ops.append(linear_optimizer.minimize(self.loss,
                                                       var_list=tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
                                                                                  scope='linear'),
                                                       global_step=self.global_step))
            self.train_op = tf.group(*train_ops)

    def _create_metrics(self):
        # Streaming AUC: auc_op must be run per batch to accumulate statistics;
        # auc reads the accumulated value.
        self.auc, self.auc_op = tf.metrics.auc(labels=self._label, predictions=self.probability, num_thresholds=1000)
        tf.summary.scalar('eval_auc', self.auc)

    # used to add summary in tensorboard
    def _add_layer_summary(self, value, tag):
        tf.summary.scalar('%s/fraction_of_zero_values' % tag,
                          tf.nn.zero_fraction(value))
        tf.summary.histogram('%s/activation' % tag, value)


def build_model_input(train_file, batch_size=2, num_parallel_calls=28):
    """Build a batched tf.data.Dataset of (features, labels) from a TFRecord file.

    Each record is expected to be a tf.train.Example holding a variable-length
    int64 "feature" id list and a scalar int64 "label".

    Args:
        train_file: path to the TFRecord file.
        batch_size: examples per batch (default 2, preserving prior behavior).
        num_parallel_calls: parallelism of the parse map (default 28, as before).

    Returns:
        A tf.data.Dataset yielding ({"feature": SparseTensor}, label) batches.
    """
    def decode_fn(record_bytes):
        # Parse one serialized Example into a features dict and a scalar label.
        feature_label = tf.io.parse_single_example(
            record_bytes,
            {
                "feature": tf.io.VarLenFeature(dtype=tf.int64),
                "label": tf.io.FixedLenFeature([], dtype=tf.int64)
            }
        )
        features = {"feature": feature_label["feature"]}
        labels = feature_label["label"]
        return features, labels

    dataset = tf.data.TFRecordDataset([train_file])
    dataset = dataset.map(decode_fn, num_parallel_calls=num_parallel_calls)
    dataset = dataset.batch(batch_size)
    return dataset


def build_feature_columns():
    """Create the list of feature columns used by the linear model.

    The single "feature" id list goes through a dimension-1, ones-initialized
    embedding column, so the model effectively learns one scalar weight per
    feature id (DeepRec's categorical_column_with_embedding avoids a fixed
    vocabulary/hash size).
    """
    id_column = tf.feature_column.categorical_column_with_embedding(
        "feature", dtype=tf.int64)
    weight_column = tf.feature_column.embedding_column(
        categorical_column=id_column,
        dimension=1,
        initializer=tf.ones_initializer(tf.dtypes.float32))
    return [weight_column]


def train(sess_config, input_hooks, model, data_init_op, checkpoint_dir, steps):
    """Train `model` for `steps` steps, checkpoint it, and report sparse weights.

    Args:
        sess_config: tf.ConfigProto for the session.
        input_hooks: extra SessionRunHooks (e.g. dataset-related hooks).
        model: LR instance exposing global_step / loss / train_op.
        data_init_op: initializer op for the training dataset iterator.
        checkpoint_dir: directory for checkpoints and summaries.
        steps: number of training steps; also used as the checkpoint interval.
    """
    hooks = []
    hooks.extend(input_hooks)

    # Initialize local variables and the dataset iterator together so the
    # MonitoredTrainingSession starts (or restores) with a ready input pipeline.
    scaffold = tf.train.Scaffold(
        local_init_op=tf.group(tf.local_variables_initializer(), data_init_op),
        saver=tf.train.Saver(max_to_keep=1))

    stop_hook = tf.train.StopAtStepHook(last_step=steps)
    log_hook = tf.train.LoggingTensorHook(
        {
            'steps': model.global_step,
            'loss': model.loss
        }, every_n_iter=100)

    hooks.append(stop_hook)
    hooks.append(log_hook)
    save_steps = steps

    with tf.train.MonitoredTrainingSession(
            master='',
            is_chief=True,
            hooks=hooks,
            scaffold=scaffold,
            checkpoint_dir=checkpoint_dir,
            save_checkpoint_steps=save_steps,
            summary_dir=checkpoint_dir,
            save_summaries_steps=0,
            config=sess_config) as sess:
        while not sess.should_stop():
            sess.run([model.loss, model.train_op])
    print("Training completed.")
    _report_embedding_weights(checkpoint_dir)


def _report_embedding_weights(checkpoint_dir):
    """Load the saved checkpoint and print total / non-zero feature weight counts."""
    reader = tf.train.load_checkpoint(checkpoint_dir)
    # NOTE(review): tensor names follow the variable_scope('linear') layout plus
    # the embedding column named "feature_embedding" — update if columns change.
    keys = reader.get_tensor("linear/linear_model/feature_embedding/embedding_weights-keys")
    values = reader.get_tensor("linear/linear_model/feature_embedding/embedding_weights-values")
    print(keys)
    print(values)
    assert len(keys) == len(values)
    print('total features: %d' % len(keys))
    key_save = []
    value_save = []
    # zip instead of range(len(...)); read each scalar weight once.
    for key, value in zip(keys, values):
        weight = value[0].item()
        if weight != 0:
            key_save.append(str(key))
            value_save.append(weight)
    print('non zero features: %d' % len(key_save))


def test(sess_config, input_hooks, model, data_init_op, checkpoint_dir, steps):
    """Evaluate the trained model for `steps` batches and report streaming AUC.

    Restores the latest checkpoint from `checkpoint_dir`, runs the AUC update
    op per batch, and on the final step writes the merged summaries for
    TensorBoard and prints the accumulated AUC.

    Args:
        sess_config: tf.ConfigProto for the session.
        input_hooks: extra SessionRunHooks (e.g. dataset-related hooks).
        model: LR instance exposing auc_op / is_training.
        data_init_op: initializer op for the evaluation dataset iterator.
        checkpoint_dir: directory the checkpoint is restored from; eval
            summaries go to its 'eval' subdirectory.
        steps: number of evaluation steps to run.
    """
    model.is_training = False
    hooks = []
    hooks.extend(input_hooks)

    scaffold = tf.train.Scaffold(
        local_init_op=tf.group(tf.local_variables_initializer(), data_init_op))
    session_creator = tf.train.ChiefSessionCreator(
        scaffold=scaffold, checkpoint_dir=checkpoint_dir, config=sess_config)
    writer = tf.summary.FileWriter(os.path.join(checkpoint_dir, 'eval'))
    merged = tf.summary.merge_all()

    # Fix: the FileWriter was never closed, so the final summary could be
    # buffered and lost; close it even if evaluation raises.
    try:
        with tf.train.MonitoredSession(session_creator=session_creator,
                                       hooks=hooks) as sess:
            for _in in range(1, steps + 1):
                if _in != steps:
                    sess.run([model.auc_op])
                    if _in % 1000 == 0:
                        print("Evaluation complete:[{}/{}]".format(_in, steps))
                else:
                    # auc_op both updates and returns the accumulated AUC.
                    eval_auc, events = sess.run(
                        [model.auc_op, merged])
                    writer.add_summary(events, _in)
                    print("Evaluation complete:[{}/{}]".format(_in, steps))
                    print("AUC = {}".format(eval_auc))
    finally:
        writer.close()


def run():
    """Script driver: build the input pipeline and model, then train and evaluate."""
    train_file = "movielens_1m-ratings.tfrecord"

    checkpoint_dir = os.path.join('model_lr_' + str(int(time.time())))
    print("Saving model checkpoints to " + checkpoint_dir)

    # Train and eval read the same file; each gets its own dataset so the
    # shared iterator can be re-initialized between phases.
    train_dataset = build_model_input(train_file)
    test_dataset = build_model_input(train_file)

    # One reinitializable iterator drives both phases.
    iterator = tf.data.Iterator.from_structure(
        output_types=train_dataset.output_types,
        output_shapes=train_dataset.output_shapes,
        output_classes=train_dataset.output_classes)
    next_element = iterator.get_next()
    train_init_op = iterator.make_initializer(train_dataset)
    test_init_op = iterator.make_initializer(test_dataset)

    model = LR(column=build_feature_columns(), inputs=next_element)

    sess_config = tf.ConfigProto()
    hooks = []

    train(sess_config, hooks, model, train_init_op, checkpoint_dir, 10000)
    test(sess_config, hooks, model, test_init_op, checkpoint_dir, 10000)


# Script entry point.
if __name__ == '__main__':
    run()
