"""
@Filename       : embedded_ic.py
@Create Time    : 2021/1/12 18:49
@Author         : Rylynn
@Description    : 

"""
import os
import random

import torch as th
import torch.nn as nn
import logging

from torch.utils.data import DataLoader, Dataset

from utils.dataloader import DiffuseSequenceDataset


class EmbeddedIC(nn.Module):
    """Embedded Independent Cascade (Embedded-IC) diffusion model.

    Each user gets two learned embeddings: one used when acting as a
    sender (influencer) and one when acting as a receiver (influencee).
    Column 0 of each embedding is treated as a per-user bias term and
    the remaining columns as position coordinates.
    """

    def __init__(self, config):
        """
        Args:
            config: dict with 'user_num' (number of users) and
                'embed_size' (embedding width, including the leading
                bias column).
        """
        super(EmbeddedIC, self).__init__()
        self.sender_embed = nn.Embedding(config['user_num'], config['embed_size'])
        self.receiver_embed = nn.Embedding(config['user_num'], config['embed_size'])

    def infect_prob(self, senders, receivers):
        """Row-wise infection probability for aligned sender/receiver pairs.

        Args:
            senders: (batch, embed_size) sender embeddings; column 0 is a
                bias, columns 1: are coordinates.
            receivers: (batch, embed_size) receiver embeddings, same layout.

        Returns:
            (batch,) tensor of probabilities in (0, 1).
        """
        # Bug fix: the original took th.norm over the *whole* difference
        # matrix (a 0-dim scalar) and then applied th.sum(..., dim=1),
        # which raises on a 0-dim tensor. Compute one Euclidean distance
        # per row instead.
        # NOTE(review): the Embedded-IC paper uses a *negated* distance so
        # that closer users infect each other with higher probability —
        # confirm the intended sign before training.
        distance = th.norm(senders[:, 1:] - receivers[:, 1:], dim=1)
        return th.sigmoid(senders[:, 0] + receivers[:, 0] + distance)

    def forward(self, sequence):
        # TODO: incomplete stub — only measures the cascade length and
        # returns None. train() below calls model(u, v_list) with two
        # arguments, which this signature does not accept; reconcile when
        # the likelihood/loss computation is implemented.
        seq_len = len(sequence)

    def loss(self):
        # TODO: per-cascade likelihood loss not implemented yet.
        ...


def train(root_path, dataset):
    """Train an EmbeddedIC model on one diffusion-sequence dataset.

    Args:
        root_path: directory containing the per-dataset subdirectories.
        dataset: name of the dataset subdirectory (e.g. 'twitter').
    """
    # NOTE(review): user_num / embed_size are placeholders — an
    # nn.Embedding(0, 0) is unusable, so these must be filled in (e.g.
    # from the dataset vocabulary) before training can run.
    config = {
        'user_num': 0,
        'embed_size': 0,
        'lr': 0.001,
        'epoches': 20,
    }
    # Fix: the original hard-coded root_path='../../data', dataset='twitter'
    # here, silently ignoring this function's arguments (and computed
    # train/test file paths it never used).
    train_dataloader = DataLoader(
        DiffuseSequenceDataset(root_path=root_path, dataset=dataset, train=True),
        batch_size=1,
        shuffle=True)
    test_dataloader = DataLoader(
        DiffuseSequenceDataset(root_path=root_path, dataset=dataset, train=False),
        batch_size=128,
        num_workers=4)

    model = EmbeddedIC(config)
    model = model.cuda()

    optimizer = th.optim.Adam(params=model.parameters(), lr=config['lr'])
    for epoch in range(config['epoches']):
        epoch_loss = 0
        for sequence, _, seq_length in train_dataloader:
            # batch_size=1: unwrap the single cascade in the batch.
            sequence = sequence[0].cuda()
            # Fix: draw the index once and reuse it (the original drew a
            # second, independent random index for u). random.randint is
            # inclusive on both ends, so the upper bound must be
            # seq_length - 1 to stay in range.
            u_idx = random.randint(1, int(seq_length) - 1)
            u = sequence[u_idx]
            # Fix: the users activated before u are the prefix itself;
            # the original indexed the sequence with its own values
            # (sequence[sequence[:u_idx]]).
            v_list = sequence[:u_idx]
            # NOTE(review): EmbeddedIC.forward currently takes a single
            # `sequence` argument and returns None — this call will fail
            # until the model's forward/loss are implemented.
            loss = model(u, v_list)
            optimizer.zero_grad()

            epoch_loss += loss.item()

            loss.backward()
            optimizer.step()

        if epoch % 3 == 0:
            # TODO: evaluate on test_dataloader every few epochs.
            ...

        ...





