import matplotlib.pyplot as plt
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim

# Fix the random seeds so samples and network initialisation are reproducible.
np.random.seed(0)
torch.manual_seed(0)

# Section toggles for the script below.
show_t1_src = True   # plot histogram of the observed waiting times t1
show_t2_src = True   # plot histogram of the observed ride times t2
show_t1_sim = True   # plot the simulated t1 distribution
plot2 = False        # t1 plot style: True = real-vs-sim comparison, False = sim + theoretical pdf
show_t2_sim = True   # plot the simulated t2 distribution
if_train = False     # run Part 3 (BP network training)

# Part 1: Data Analysis and Simulation

# Observed waiting times t1 (seconds).
t1_data = np.array([35, 42, 28, 50, 40, 32, 25, 45, 38, 30,
                    20, 15, 18, 22, 17, 19, 21, 16, 14, 12,
                    48, 55, 60, 52, 58, 46, 44, 49, 53, 57,
                    26, 31, 29, 33, 27, 24, 23, 36, 39, 34])

# Observed ride times t2 (seconds).
t2_data = np.array([20, 22, 19, 21, 23, 18, 17, 20, 19, 21,
                    18, 15, 16, 17, 18, 19, 20, 21, 22, 23,
                    24, 25, 22, 21, 20, 19, 18, 17, 16, 15,
                    20, 21, 19, 18, 22, 23, 24, 25, 22, 21])

# Descriptive statistics: sample mean and sample standard deviation
# (ddof=1 applies Bessel's correction).
t1_mean = t1_data.mean()
t1_std = t1_data.std(ddof=1)
t2_mean = t2_data.mean()
t2_std = t2_data.std(ddof=1)

print("t1 mean:", t1_mean)
print("t1 std:", t1_std)
print("t2 mean:", t2_mean)
print("t2 std:", t2_std)

# Distribution assumptions:
# t1 is modelled as exponential with rate lambda = 1 / mean(t1).
lambda_param = 1 / t1_mean

# t2 is modelled as normal with mean t2_mean and std t2_std.

# Histograms of the raw observations.
# Each (flag, sample, title, xlabel, outfile) tuple drives one optional figure;
# iteration order keeps the original t1-then-t2 plotting sequence.
for _flag, _sample, _title, _xlabel, _outfile in (
    (show_t1_src, t1_data, 'Real waiting time t1 distribution',
     'waiting time t1(s)', 't1_histogram.png'),
    (show_t2_src, t2_data, 'Real ride time t2 distribution',
     'riding time t2(s)', 't2_histogram.png'),
):
    if _flag:
        plt.hist(_sample, edgecolor='black', density=True)
        plt.title(_title)
        plt.xlabel(_xlabel)
        plt.ylabel('frequency')
        plt.savefig(_outfile)
        plt.show()

# Part 2: Simulation

# Draw 10,000 exponential waiting times (scale = 1/lambda = sample mean) and
# keep only values inside the observed range [10, 60].
t1_sim_all = np.random.exponential(scale=1/lambda_param, size=10000)
t1_keep = np.logical_and(t1_sim_all >= 10, t1_sim_all <= 60)
t1_sim = t1_sim_all[t1_keep]

# Draw 10,000 normal ride times and keep only values inside [15, 25].
t2_sim_all = np.random.normal(loc=t2_mean, scale=t2_std, size=10000)
t2_keep = np.logical_and(t2_sim_all >= 15, t2_sim_all <= 25)
t2_sim = t2_sim_all[t2_keep]

# Sample statistics of the truncated simulated data (Bessel-corrected std).
t1_sim_mean = t1_sim.mean()
t1_sim_std = t1_sim.std(ddof=1)
t2_sim_mean = t2_sim.mean()
t2_sim_std = t2_sim.std(ddof=1)

print("t1_sim mean:", t1_sim_mean)
print("t1_sim std:", t1_sim_std)
print("t2_sim mean:", t2_sim_mean)
print("t2_sim std:", t2_sim_std)


# Plot the simulated t1 distribution.

# Theoretical exponential density on [10, 60], renormalised by the probability
# mass of the truncation window so it integrates to 1 there and matches the
# density histogram of the truncated sample t1_sim.
x = np.linspace(10, 60, 500)
pdf = lambda_param * np.exp(-lambda_param * x)
normalization_factor = np.exp(-lambda_param * 10) - np.exp(-lambda_param * 60)
pdf = pdf / normalization_factor
if show_t1_sim:
    if plot2:
        # Real-data histogram reduced to bin-centre points so it can be
        # overlaid as a curve on the simulated histogram.
        counts_real, bins_real = np.histogram(t1_data, bins=10, range=(10, 60), density=True)
        bins_center_real = (bins_real[:-1] + bins_real[1:]) / 2

        # Histogram of the simulated (truncated) sample.
        plt.hist(t1_sim, bins=50, range=(10, 60), density=True, alpha=0.5, label='仿真数据', color='orange', edgecolor='black')

        # Real-data histogram as a line with markers.
        plt.plot(bins_center_real, counts_real, 'b-', marker='o', label='真实数据')

        # Theoretical curve. FIX: reuse the truncation-normalised pdf computed
        # above — the previous code re-derived an unnormalised pdf here, which
        # did not match the density-scaled histograms or the other branch.
        plt.plot(x, pdf, 'r-', label='理论指数分布')

        plt.title('真实数据与仿真数据的等待时间分布比较')
        plt.xlabel('等待时间 t1 (秒)')
        plt.ylabel('概率密度')
        plt.legend()
        plt.savefig('t1_real_vs_sim.png')
        plt.show()
    else:
        # Simulated histogram with the theoretical (truncated) density overlay.
        plt.hist(t1_sim, bins=10, density=True, edgecolor='black')
        plt.plot(x, pdf, 'r-', label='Exp distribution density function')
        plt.title('Simulated wait time t1 distribution')
        plt.xlabel('waiting time t1 (s)')
        plt.ylabel('Probability density')
        plt.legend()
        plt.savefig('t1_simulation.png')
        plt.show()




# Plot the simulated t2 distribution.

import math  # stdlib erf for the normal CDF (no scipy dependency)

# Theoretical normal density on [15, 25].
x = np.linspace(15, 25, 500)
pdf = (1 / (np.sqrt(2 * np.pi) * t2_std)) * np.exp(- (x - t2_mean) ** 2 / (2 * t2_std ** 2))
# FIX: t2_sim is truncated to [15, 25] and plotted with density=True, so the
# overlaid pdf must be renormalised by the window's probability mass
# Phi((25-mu)/sigma) - Phi((15-mu)/sigma), exactly as done for t1 earlier.
# Previously the untruncated density was drawn, which undershoots the histogram.
_z_hi = (25 - t2_mean) / (t2_std * math.sqrt(2.0))
_z_lo = (15 - t2_mean) / (t2_std * math.sqrt(2.0))
_t2_window_mass = 0.5 * (math.erf(_z_hi) - math.erf(_z_lo))
pdf = pdf / _t2_window_mass
if show_t2_sim:
    plt.hist(t2_sim, bins=10, density=True, edgecolor='black')
    plt.plot(x, pdf, 'r-', label='Normal distribution density function')
    plt.title('Simulated ride times t2 distribution')
    plt.xlabel('ride time t2(s)')
    plt.ylabel('Probability density')
    plt.legend()
    plt.savefig('t2_simulation.png')
    plt.show()

# Part 3: BP Neural Network to Approximate Exponential Distribution using PyTorch

# Training set: 1000 points t in [0, 100] with targets from the exponential pdf.
t_train = np.linspace(0, 100, 1000)
f_train = lambda_param * np.exp(-lambda_param * t_train)

# Column-vector float32 tensors for the network (shape (1000, 1)).
t_train_tensor = torch.tensor(t_train, dtype=torch.float32).reshape(-1, 1)
f_train_tensor = torch.tensor(f_train, dtype=torch.float32).reshape(-1, 1)

# 增加网络复杂度
class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.hidden1 = nn.Linear(1, 20)
        self.hidden2 = nn.Linear(20, 10)
        self.output = nn.Linear(10, 1)
        self.activation = nn.ReLU()
        
    def forward(self, x):
        x = self.activation(self.hidden1(x))
        x = self.activation(self.hidden2(x))
        x = self.output(x)
        return x
    
class Net2(Net):
    """Deeper variant of Net with an extra 20 -> 20 hidden stage.

    NOTE(review): forward applies the single ``self.hidden`` module three
    times in a row, so those three stages share one weight matrix. That is
    what the original code did and is preserved here — confirm it is
    intentional rather than three independent layers.
    """

    def __init__(self):
        super(Net2, self).__init__()
        self.hidden = nn.Linear(20, 20)

    def forward(self, x):
        x = self.activation(self.hidden1(x))
        # Shared 20 -> 20 layer applied three consecutive times.
        for _ in range(3):
            x = self.activation(self.hidden(x))
        x = self.activation(self.hidden2(x))
        return self.output(x)

class Net3(nn.Module):
    def __init__(self):
        super(Net3, self).__init__()
        self.hidden1 = nn.Linear(1, 40)
        self.hidden2 = nn.Linear(40, 10)
        self.output = nn.Linear(10, 1)
        self.activation = nn.ReLU()
    def forward(self, x):
        x = self.activation(self.hidden1(x))
        x = self.activation(self.hidden2(x))
        x = self.output(x)
        return x

if if_train:
    # Train the small BP network to regress the exponential pdf.
    model = Net()
    # model = Net2()

    # Loss and optimizer: plain MSE regression with Adam.
    criterion = nn.MSELoss()
    optimizer = optim.Adam(model.parameters(), lr=0.005)

    # Full-batch gradient descent over the (t, f(t)) training pairs.
    num_epochs = 1000
    for epoch in range(num_epochs):
        model.train()
        optimizer.zero_grad()
        outputs = model(t_train_tensor)
        loss = criterion(outputs, f_train_tensor)
        loss.backward()
        optimizer.step()
        if (epoch+1) % 100 == 0:
            print(f'Epoch [{epoch+1}/{num_epochs}], Loss: {loss.item():.6f}')

    # Persist the trained weights.
    torch.save(model.state_dict(), 'bp_model_weights.pth')

    # Evaluate the fit on the real observations.
    # Reload weights if needed:
    # model.load_state_dict(torch.load('bp_model_weights.pth'))

    # Theoretical density values at the observed waiting times.
    t1_real_tensor = torch.from_numpy(t1_data.reshape(-1, 1)).float()
    f_real = lambda_param * np.exp(-lambda_param * t1_data)

    # Network predictions (inference mode, no gradients).
    model.eval()
    with torch.no_grad():
        f_pred_real_tensor = model(t1_real_tensor)
    f_pred_real = f_pred_real_tensor.numpy().flatten()

    # Scatter the theoretical density against the network's prediction.
    plt.scatter(t1_data, f_real, color='blue', label='real probability density')
    plt.scatter(t1_data, f_pred_real, color='red', label='Neural network prediction')
    plt.title('The fitting effect of neural network on real data')  # Figure 5.3
    plt.xlabel('Waiting time t1 (s)')
    # FIX: corrected axis-label typo 'densit' -> 'density'.
    plt.ylabel('Probability density f(t1)')
    plt.legend()
    plt.savefig('nn_real_data_fitting.png')
    plt.show()
