import numpy as np
import matplotlib.pyplot as plt
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.pipeline import make_pipeline


def simulate(k, m, N_total, penalty_power):
    """Run one Monte Carlo trial of total value vs. a communication cost.

    Draws a random categorical distribution over ``m`` categories from a
    symmetric Dirichlet prior, computes its Shannon entropy ``H_T`` (in
    bits), and compares the summed random utilities of ``N_total`` members
    against a cost term ``c_T`` plus a penalty on the deviation of ``H_T``
    from the benchmark entropy ``H_b = log2(sqrt(N_total))``.

    Parameters
    ----------
    k : float
        Scaling constant for both communication-cost terms.
    m : int
        Number of categories for the Dirichlet draw (m >= 1).
    N_total : int
        Number of members; utilities are drawn uniformly from [1, 10).
    penalty_power : float
        Exponent applied to ``|H_T - H_b|`` in the penalty term.

    Returns
    -------
    tuple of float
        ``(H_T, v_T_with_penalty, v_T_without_penalty, H_b)`` — both value
        terms are clipped below at 0.
    """
    probabilities = np.random.dirichlet(np.ones(m), size=1)[0]
    # Guard against probabilities that underflow to exactly 0: log2(0) is
    # -inf and 0 * -inf = nan would poison the entropy sum. By the standard
    # convention 0*log(0) = 0, zero entries contribute nothing, so drop them.
    nonzero = probabilities[probabilities > 0]
    H_T = -np.sum(nonzero * np.log2(nonzero))

    utilities = np.random.uniform(low=1, high=10, size=N_total)

    # Communication cost: a per-category term plus a per-member term.
    c1 = k * m * np.log2(m)
    c2 = k * N_total * np.log2(N_total / m) / m
    c_T = c1 + c2

    # Benchmark entropy; deviation from it is penalized polynomially.
    H_b = np.log2(np.sqrt(N_total))
    penalty = 10 * np.abs(H_T - H_b) ** penalty_power

    # Clip both value figures at zero (negative value is treated as worthless).
    v_T_with_penalty = max(np.sum(utilities) - c_T - penalty, 0)
    v_T_without_penalty = max(np.sum(utilities) - c_T, 0)

    return H_T, v_T_with_penalty, v_T_without_penalty, H_b


# Experiment configuration.
k = 1
m_values = [1, 5, 10, 20, 40, 80, 100, 200, 300, 500, 700]
N = 1000
penalty_power = 3

# Run 500 Monte Carlo trials per category count and unpack the per-trial
# tuples (H_T, v_with, v_without, H_b) into parallel result columns.
_trials = [
    simulate(k, m, N, penalty_power)
    for m in m_values
    for _ in range(500)
]
all_H_T = [t[0] for t in _trials]
all_v_T_with_penalty = [t[1] for t in _trials]
all_v_T_without_penalty = [t[2] for t in _trials]
H_b_values = [t[3] for t in _trials]


# Fit a degree-4 polynomial regression of penalized value on entropy.
degree = 4
model = make_pipeline(PolynomialFeatures(degree), LinearRegression())
features = np.asarray(all_H_T).reshape(-1, 1)
model.fit(features, np.asarray(all_v_T_with_penalty))

# Evaluate the fitted curve on a dense grid spanning the observed entropies.
H_T_lin = np.linspace(min(all_H_T), max(all_H_T), 500)
v_T_pred = model.predict(H_T_lin.reshape(-1, 1))

# Locate where the fitted curve attains its maximum.
peak = int(np.argmax(v_T_pred))
max_H_T = H_T_lin[peak]
max_v_T = v_T_pred[peak]


# Plot raw (no-penalty) samples, the penalized fit, and its maximum.
plt.figure(figsize=(10, 6))
plt.scatter(all_H_T, all_v_T_without_penalty, color='blue', label='Original Data (without penalty)', alpha=0.3)
plt.plot(H_T_lin, v_T_pred, color='red', label=f'Polynomial Degree {degree} Fit (with penalty)')
plt.scatter([max_H_T], [max_v_T], color='green', marker='o', label='Maximum Point')

# BUG FIX: the vertical reference line is labelled H_b = log2(sqrt(N)),
# so it must be drawn at that analytic value — previously it was drawn at
# the fitted maximum max_H_T, which only approximately coincides with H_b.
H_b_ref = np.log2(np.sqrt(N))
plt.axvline(x=H_b_ref, color='purple', linestyle='--', label=r'$H_b = \log_2(\sqrt{N})$')
ymin, ymax = plt.ylim()
plt.text(H_b_ref, ymin + 0.1 * (ymax - ymin), r'$H_b = \log_2(\sqrt{N})$', color='purple', fontsize=12, ha='center')

plt.title('Value Efficiency (v_T) vs Entropy (H_T)')
plt.axis('off')  # Hide axes
plt.legend(loc='lower left')
plt.savefig('HT_vs_vT_plot.png')
plt.close()  # release the figure so repeated runs don't accumulate memory

