import math
from collections import Counter
def calculate_entropy(data):
    """Return the Shannon entropy (base 2) of the label sequence *data*.

    An empty or single-class sequence has entropy 0.0.
    """
    n = len(data)
    entropy = 0.0
    # Counter gives the frequency of every distinct label in one pass.
    for count in Counter(data).values():
        p = count / n
        entropy -= p * math.log2(p)
    return entropy
def calculate_information_gain(data, feature_index, labels):
    """Return the information gain from splitting *data* on *feature_index*.

    Gain = H(labels) - sum_v (|S_v| / |S|) * H(labels of S_v),
    where S_v is the subset of rows whose feature equals value v.
    """
    def _entropy(seq):
        # Shannon entropy (base 2) of a label sequence; 0.0 when empty.
        m = len(seq)
        return -sum((c / m) * math.log2(c / m) for c in Counter(seq).values())

    total = len(labels)
    # Group labels by the value the chosen feature takes in each row,
    # so each row/label pair is visited exactly once.
    branches = {}
    for row, label in zip(data, labels):
        branches.setdefault(row[feature_index], []).append(label)

    remainder = sum(
        (len(branch) / total) * _entropy(branch) for branch in branches.values()
    )
    return _entropy(labels) - remainder


def id3(data, features, labels):
    """Recursively build an ID3 decision tree.

    Returns a leaf label when the examples are pure (or no features
    remain, in which case the majority label wins), otherwise a nested
    dict of the form {feature_name: {feature_value: subtree}}.
    """
    # Leaf: every example carries the same label.
    if len(set(labels)) == 1:
        return labels[0]
    # Leaf: no features left to split on -- fall back to majority vote.
    if not features:
        return Counter(labels).most_common(1)[0][0]

    # Choose the split with the highest information gain; max() keeps
    # the first winner, so ties break toward the lowest index.
    best = max(
        range(len(features)),
        key=lambda i: calculate_information_gain(data, i, labels),
    )
    chosen = features[best]
    remaining = features[:best] + features[best + 1:]

    # Partition row/label pairs by their value of the chosen feature,
    # dropping the used column before recursing.
    partitions = {}
    for row, label in zip(data, labels):
        reduced = row[:best] + row[best + 1:]
        partitions.setdefault(row[best], []).append((reduced, label))

    node = {chosen: {}}
    for value, pairs in partitions.items():
        subset_rows = [r for r, _ in pairs]
        subset_labels = [l for _, l in pairs]
        node[chosen][value] = id3(subset_rows, remaining, subset_labels)
    return node

# Toy "play tennis"-style dataset: each row is
# [Outlook, Temperature, Humidity, Wind], with a Yes/No play label.
data = [
    ['Sunny', 'Hot', 'High', 'Weak'],
    ['Sunny', 'Hot', 'High', 'Strong'],
    ['Overcast', 'Hot', 'High', 'Weak'],
    ['Rain', 'Mild', 'High', 'Weak'],
    ['Rain', 'Cool', 'Normal', 'Weak']
]
labels = ['No', 'No', 'Yes', 'Yes', 'Yes']
features = ['Outlook', 'Temperature', 'Humidity', 'Wind']

# Guard the demo so importing this module does not build/print the tree.
if __name__ == "__main__":
    decision_tree = id3(data, features, labels)
    print("Generated Decision Tree:", decision_tree)
