import streamlit as st
import numpy as np
import torch
import torch.nn as nn
import random
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
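
# Streamlit demo: evolve a population of small PyTorch classifiers with a simple
# genetic algorithm across several synthetic classification tasks.
# To try it locally, save this file under any name (app.py is just an example) and run:
#     streamlit run app.py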

# Generate a synthetic binary classification task; task_id seeds both the data and the train/test split so each task is distinct but reproducible
def generate_dataset(task_id):
    X, y = make_classification(n_samples=100, n_features=10, n_informative=5, n_redundant=3, n_repeated=2, random_state=task_id)
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=task_id)
    return X_train, X_test, y_train, y_test

# Small feed-forward classifier: 10 input features -> 20 -> 10 hidden units -> 2 output logits
class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.fc1 = nn.Linear(10, 20)
        self.fc2 = nn.Linear(20, 10)
        self.fc3 = nn.Linear(10, 2)

    def forward(self, x):
        x = torch.relu(self.fc1(x))
        x = torch.relu(self.fc2(x))
        x = self.fc3(x)
        return x

# Genetic algorithm over a population of networks: accuracy-based selection, weight-averaging crossover, and Gaussian-noise mutation
class GeneticAlgorithm:
    def __init__(self, population_size):
        self.population_size = population_size
        self.population = [Net() for _ in range(population_size)]

    def selection(self, task_id):
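        # Briefly train each network on this task, score it by accuracy on the
        # held-out test split, and keep only the best-performing half of the population.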
        X_train, X_test, y_train, y_test = generate_dataset(task_id)
        fitness = []
        for net in self.population:
            criterion = nn.CrossEntropyLoss()
            optimizer = torch.optim.Adam(net.parameters(), lr=0.01)
            for epoch in range(10):
                optimizer.zero_grad()
                inputs = torch.tensor(X_train, dtype=torch.float32)
                labels = torch.tensor(y_train, dtype=torch.long)
                outputs = net(inputs)
                loss = criterion(outputs, labels)
                loss.backward()
                optimizer.step()
            # Measure fitness as accuracy on the held-out test split (no gradients needed)
            with torch.no_grad():
                inputs = torch.tensor(X_test, dtype=torch.float32)
                labels = torch.tensor(y_test, dtype=torch.long)
                outputs = net(inputs)
                _, predicted = torch.max(outputs, 1)
            accuracy = accuracy_score(labels.numpy(), predicted.numpy())
            fitness.append(accuracy)
        # Keep the fittest half of the population (highest test accuracy)
        self.population = [self.population[i] for i in np.argsort(fitness)[-self.population_size//2:]]

    def crossover(self):
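        # Build offspring by averaging the weight matrices of two randomly chosen
        # surviving parents; biases keep the child's fresh random initialization.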
        offspring = []
        for _ in range(self.population_size//2):
            parent1, parent2 = random.sample(self.population, 2)
            child = Net()
            child.fc1.weight.data = (parent1.fc1.weight.data + parent2.fc1.weight.data) / 2
            child.fc2.weight.data = (parent1.fc2.weight.data + parent2.fc2.weight.data) / 2
            child.fc3.weight.data = (parent1.fc3.weight.data + parent2.fc3.weight.data) / 2
            offspring.append(child)
        self.population += offspring

    def mutation(self):
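        # With 10% probability per network, perturb every weight matrix with small
        # Gaussian noise to keep some diversity in the population.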
        for net in self.population:
            if random.random() < 0.1:
                net.fc1.weight.data += torch.randn_like(net.fc1.weight.data) * 0.1
                net.fc2.weight.data += torch.randn_like(net.fc2.weight.data) * 0.1
                net.fc3.weight.data += torch.randn_like(net.fc3.weight.data) * 0.1

# Streamlit app
st.title("Evolution of Sub-Models")

# Parameters
st.sidebar.header("Parameters")
population_size = st.sidebar.slider("Population size", 10, 100, 50)
num_tasks = st.sidebar.slider("Number of tasks", 1, 10, 5)
num_generations = st.sidebar.slider("Number of generations", 1, 100, 10)

# Run the evolution
if st.button("Run evolution"):
    ga = GeneticAlgorithm(population_size)
    for generation in range(num_generations):
        for task_id in range(num_tasks):
            ga.selection(task_id)
            ga.crossover()
            ga.mutation()
        st.write(f"Generation {generation+1} complete")

    # Evaluate the final population on each task: briefly retrain each surviving network, then score it on the held-out test split
    final_accuracy = []
    for task_id in range(num_tasks):
        X_train, X_test, y_train, y_test = generate_dataset(task_id)
        accuracy = []
        for net in ga.population:
            criterion = nn.CrossEntropyLoss()
            optimizer = torch.optim.Adam(net.parameters(), lr=0.01)
            for epoch in range(10):
                optimizer.zero_grad()
                inputs = torch.tensor(X_train, dtype=torch.float32)
                labels = torch.tensor(y_train, dtype=torch.long)
                outputs = net(inputs)
                loss = criterion(outputs, labels)
                loss.backward()
                optimizer.step()