# Updated implementation: Keras Tuner RandomSearch over the GA hyperparameters, plus saving the best individual to .npy
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
import plotly.graph_objs as go
from keras_tuner import RandomSearch
from tqdm import tqdm
# Define the target X-shape pattern
target_pattern = np.array([
    [25.76815, -80.1868],
    [25.7743, -80.1937],
    [25.762, -80.18],
    [25.76815, -80.1868],
    [25.7743, -80.18],
    [25.762, -80.1937]
])
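# Each row above is a (latitude, longitude) waypoint; the six points trace the X shape.
# Every GA individual below is a candidate set of six such points with the same (6, 2) shape.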
# Convert target pattern to TensorFlow tensor
target_pattern_tf = tf.constant(target_pattern, dtype=tf.float32)
# Define the GA parameters ranges
population_sizes = [30, 50, 70]
num_generations = 100000
mutation_rates = [0.05, 0.1, 0.15]
crossover_rates = [0.7, 0.8, 0.9]
elitism_counts = [3, 5, 7]
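# The discrete grids above are what the tuner samples from via hp.Choice further down;
# num_generations stays fixed for every trial.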
# Define the fitness function using TensorFlow
def fitness_function(positions):
    return tf.reduce_sum((positions - target_pattern_tf) ** 2, axis=[1, 2])
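# Shape sketch (for reference only): with a population tensor of shape (pop_size, 6, 2),
# target_pattern_tf (6, 2) broadcasts across the leading population axis, and summing over
# axes [1, 2] yields one squared-error score per individual, i.e. shape (pop_size,).
# Hypothetical example:
#   demo_population = tf.random.uniform((4, len(target_pattern), 2))
#   fitness_function(demo_population).shape  # TensorShape([4]); lower is better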
# Selection function (tournament selection) using TensorFlow
def selection(population, fitness_values):
    selected = []
    for _ in range(len(population)):
        idx1, idx2 = tf.random.uniform(shape=(2,), minval=0, maxval=len(population), dtype=tf.int32)
        if fitness_values[idx1] < fitness_values[idx2]:
            selected.append(population[idx1])
        else:
            selected.append(population[idx2])
    return tf.stack(selected)
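# Note: this is a binary tournament with replacement -- each slot in the parent pool is filled
# by drawing two random indices and keeping the fitter (lower-error) individual, so the
# returned tensor has the same length as the population.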
# Crossover function (single-point crossover) using TensorFlow
def crossover(parent1, parent2, crossover_rate):
    if tf.random.uniform(()) < crossover_rate:
        crossover_point = tf.random.uniform((), minval=1, maxval=len(parent1), dtype=tf.int32)
        child1 = tf.concat([parent1[:crossover_point], parent2[crossover_point:]], axis=0)
        child2 = tf.concat([parent2[:crossover_point], parent1[crossover_point:]], axis=0)
        return child1, child2
    else:
        return parent1, parent2
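# Sketch of the single-point crossover above: with probability crossover_rate the parents swap
# their waypoint tails after a random cut point in [1, 5]; otherwise they pass through unchanged.
# Hypothetical usage with two (6, 2) tensors p1, p2:
#   c1, c2 = crossover(p1, p2, crossover_rate=0.8)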
# Mutation function using TensorFlow Probability
def mutate(individual, mutation_rate):
    mutation_mask = tfp.distributions.Bernoulli(probs=mutation_rate).sample(sample_shape=tf.shape(individual))
    mutation_noise = tfp.distributions.Normal(loc=0.0, scale=0.1).sample(sample_shape=tf.shape(individual))
    return individual + tf.cast(mutation_mask, tf.float32) * mutation_noise
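# Sketch of the mutation above: a Bernoulli(mutation_rate) mask selects roughly mutation_rate
# of the individual's coordinates, and only those entries receive additive Gaussian noise with
# standard deviation 0.1. Hypothetical example:
#   mutated = mutate(tf.zeros((6, 2)), mutation_rate=0.1)  # ~10% of entries become non-zero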
# GA algorithm using TensorFlow
def genetic_algorithm(population_size, mutation_rate, crossover_rate, elitism_count, num_generations):
    population = tf.random.uniform((population_size, len(target_pattern), 2), dtype=tf.float32)
    best_fitness_overall = float('inf')
    best_individual_overall = None
    for generation in tqdm(range(num_generations), desc="Generations"):
        fitness_values = fitness_function(population)
        # Track the best fitness and individual overall
        best_fitness_current_gen = tf.reduce_min(fitness_values).numpy()
        best_individual_current_gen = population[tf.argmin(fitness_values)].numpy()
        if best_fitness_current_gen < best_fitness_overall:
            best_fitness_overall = best_fitness_current_gen
            best_individual_overall = best_individual_current_gen
        # Elitism: Preserve the best individuals
        elite_indices = tf.argsort(fitness_values)[:elitism_count]
        elites = tf.gather(population, elite_indices)
        # Select parents
        parents = selection(population, fitness_values)
        # Create offspring through crossover and mutation
        offspring = []
        for i in range(0, len(parents) - 1, 2):
            child1, child2 = crossover(parents[i], parents[i + 1], crossover_rate)
            offspring.append(mutate(child1, mutation_rate))
            offspring.append(mutate(child2, mutation_rate))
        # Ensure offspring size matches population size - elitism count
        num_offspring_needed = population_size - elitism_count
        offspring = offspring[:num_offspring_needed]
        # Replace the population with the offspring and elites
        population = tf.concat([tf.stack(offspring), elites], axis=0)
    return best_fitness_overall, best_individual_overall
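# Quick smoke test (optional, commented out): running a short GA with hypothetical parameters
# verifies the loop end to end before committing to the full tuning search, e.g.
#   best_f, best_ind = genetic_algorithm(30, 0.1, 0.8, 3, num_generations=100)
#   print(best_f, best_ind.shape)  # scalar fitness and a (6, 2) waypoint array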
# Custom tuner for Keras Tuner: the GA is not a Keras model, so instead of a HyperModel we
# subclass RandomSearch and override run_trial(), returning the best fitness directly
# (a single float returned from run_trial is minimized by KerasTuner).
class GATuner(RandomSearch):
    def run_trial(self, trial, **kwargs):
        hp = trial.hyperparameters
        population_size = hp.Choice('population_size', values=population_sizes)
        mutation_rate = hp.Choice('mutation_rate', values=mutation_rates)
        crossover_rate = hp.Choice('crossover_rate', values=crossover_rates)
        elitism_count = hp.Choice('elitism_count', values=elitism_counts)
        best_fitness, _ = genetic_algorithm(
            population_size, mutation_rate, crossover_rate, elitism_count, num_generations
        )
        return float(best_fitness)

# Hyperparameter tuning using Keras Tuner (random search over the GA parameter grids)
tuner = GATuner(
    max_trials=50,
    executions_per_trial=1,
    directory='ga_tuning',
    project_name='genetic_algorithm'
)
tuner.search()
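# Optionally, tuner.results_summary() prints the best trials and their sampled hyperparameters.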
# Get the best hyperparameters
best_hyperparams = tuner.get_best_hyperparameters(num_trials=1)[0]
best_population_size = best_hyperparams.get('population_size')
best_mutation_rate = best_hyperparams.get('mutation_rate')
best_crossover_rate = best_hyperparams.get('crossover_rate')
best_elitism_count = best_hyperparams.get('elitism_count')
# Run the GA with the best hyperparameters
best_fitness, best_individual = genetic_algorithm(
    best_population_size, best_mutation_rate, best_crossover_rate, best_elitism_count, num_generations
)
# Output the best hyperparameters and the corresponding best individual
print(f"Best hyperparameters: population_size={best_population_size}, mutation_rate={best_mutation_rate}, crossover_rate={best_crossover_rate}, elitism_count={best_elitism_count}")
print(f"Best fitness: {best_fitness}")
# Save the best individual
np.save('best_individual.npy', best_individual)
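# The saved waypoints can be reloaded later with np.load('best_individual.npy'),
# which returns the same (6, 2) array for plotting or replay.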
# Validate the flight path trajectory
def plot_trajectory(positions):
    fig = go.Figure()
    # Plot target pattern
    fig.add_trace(go.Scatter(
        x=target_pattern[:, 0],
        y=target_pattern[:, 1],
        mode='markers+lines',
        name='Target Pattern',
        marker=dict(size=10, color='red')
    ))
    # Plot optimized pattern
    fig.add_trace(go.Scatter(
        x=positions[:, 0],
        y=positions[:, 1],
        mode='markers+lines',
        name='Optimized Pattern',
        marker=dict(size=10, color='blue')
    ))
    fig.update_layout(
        title='UAV Swarm Intelligence: X-Shape Pattern',
        xaxis_title='Latitude',
        yaxis_title='Longitude'
    )
    fig.show()
# Plot the final optimized pattern
plot_trajectory(best_individual)
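# To keep a shareable artifact alongside best_individual.npy, the figure could also be written
# to disk, e.g. by adding fig.write_html('x_pattern.html') inside plot_trajectory()
# (hypothetical filename).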