
Commit 8de3e6b

First pass on aggressive refactoring and simplification.
Added an option to use (mutable) node aggregation functions other than summation. Input, output, and hidden node genes are now kept in separate containers. Noted issues that should be addressed in future refactoring.
1 parent b07c418 commit 8de3e6b

18 files changed: +658, -516 lines
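The headline change is that a node's inputs no longer have to be combined by summation: the aggregation function becomes a configurable, mutable property. A minimal sketch of the idea, assuming the usual NEAT-style node computation activation(bias + response * aggregation(weighted inputs)); the name-to-function mapping mirrors aggregation_function_defs added to neat/config.py below, while node_output and sigmoid here are illustrative helpers, not code from this commit:

import math

# Mirrors aggregation_function_defs added to neat/config.py in this commit.
aggregation_function_defs = {'sum': sum, 'max': max, 'min': min}

def sigmoid(z):
    return 1.0 / (1.0 + math.exp(-z))

def node_output(inputs, weights, bias, response, aggregation='sum', activation=sigmoid):
    # Combine the weighted inputs with the selected aggregation function
    # instead of always summing them.
    agg = aggregation_function_defs[aggregation]
    total = agg(w * x for w, x in zip(weights, inputs))
    return activation(bias + response * total)

# With 'max' the node responds to its strongest weighted input rather than the sum.
print(node_output([0.2, 0.9], [1.0, -0.5], bias=0.0, response=1.0, aggregation='max'))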

examples/memory/nn_config

Lines changed: 2 additions & 0 deletions
@@ -16,6 +16,7 @@ max_weight = 30
 min_weight = -30
 feedforward = 0
 activation_functions = clamped sigmoid tanh my_sinc_function
+aggregation_functions = sum
 weight_stdev = 1.0
 
 [genetic]
@@ -33,6 +34,7 @@ prob_mutate_weight = 0.8
 prob_replace_weight = 0.1
 weight_mutation_power = 0.5
 prob_mutate_activation = 0.002
+prob_mutate_aggregation = 0.0
 prob_toggle_link = 0.01
 reset_on_extinction = 1
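For reference, a config fragment that actually exercises the new options might look like the following. This is a sketch only: the key names and whitespace-separated list format follow the parsing added in neat/config.py, the available names come from aggregation_function_defs there, and the 0.05 mutation rate is an arbitrary illustrative value; this commit's example configs stick with sum and a 0.0 rate.

[phenotype]
activation_functions = clamped sigmoid tanh my_sinc_function
aggregation_functions = sum max min

[genetic]
prob_mutate_activation = 0.002
prob_mutate_aggregation = 0.05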

examples/memory/nn_evolve.py

Lines changed: 10 additions & 4 deletions
@@ -20,8 +20,8 @@
 N = 4
 
 
-def eval_fitness(g):
-    net = nn.create_recurrent_phenotype(g)
+def eval_fitness(genome_id, genome):
+    net = nn.create_recurrent_phenotype(genome)
 
     error = 0.0
     for _ in range(num_tests):
@@ -43,6 +43,11 @@ def eval_fitness(g):
     return -(error / (N * num_tests)) ** 0.5
 
 
+def eval_genomes(genomes):
+    for genome_id, genome in genomes:
+        genome.fitness = eval_fitness(genome_id, genome)
+
+
 # Demonstration of how to add your own custom activation function.
 def sinc(x):
     return 1.0 if x == 0 else math.sin(x) / x
@@ -57,8 +62,9 @@ def sinc(x):
 def run():
     local_dir = os.path.dirname(__file__)
     pop = population.Population(os.path.join(local_dir, 'nn_config'))
-    pe = parallel.ParallelEvaluator(4, eval_fitness)
-    pop.run(pe.evaluate, 1000)
+    #pe = parallel.ParallelEvaluator(4, eval_fitness)
+    #pop.run(pe.evaluate, 1000)
+    pop.run(eval_genomes, 1000)
 
     # Log statistics.
     statistics.save_stats(pop.statistics)
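The example now drives evolution through the serial eval_genomes callback, with the ParallelEvaluator path left commented out, and the per-genome fitness function takes (genome_id, genome) instead of a bare genome. A small sketch of the same wiring as a reusable adapter; make_genomes_evaluator is a hypothetical helper, not part of the commit:

# Hypothetical adapter: turn any per-genome fitness function with the new
# (genome_id, genome) signature into the genomes-list callback that
# pop.run() is given in this example.
def make_genomes_evaluator(fitness_fn):
    def eval_genomes(genomes):
        # genomes is iterated as (genome_id, genome) pairs, matching the
        # updated example above.
        for genome_id, genome in genomes:
            genome.fitness = fitness_fn(genome_id, genome)
    return eval_genomes

# Usage, mirroring run() in nn_evolve.py:
#   pop.run(make_genomes_evaluator(eval_fitness), 1000)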

examples/xor/xor2.py

Lines changed: 31 additions & 2 deletions
@@ -5,13 +5,42 @@
 
 from neat import nn, population, statistics
 
+
 # Network inputs and expected outputs.
 xor_inputs = [[0, 0], [0, 1], [1, 0], [1, 1]]
 xor_outputs = [0, 1, 1, 0]
 
+def ideal_demo():
+    import neat
+
+    # default Config
+    # default to parallel processing using auto-detected # hardware cores
+    n = neat.Sequential(xor_inputs, xor_outputs)
+
+    n.evolve(300)
+
+    n.save_statistics(".")
+
+    print('Number of evaluations: {0}'.format(n.total_evaluations))
+
+    # Show output of the most fit genome against training data.
+    winner = n.best_genome()
+    print('\nBest genome:\n{!s}'.format(winner))
+    print('\nOutput:')
+    winner_output = n.evaluate(winner, xor_inputs)
+    for inputs, expected, outputs in zip(xor_inputs, xor_outputs, winner_output):
+        print("input {!r}, expected output {0:1.5f} got {1:1.5f}".format(inputs, expected, outputs[0]))
+
+
+
+total_evaluations = 0
 
 def eval_fitness(genomes):
-    for g in genomes:
+    global total_evaluations
+    total_evaluations += len(genomes)
+
+    for gid, g in genomes:
+
         net = nn.create_feed_forward_phenotype(g)
 
         sum_square_error = 0.0
@@ -35,7 +64,7 @@ def eval_fitness(genomes):
 statistics.save_species_count(pop.statistics)
 statistics.save_species_fitness(pop.statistics)
 
-print('Number of evaluations: {0}'.format(pop.total_evaluations))
+print('Number of evaluations: {0}'.format(total_evaluations))
 
 # Show output of the most fit genome against training data.
 winner = pop.statistics.best_genome()

examples/xor/xor2_config

Lines changed: 2 additions & 0 deletions
@@ -16,6 +16,7 @@ max_weight = 30
 min_weight = -30
 feedforward = 1
 activation_functions = sigmoid
+aggregation_functions = sum
 weight_stdev = 1.0
 
 [genetic]
@@ -33,6 +34,7 @@ prob_mutate_weight = 0.460
 prob_replace_weight = 0.0245
 weight_mutation_power = 0.825
 prob_mutate_activation = 0.0
+prob_mutate_aggregation = 0.0
 prob_toggle_link = 0.0138
 reset_on_extinction = 1

neat/__init__.py

Lines changed: 1 addition & 0 deletions
@@ -1,5 +1,6 @@
 from neat import activations
 
+# TODO: This collection should probably be held by the Config object.
 activation_functions = activations.ActivationFunctionSet()
 
 activation_functions.add('sigmoid', activations.sigmoid_activation)

neat/config.py

Lines changed: 59 additions & 6 deletions
@@ -1,6 +1,6 @@
+from random import random, gauss, choice
 import os
 
-from neat.genes import NodeGene, ConnectionGene
 from neat.genome import Genome, FFGenome
 from neat import activation_functions
 from neat.reproduction import DefaultReproduction
@@ -11,6 +11,7 @@
 except ImportError:
     from ConfigParser import SafeConfigParser as ConfigParser
 
+aggregation_function_defs = {'sum': sum, 'max': max, 'min': min}
 
 class Config(object):
     '''
@@ -60,6 +61,7 @@ def load(self, filename):
         self.feedforward = bool(int(parameters.get('phenotype', 'feedforward')))
         self.weight_stdev = float(parameters.get('phenotype', 'weight_stdev'))
         self.activation_functions = parameters.get('phenotype', 'activation_functions').strip().split()
+        self.aggregation_functions = parameters.get('phenotype', 'aggregation_functions').strip().split()
 
         # Verify that initial connection type is valid.
         if 'partial' in self.initial_connection:
@@ -97,6 +99,7 @@ def load(self, filename):
         self.prob_replace_weight = float(parameters.get('genetic', 'prob_replace_weight'))
         self.weight_mutation_power = float(parameters.get('genetic', 'weight_mutation_power'))
         self.prob_mutate_activation = float(parameters.get('genetic', 'prob_mutate_activation'))
+        self.prob_mutate_aggregation = float(parameters.get('genetic', 'prob_mutate_aggregation'))
         self.prob_toggle_link = float(parameters.get('genetic', 'prob_toggle_link'))
         self.reset_on_extinction = bool(int(parameters.get('genetic', 'reset_on_extinction')))
 
@@ -106,10 +109,6 @@ def load(self, filename):
         self.disjoint_coefficient = float(parameters.get('genotype compatibility', 'disjoint_coefficient'))
         self.weight_coefficient = float(parameters.get('genotype compatibility', 'weight_coefficient'))
 
-        # Gene types
-        self.node_gene_type = NodeGene
-        self.conn_gene_type = ConnectionGene
-
         stagnation_type_name = parameters.get('Types', 'stagnation_type')
         reproduction_type_name = parameters.get('Types', 'reproduction_type')
 
@@ -142,4 +141,58 @@ def register(self, typeName, typeDef):
         self.registry[typeName] = typeDef
 
     def get_type_config(self, typeInstance):
-        return dict(self.type_config[typeInstance.__class__.__name__])
+        return dict(self.type_config[typeInstance.__class__.__name__])
+
+    # TODO: Factor out these mutation methods into a separate class.
+    def new_weight(self):
+        return gauss(0, self.weight_stdev)
+
+    def new_bias(self):
+        return gauss(0, self.weight_stdev)
+
+    def new_response(self):
+        return 1.0
+
+    def new_aggregation(self):
+        return choice(self.aggregation_functions)
+
+    def new_activation(self):
+        return choice(list(activation_functions.functions.keys()))
+
+    def mutate_weight(self, weight):
+        if random() < self.prob_mutate_weight:
+            if random() < self.prob_replace_weight:
+                # Replace weight with a random value.
+                weight = self.new_weight()
+            else:
+                # Perturb weight.
+                weight += gauss(0, self.weight_mutation_power)
+                weight = max(self.min_weight, min(self.max_weight, weight))
+
+        return weight
+
+    def mutate_bias(self, bias):
+        if random() < self.prob_mutate_bias:
+            bias += gauss(0, self.bias_mutation_power)
+            bias = max(self.min_weight, min(self.max_weight, bias))
+
+        return bias
+
+    def mutate_response(self, response):
+        if random() < self.prob_mutate_response:
+            response += gauss(0, self.response_mutation_power)
+            response = max(self.min_weight, min(self.max_weight, response))
+
+        return response
+
+    def mutate_aggregation(self, aggregation):
+        if random() < self.prob_mutate_aggregation:
+            aggregation = self.new_aggregation()
+
+        return aggregation
+
+    def mutate_activation(self, activation):
+        if random() < self.prob_mutate_activation:
+            activation = self.new_activation()
+
+        return activation
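With the node/connection gene types no longer stored on Config and these helpers added, gene attribute mutation can be delegated entirely to the Config object. A sketch of how a gene might use them; only the new_* and mutate_* methods come from this commit, while SimpleNodeGene and its attribute names are hypothetical:

class SimpleNodeGene(object):
    # Hypothetical gene class for illustration; only the Config methods it
    # calls are introduced by this commit.
    def __init__(self, ID, config):
        self.ID = ID
        self.bias = config.new_bias()
        self.response = config.new_response()
        self.aggregation = config.new_aggregation()  # e.g. 'sum', 'max', or 'min'
        self.activation = config.new_activation()

    def mutate(self, config):
        self.bias = config.mutate_bias(self.bias)
        self.response = config.mutate_response(self.response)
        self.aggregation = config.mutate_aggregation(self.aggregation)
        self.activation = config.mutate_activation(self.activation)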

neat/ctrnn/__init__.py

Lines changed: 2 additions & 2 deletions
@@ -14,9 +14,9 @@ class CTNodeGene(NodeGene):
     The main difference here is the addition of
     a decay rate given by the time constant.
     """
-    def __init__(self, ID, node_type, bias=0.0, response=4.924273,
+    def __init__(self, ID, bias=0.0, response=4.924273,
                  activation_type='sigmoid', time_constant=1.0):
-        super(CTNodeGene, self).__init__(ID, node_type, bias, response, activation_type)
+        super(CTNodeGene, self).__init__(ID, bias, response, activation_type)
        self.time_constant = time_constant
 
     def mutate(self, config):
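For callers, the visible effect is that CTNodeGene no longer takes a node_type argument; a node's role is presumably implied by which container holds it, per the commit message. A before/after sketch using the default values from the signature above (the old node_type value shown is illustrative):

from neat.ctrnn import CTNodeGene

# Before this commit:
#   gene = CTNodeGene(0, 'HIDDEN', bias=0.0, response=4.924273)
# After it:
gene = CTNodeGene(0, bias=0.0, response=4.924273)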
