package net.bmahe.genetics4j.neat;

import java.util.function.Function;

/**
 * Utility class providing common activation functions for NEAT (NeuroEvolution of Augmenting Topologies) neural networks.
 *
 * <p>Activations contains a collection of mathematical functions commonly used as activation functions
 * in neural networks. These functions transform the weighted sum of inputs at each node into the node's
 * output value, introducing the non-linearity that enables neural networks to learn complex patterns.
 *
 * <p>Available activation functions:
 * <ul>
 * <li><strong>Linear</strong>: Simple linear transformation with slope and bias parameters</li>
 * <li><strong>Sigmoid</strong>: Logistic function providing a smooth transition between 0 and 1</li>
 * <li><strong>Hyperbolic tangent</strong>: Smooth function providing output between -1 and 1</li>
 * <li><strong>Identity</strong>: Pass-through function for linear networks</li>
 * <li><strong>NEAT paper</strong>: Sigmoid variant with the specific parameters from the original NEAT research</li>
 * </ul>
 *
 * <p>Function variations:
 * <ul>
 * <li><strong>Float versions</strong>: Optimized for float-precision networks</li>
 * <li><strong>Double versions</strong>: Higher precision for sensitive applications</li>
 * <li><strong>Parameterized versions</strong>: Customizable function parameters</li>
 * <li><strong>Pre-configured versions</strong>: Common parameter combinations</li>
 * </ul>
 *
 * <p>Common usage patterns:
 * <pre>{@code
 * // Use the standard sigmoid activation
 * FeedForwardNetwork network = new FeedForwardNetwork(
 *     inputNodes, outputNodes, connections, Activations.sigmoid
 * );
 *
 * // Custom sigmoid with a different steepness
 * Function<Double, Double> customSigmoid = Activations.sigmoid(2.0);
 * FeedForwardNetwork steeperNetwork = new FeedForwardNetwork(
 *     inputNodes, outputNodes, connections, customSigmoid
 * );
 *
 * // Hyperbolic tangent for outputs in the (-1, 1) range
 * FeedForwardNetwork tanhNetwork = new FeedForwardNetwork(
 *     inputNodes, outputNodes, connections, Activations.tanh
 * );
 *
 * // Linear activation for regression problems
 * FeedForwardNetwork linearNetwork = new FeedForwardNetwork(
 *     inputNodes, outputNodes, connections, Activations.identity
 * );
 *
 * // Float versions for memory efficiency
 * Function<Float, Float> floatSigmoid = Activations.sigmoidFloat;
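 *
 * // Sigmoid with steepness 4.9, as used in the original NEAT paper
 * FeedForwardNetwork neatPaperNetwork = new FeedForwardNetwork(
 *     inputNodes, outputNodes, connections, Activations.neatPaper
 * );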
 * }</pre>
 *
 * <p>Activation function characteristics:
 * <ul>
 * <li><strong>Sigmoid</strong>: Smooth, bounded in (0, 1), good for binary classification</li>
 * <li><strong>Tanh</strong>: Smooth, bounded in (-1, 1), zero-centered, often preferred over sigmoid</li>
 * <li><strong>Linear</strong>: Unbounded, preserves gradients, suitable for regression</li>
 * <li><strong>Identity</strong>: No transformation, useful for pass-through connections</li>
 * </ul>
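 *
 * <p>For instance, {@code sigmoid.apply(0.0d)} returns 0.5 while {@code tanh.apply(0.0d)}
 * returns 0.0, illustrating tanh's zero-centered output.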
 *
 * <p>Performance considerations:
 * <ul>
 * <li><strong>Float vs Double</strong>: Float versions use less memory and may be faster</li>
 * <li><strong>Function references</strong>: Pre-defined functions avoid object creation</li>
 * <li><strong>Mathematical operations</strong>: Optimized implementations for common cases</li>
 * <li><strong>Branch prediction</strong>: Simple functions improve CPU branch prediction</li>
 * </ul>
 *
 * <p>Integration with NEAT evolution:
 * <ul>
 * <li><strong>Network evaluation</strong>: Applied to hidden and output nodes during forward propagation</li>
 * <li><strong>Fitness computation</strong>: Affects network behavior and resulting fitness</li>
 * <li><strong>Gradient flow</strong>: Function choice impacts learning and evolution dynamics</li>
 * <li><strong>Problem matching</strong>: Different problems benefit from different activation functions</li>
 * </ul>
 *
 * @see FeedForwardNetwork
 * @see NeatChromosome
 * @see Function
 */
public class Activations {

	private Activations() {
	}

	/**
	 * Creates a linear activation function with specified slope and bias for float values.
	 *
	 * <p>The linear function computes: f(x) = a * x + b
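	 *
	 * <p>For example (illustrative values):
	 * <pre>{@code
	 * Function<Float, Float> f = Activations.linearFloat(2.0f, 1.0f);
	 * float y = f.apply(3.0f); // 2.0f * 3.0f + 1.0f = 7.0f
	 * }</pre>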
	 *
	 * @param a the slope parameter
	 * @param b the bias parameter
	 * @return a linear activation function
	 */
	public static Function<Float, Float> linearFloat(final float a, final float b) {
		return (x) -> a * x + b;
	}

	/**
	 * Creates a linear activation function with specified slope and bias for double values.
	 *
	 * <p>The linear function computes: f(x) = a * x + b
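	 *
	 * <p>For example, scaling and shifting a value (illustrative parameters):
	 * <pre>{@code
	 * Function<Double, Double> f = Activations.linear(0.5d, -1.0d);
	 * double y = f.apply(4.0d); // 0.5 * 4.0 + (-1.0) = 1.0
	 * }</pre>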
	 *
	 * @param a the slope parameter
	 * @param b the bias parameter
	 * @return a linear activation function
	 */
	public static Function<Double, Double> linear(final double a, final double b) {
		return (x) -> a * x + b;
	}

	/**
	 * Creates a sigmoid activation function with specified steepness for float values.
	 *
	 * <p>The sigmoid function computes: f(x) = 1 / (1 + exp(-a * x))
	 * <p>Output range: (0, 1)
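	 *
	 * <p>For example (illustrative steepness):
	 * <pre>{@code
	 * Function<Float, Float> f = Activations.sigmoidFloat(2.0f);
	 * float y = f.apply(0.0f); // 1 / (1 + exp(0)) = 0.5f, for any steepness
	 * }</pre>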
	 *
	 * @param a the steepness parameter (higher values create steeper transitions)
	 * @return a sigmoid activation function
	 */
	public static Function<Float, Float> sigmoidFloat(final float a) {
		return (x) -> 1.0f / (1.0f + (float) Math.exp(-a * x));
	}

	/**
	 * Creates a sigmoid activation function with specified steepness for double values.
	 *
	 * <p>The sigmoid function computes: f(x) = 1 / (1 + exp(-a * x))
	 * <p>Output range: (0, 1)
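	 *
	 * <p>For example, a steeper sigmoid approaches a step function (illustrative values):
	 * <pre>{@code
	 * Function<Double, Double> steep = Activations.sigmoid(10.0d);
	 * double y = steep.apply(1.0d); // 1 / (1 + exp(-10)) ≈ 0.99995
	 * }</pre>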
	 *
	 * @param a the steepness parameter (higher values create steeper transitions)
	 * @return a sigmoid activation function
	 */
	public static Function<Double, Double> sigmoid(final double a) {
		return (x) -> 1.0d / (1.0d + Math.exp(-a * x));
	}

	/** Standard sigmoid activation function for float values (steepness = 1.0). */
	public static Function<Float, Float> sigmoidFloat = sigmoidFloat(1.0f);

	/** Standard sigmoid activation function for double values (steepness = 1.0). */
	public static Function<Double, Double> sigmoid = sigmoid(1.0d);

	/** Identity activation function for float values (f(x) = x). */
	public static Function<Float, Float> identityFloat = linearFloat(1.0f, 0.0f);

	/** Identity activation function for double values (f(x) = x). */
	public static Function<Double, Double> identity = linear(1.0d, 0.0d);

	/** Hyperbolic tangent activation function for float values. Output range: (-1, 1). */
	public static Function<Float, Float> tanhFloat = (x) -> (float) Math.tanh(x);

	/** Hyperbolic tangent activation function for double values. Output range: (-1, 1). */
	public static Function<Double, Double> tanh = (x) -> Math.tanh(x);

	/** Sigmoid activation function with steepness 4.9 as used in the original NEAT paper (float version). */
	public static Function<Float, Float> neatPaperFloat = sigmoidFloat(4.9f);

	/** Sigmoid activation function with steepness 4.9 as used in the original NEAT paper (double version). */
	public static Function<Double, Double> neatPaper = sigmoid(4.9d);
}