1
0
mirror of https://github.com/fazo96/AIrium.git synced 2025-01-10 09:34:20 +01:00

added some groundwork for recurrent neural networks

This commit is contained in:
Enrico Fasoli 2015-08-08 13:06:59 +02:00
parent f6ae99b23a
commit c37675d7b3
3 changed files with 28 additions and 15 deletions

View File

@@ -241,6 +241,8 @@ public class World implements Runnable {
n = top[first].getBrain().breed(top[sec].getBrain().getMap()); n = top[first].getBrain().breed(top[sec].getBrain().getMap());
} catch (Exception ex) { } catch (Exception ex) {
// Should not happen // Should not happen
Log.log(Log.ERROR, "Could not breed: " + ex.getMessage()
+ "\nIt is advised to restart the simulation after changing the brain's topology");
Logger.getLogger(World.class.getName()).log(Level.SEVERE, null, ex); Logger.getLogger(World.class.getName()).log(Level.SEVERE, null, ex);
} }
Creature ne = spawnCreature(n); Creature ne = spawnCreature(n);

View File

@@ -46,7 +46,7 @@ public class Brain {
for (int j = 0; j < brainMap[i].length; j++) { // for each neuron for (int j = 0; j < brainMap[i].length; j++) { // for each neuron
// skip input layer // skip input layer
if (neurons[i + 1][j] == null) { if (neurons[i + 1][j] == null) {
neurons[i + 1][j] = new Neuron(j, bias, this, brainMap[i][j]); neurons[i + 1][j] = new Neuron(i + 1, i, bias, this, brainMap[i][j]);
} else { } else {
neurons[i + 1][j].setWeights(brainMap[i][j]); neurons[i + 1][j].setWeights(brainMap[i][j]);
} }
@@ -63,7 +63,7 @@ public class Brain {
for (int i = 0; i < neurons.length; i++) { for (int i = 0; i < neurons.length; i++) {
for (int j = 0; j < neurons[i].length; j++) { for (int j = 0; j < neurons[i].length; j++) {
// create neuron // create neuron
Neuron n = new Neuron(i, bias, this); Neuron n = new Neuron(i, i - 1, bias, this);
neurons[i][j] = n; neurons[i][j] = n;
Log.log(Log.DEBUG, "Adding Layer " + (i + 1) + " Neuron " + (j + 1)); Log.log(Log.DEBUG, "Adding Layer " + (i + 1) + " Neuron " + (j + 1));
} }

View File

@@ -15,7 +15,7 @@ public class Neuron {
private NeuronCache cache; private NeuronCache cache;
private float bias, output; private float bias, output;
private boolean isInputNeuron; private boolean isInputNeuron;
private int layer; private int layer, receivesFromLayer;
private Brain brain; private Brain brain;
/** /**
@@ -24,10 +24,12 @@ public class Neuron {
* *
* @param layer the layer in which this neuron is positioned * @param layer the layer in which this neuron is positioned
* @param bias the bias of this neuron * @param bias the bias of this neuron
* @param receivesFromLayer the layer to read data from (negative for input
* neurons)
* @param brain the brain which contains this neuron * @param brain the brain which contains this neuron
*/ */
public Neuron(int layer, float bias, Brain brain) { public Neuron(int layer, int receivesFromLayer, float bias, Brain brain) {
this(layer, bias, brain, null); this(layer, receivesFromLayer, bias, brain, null);
} }
/** /**
@@ -35,20 +37,27 @@ public class Neuron {
* being the input layer, with given weights * being the input layer, with given weights
* *
* @param layer the layer in which this neuron is positioned * @param layer the layer in which this neuron is positioned
* @param receivesFromLayer the layer to read data from (negative for input
* neurons)
* @param bias the bias of this neuron * @param bias the bias of this neuron
* @param brain the brain which contains this neuron * @param brain the brain which contains this neuron
* @param weights the weights to use to configure this neuron * @param weights the weights to use to configure this neuron
*/ */
public Neuron(int layer, float bias, Brain brain, float[] weights) { public Neuron(int layer, int receivesFromLayer, float bias, Brain brain, float[] weights) {
this.brain = brain; this.brain = brain;
this.layer = layer; this.layer = layer;
if (weights == null) { this.receivesFromLayer = receivesFromLayer;
if (receivesFromLayer < 0 || layer == 0) {
isInputNeuron = true;
} else if (weights == null) {
scramble(); scramble();
} else { } else {
this.weights = weights; this.weights = weights;
} }
if (!isInputNeuron) {
cache = new NeuronCache(this.weights.length); cache = new NeuronCache(this.weights.length);
} }
}
/** /**
* Randomize the weights of this neuron * Randomize the weights of this neuron
@@ -56,8 +65,8 @@ public class Neuron {
private void scramble() { private void scramble() {
// init weights // init weights
if (layer > 0) { if (layer > 0) {
weights = new float[brain.getNeurons()[layer - 1].length]; weights = new float[brain.getNeurons()[receivesFromLayer].length];
} else { // layer 0 } else { // layer 0 or negative
isInputNeuron = true; isInputNeuron = true;
weights = new float[0]; weights = new float[0];
} }
@@ -76,7 +85,9 @@ public class Neuron {
* @return the output of this neuron. * @return the output of this neuron.
*/ */
public float compute() { public float compute() {
if(weights == null || weights.length == 0) isInputNeuron = true; if (weights == null || weights.length == 0) {
isInputNeuron = true;
}
if (isInputNeuron) { if (isInputNeuron) {
return output; return output;
} }
@@ -93,7 +104,7 @@ public class Neuron {
Logger.getLogger(Neuron.class.getName()).log(Level.SEVERE, null, ex); Logger.getLogger(Neuron.class.getName()).log(Level.SEVERE, null, ex);
} }
} else { } else {
Neuron n = brain.getNeurons()[layer - 1][i]; Neuron n = brain.getNeurons()[receivesFromLayer][i];
float v = n.compute() * weights[i]; float v = n.compute() * weights[i];
a += v; a += v;
cache.put(i, v); cache.put(i, v);