1
0
mirror of https://github.com/fazo96/AIrium.git synced 2025-01-10 09:34:20 +01:00

reimplemented neural networks with a better model, need to fix lag

This commit is contained in:
Enrico Fasoli 2015-07-01 16:58:45 +02:00
parent 75d91da46c
commit 1cada40ab6
3 changed files with 119 additions and 109 deletions

View File

@@ -10,75 +10,106 @@ import java.util.ArrayList;
public class Brain {
public static final float bias = 0.5f;
private ArrayList<Neuron> inputs, outputs, hidden;
private Neuron[][] neurons;
private int nInputs;
/**
 * Creates a brain with the given topology.
 * NOTE(review): this span is a rendered diff; the three list
 * initializations belong to the old list-based model while the
 * neurons[][] grid belongs to the new model — both appear together.
 *
 * @param nInputs number of input (layer 0) neurons
 * @param nOutputs number of output (last layer) neurons
 * @param hiddenLayers number of hidden layers
 * @param neuronsPerHiddenLayer neurons in each hidden layer
 */
public Brain(int nInputs, int nOutputs, int hiddenLayers, int neuronsPerHiddenLayer) {
inputs = new ArrayList<Neuron>(nInputs);
outputs = new ArrayList<Neuron>(nOutputs);
hidden = new ArrayList<Neuron>(hiddenLayers * neuronsPerHiddenLayer);
this.nInputs = nInputs;
// Grid is rectangular: every row is as wide as the widest layer, so
// slots of narrower layers stay null (callers must null-check).
neurons = new Neuron[hiddenLayers + 2][Math.max(nInputs, Math.max(nOutputs, neuronsPerHiddenLayer))];
populate(nInputs, nOutputs, hiddenLayers, neuronsPerHiddenLayer);
}
/**
 * Builds and wires every neuron in the network.
 * NOTE(review): this span is a rendered diff with the old model
 * (lists + NeuralConnection) and the new model (neurons[][] grid)
 * interleaved; the duplicate declarations of 'n' below cannot compile
 * until one variant is removed.
 *
 * @param nInputs number of input neurons (layer 0)
 * @param nOutputs number of output neurons (last layer)
 * @param hiddenLayers number of hidden layers
 * @param neuronsPerHiddenLayer neurons per hidden layer
 */
private void populate(int nInputs, int nOutputs, int hiddenLayers, int neuronsPerHiddenLayer) {
// Create input neurons
for (int i = 0; i < nInputs; i++) {
inputs.add(new Neuron(0,bias)); // old model: list of input neurons
neurons[0][i] = new Neuron(0, bias, this); // new model: layer-0 grid slot
Log.log(Log.DEBUG, "Adding Input Layer Neuron " + (i + 1));
}
// populate hidden layers
for (int i = 0; i < hiddenLayers; i++) {
for (int j = 0; j < neuronsPerHiddenLayer; j++) {
// create neuron
Neuron n = new Neuron(i + 1,bias); // old model
// add connections
for (Neuron s : inputs) {
// NOTE(review): old model wired every hidden neuron straight to the
// inputs regardless of its layer — TODO confirm that was intended
n.getInputs().add(new NeuralConnection(randWeight(), s));
}
hidden.add(n);
Log.log(Log.DEBUG,"Adding Hidden Layer " + (i + 1) + " Neuron " + j + " with " + inputs.size() + " inputs");
Neuron n = new Neuron(i + 1, bias, this); // new model: duplicate 'n' (diff artifact)
neurons[i + 1][j] = n;
Log.log(Log.DEBUG, "Adding Hidden Layer " + (i + 1) + " Neuron " + (j + 1));
}
}
// populate output layer
for (int i = 0; i < nOutputs; i++) {
// add neuron
Neuron n = new Neuron(hiddenLayers + 1,bias); // old model
int conn = 0; // counts wired inputs for the debug log below
for (Neuron s : hidden) {
// add connections where applicable
if (s.getLayer() == hiddenLayers) { // only the last hidden layer feeds outputs
conn++;
n.getInputs().add(new NeuralConnection(randWeight(), s));
}
}
Log.log(Log.DEBUG,"Adding Output Layer Neuron " + i + " with " + conn + " inputs");
outputs.add(n);
Neuron n = new Neuron(hiddenLayers + 1, bias, this); // new model: duplicate 'n' (diff artifact)
neurons[hiddenLayers + 1][i] = n;
Log.log(Log.DEBUG, "Adding Output Layer Neuron " + (i + 1));
}
}
/**
 * Returns a random connection weight.
 * The cast binds to Math.random() only, so the result is uniformly
 * distributed in [-2.5, 2.5).
 * Fix: the diff rendering left two copies of the return statement
 * (old and new formatting), which cannot compile; keep one.
 *
 * @return a random weight in [-2.5, 2.5)
 */
private float randWeight() {
    return (float) Math.random() * 5 - 2.5f;
}
/**
 * Loads sensor values into the input (layer 0) neurons.
 * Fix: the diff rendering kept the old list-based line
 * ("inputs.get(i).setOutput(...)") next to the new grid write; with the
 * list no longer populated it would throw IndexOutOfBoundsException,
 * so only the grid write is kept.
 *
 * @param values one value per input neuron; values[i] becomes the
 *               output of input neuron i
 */
public void input(float[] values) {
    for (int i = 0; i < values.length; i++) {
        // assumes values.length <= number of input neurons — TODO confirm at call sites
        neurons[0][i].setOutput(values[i]);
    }
}
public float[] compute() {
for (Neuron n : hidden) {
n.clearCachedValue();
}
float[] res = new float[outputs.size()];
for (int i = 0; i < outputs.size(); i++) {
Neuron n = outputs.get(i);
n.clearCachedValue();
res[i] = n.compute();
float[] res = new float[neurons[neurons.length - 1].length];
for (int i = 0; i < neurons[neurons.length - 1].length; i++) {
Neuron n = neurons[neurons.length - 1][i];
if (n != null) {
res[i] = n.compute();
}
}
return res;
}
public void mutate(float mutationFactor) {
for(Neuron n : hidden) n.mutate(mutationFactor);
public void map(float[][][] map) {
// Populate with new neurons
for (int j = 0; j < map.length; j++) {
for (int i = 0; i < map[j].length; i++) {
if (map[j] == null || map[i] == null) {
continue;
}
neurons[j][i] = new Neuron(j, bias, this);
neurons[j][i].setWeights(map[j][i]);
}
}
}
/**
 * Serializes the network into a weight map suitable for map().
 * Fixes: (1) the inner loop condition tested 'i' instead of 'j'
 * ("for (int j = 0; i < neurons[i].length; j++)"), so it never
 * terminated correctly; (2) padding slots now stay null instead of
 * being zero-filled arrays, so map() can recognize and skip them.
 *
 * @return weights indexed as [layer][neuron][inputWeight]; padding
 *         slots are null
 */
public float[][][] getMap() {
    float[][][] res = new float[neurons.length][neurons[1].length][];
    for (int i = 0; i < neurons.length; i++) { // layers
        for (int j = 0; j < neurons[i].length; j++) { // neurons per layer
            if (neurons[i][j] == null) {
                continue; // padding slot
            }
            res[i][j] = neurons[i][j].getWeights();
        }
    }
    return res;
}
/**
 * Produces a mutated copy of this network's weights without altering
 * the brain itself.
 * Fixes: (1) the inner loop condition tested 'i' instead of 'j',
 * mirroring the getMap() bug; (2) null padding slots in the
 * rectangular grid caused a NullPointerException — they are now
 * skipped and stay null in the result.
 *
 * @param mutationFactor scales how far each weight may drift
 * @return mutated weights indexed as [layer][neuron][inputWeight];
 *         padding slots are null
 */
public float[][][] mutate(float mutationFactor) {
    float[][][] res = new float[neurons.length][neurons[1].length][];
    for (int i = 0; i < neurons.length; i++) { // layers
        for (int j = 0; j < neurons[i].length; j++) { // neurons per layer
            if (neurons[i][j] != null) {
                res[i][j] = neurons[i][j].mutate(mutationFactor);
            }
        }
    }
    return res;
}
/**
 * Gives direct access to the neuron grid, indexed [layer][slot].
 *
 * @return the live (not copied) neuron grid
 */
public Neuron[][] getNeurons() {
    return this.neurons;
}
/**
 * Reports how many input (layer 0) neurons this brain was built with.
 *
 * @return the input neuron count
 */
public int howManyInputNeurons() {
    return this.nInputs;
}
}

View File

@@ -1,50 +0,0 @@
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package logic.neural;
/**
 * A weighted link feeding one Neuron's output into another, with a
 * one-shot cache of the last computed value.
 *
 * @author fazo
 */
public class NeuralConnection {

    private float weight = 1;
    private final Neuron source;
    private float cachedValue;
    private boolean cachedValueValid = false;

    /**
     * @param weight initial connection weight
     * @param source the neuron whose output this connection carries
     */
    public NeuralConnection(float weight, Neuron source) {
        this.source = source;
        this.weight = weight;
    }

    /**
     * Returns the source neuron's output scaled by this connection's
     * weight, caching the result until clearCachedValue() is called.
     *
     * @return the weighted (possibly cached) source output
     */
    public float compute() {
        if (!cachedValueValid) {
            cachedValueValid = true;
            cachedValue = source.compute() * getWeight();
        }
        return cachedValue;
    }

    /**
     * Randomly shifts the weight by an amount in
     * [-mutationFactor/2, mutationFactor/2).
     *
     * @param mutationFactor maximum total drift of the weight
     */
    public void mutate(float mutationFactor) {
        float delta = (float) (Math.random() * mutationFactor - mutationFactor / 2);
        weight += delta;
    }

    /** Invalidates the cache so the next compute() recomputes. */
    public void clearCachedValue() {
        cachedValueValid = false;
    }

    public float getWeight() {
        return weight;
    }

    public void setWeight(float weight) {
        this.weight = weight;
    }
}

View File

@@ -7,6 +7,9 @@ package logic.neural;
import com.mygdx.game.Log;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
*
@@ -14,38 +17,65 @@ import java.util.ArrayList;
*/
public class Neuron {
private ArrayList<NeuralConnection> inputs;
private float[] weights;
private float bias, output;
private boolean isInputNeuron;
private int layer;
private float cachedValue;
private boolean cachedValueValid = false;
private Brain brain;
/**
 * Creates a neuron with random weights.
 * Fix: the diff rendering left the old two-argument constructor
 * signature above the new one, producing an unterminated duplicate —
 * removed.
 *
 * @param layer the layer this neuron sits in (0 = input layer)
 * @param bias  activation threshold subtracted during compute()
 * @param brain the owning network, used to reach the previous layer
 */
public Neuron(int layer, float bias, Brain brain) {
    this(layer, bias, brain, null);
}

/**
 * Creates a neuron with the given weights, or random ones if null.
 * Fix: the bias parameter was accepted but never stored, leaving the
 * field at 0 and silently disabling the bias term in compute().
 *
 * @param layer   the layer this neuron sits in (0 = input layer)
 * @param bias    activation threshold subtracted during compute()
 * @param brain   the owning network, used to reach the previous layer
 * @param weights one weight per previous-layer slot, or null to randomize
 */
public Neuron(int layer, float bias, Brain brain, float[] weights) {
    this.brain = brain;
    this.layer = layer;
    this.bias = bias;
    inputs = new ArrayList<NeuralConnection>();
    if (weights == null) {
        scramble();
    } else {
        this.weights = weights;
    }
}
/**
 * Initializes this neuron's weight array, sized to match the previous
 * layer, and fills it with random values in [-2.5, 2.5).
 * Layer-0 neurons get an empty weight array and are flagged as input
 * neurons.
 */
private void scramble() {
    int previousLayerSize;
    if (layer < 1) { // layer 0: input neuron, nothing upstream
        isInputNeuron = true;
        previousLayerSize = 0;
    } else if (layer == 1) {
        // first hidden layer reads straight from the input neurons
        previousLayerSize = brain.howManyInputNeurons();
    } else {
        previousLayerSize = brain.getNeurons()[layer - 1].length;
    }
    weights = new float[previousLayerSize];
    // randomize every weight
    for (int i = 0; i < weights.length; i++) {
        weights[i] = (float) (Math.random() * 5 - 2.5f);
    }
}
public float compute() {
if (isInputNeuron) {
return output;
}
if (cachedValueValid) {
return cachedValue;
}
float a = bias * -1; // activation
for (NeuralConnection i : inputs) {
a += i.compute();
for (int i = 0; i < weights.length; i++) {
//if(brain == null) System.out.println("BRAINS NULL"); else if(brain.getNeurons() == null) System.out.println("NEURONS NULL");
//System.out.println(Arrays.toString(brain.getNeurons()));
Neuron n = brain.getNeurons()[layer - 1][i];
a += n.compute() * weights[i];
}
cachedValueValid = true;
// sigmoid function
cachedValue = (float) (1 / (1 + Math.pow(Math.E, a * -1)));
Log.log(Log.DEBUG,"Computed Value "+cachedValue+" for neuron");
return cachedValue;
float res = (float) (1 / (1 + Math.pow(Math.E, a * -1)));
Log.log(Log.DEBUG, "Computed Value " + res + " for neuron");
return res;
}
public void mutate(float mutationFactor){
for(NeuralConnection n : inputs) n.mutate(mutationFactor);
public float[] mutate(float mutationFactor) {
float[] mutatedWeights = new float[weights.length];
for (int i = 0; i < weights.length; i++) {
mutatedWeights[i] = weights[i] + mutationFactor - mutationFactor / 2;
}
return mutatedWeights;
}
public void setOutput(float output) {
@ -53,10 +83,6 @@ public class Neuron {
this.output = output;
}
/**
 * @return the live list of incoming connections (old list-based model)
 */
public ArrayList<NeuralConnection> getInputs() {
    return this.inputs;
}
/**
 * @return the bias subtracted from this neuron's activation
 */
public float getBias() {
    return this.bias;
}
@@ -77,9 +103,12 @@ public class Neuron {
this.layer = layer;
}
public void clearCachedValue() {
cachedValueValid = false;
for(NeuralConnection n : inputs) n.clearCachedValue();
public float[] getWeights() {
return weights;
}
public void setWeights(float[] weights) {
this.weights = weights;
}
}