Mirror of https://github.com/fazo96/AIrium.git (synced 2025-01-10 09:34:20 +01:00)
reimplemented neural networks with a better model, need to fix lag
commit 1cada40ab6
parent 75d91da46c
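The change replaces the connection-object network (Neuron plus NeuralConnection, with per-connection caching) by a layered weight model: Brain keeps a Neuron[][] grid, every non-input Neuron keeps one float weight per neuron of the previous layer, and the whole genome can be exported or loaded as a float[layer][neuron][weight] map through getMap()/map(). Below is a minimal standalone sketch of that representation, not code from the commit: the class name WeightMapSketch and the iterative forward pass are illustrative assumptions, whereas the commit's Neuron.compute() walks the same structure recursively from the output layer (see the diffs that follow).

// Minimal sketch (assumed names): a feed-forward pass over a float[layer][neuron][weight] map.
public class WeightMapSketch {

    // Same sigmoid form as Neuron.compute() in the diff below.
    static float sigmoid(float a) {
        return (float) (1 / (1 + Math.pow(Math.E, -a)));
    }

    // Iterative forward pass; "bias" plays the role of Brain.bias.
    static float[] feedForward(float[][][] map, float[] inputs, float bias) {
        float[] current = inputs;
        for (int layer = 1; layer < map.length; layer++) {
            float[] next = new float[map[layer].length];
            for (int j = 0; j < map[layer].length; j++) {
                float a = -bias;
                for (int i = 0; i < map[layer][j].length; i++) {
                    a += current[i] * map[layer][j][i];
                }
                next[j] = sigmoid(a);
            }
            current = next;
        }
        return current;
    }

    public static void main(String[] args) {
        // 2 inputs -> 2 hidden neurons -> 1 output; weights chosen arbitrarily.
        float[][][] map = {
            {{}, {}},                        // layer 0: input neurons carry no weights
            {{0.5f, -1.0f}, {1.5f, 0.25f}},  // hidden layer
            {{1.0f, -0.5f}}                  // output layer
        };
        System.out.println(feedForward(map, new float[]{1f, 0f}, 0.5f)[0]);
    }
}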
Brain.java
@@ -10,75 +10,106 @@ import java.util.ArrayList;
 public class Brain {
 
     public static final float bias = 0.5f;
-    private ArrayList<Neuron> inputs, outputs, hidden;
+    private Neuron[][] neurons;
+    private int nInputs;
 
     public Brain(int nInputs, int nOutputs, int hiddenLayers, int neuronsPerHiddenLayer) {
-        inputs = new ArrayList<Neuron>(nInputs);
-        outputs = new ArrayList<Neuron>(nOutputs);
-        hidden = new ArrayList<Neuron>(hiddenLayers * neuronsPerHiddenLayer);
+        this.nInputs = nInputs;
+        neurons = new Neuron[hiddenLayers + 2][Math.max(nInputs, Math.max(nOutputs, neuronsPerHiddenLayer))];
         populate(nInputs, nOutputs, hiddenLayers, neuronsPerHiddenLayer);
     }
 
     private void populate(int nInputs, int nOutputs, int hiddenLayers, int neuronsPerHiddenLayer) {
         // Create input neurons
         for (int i = 0; i < nInputs; i++) {
-            inputs.add(new Neuron(0,bias));
+            neurons[0][i] = new Neuron(0, bias, this);
+            Log.log(Log.DEBUG, "Adding Input Layer Neuron " + (i + 1));
         }
         // popiulate hidden layers
         for (int i = 0; i < hiddenLayers; i++) {
             for (int j = 0; j < neuronsPerHiddenLayer; j++) {
                 // create neuron
-                Neuron n = new Neuron(i + 1,bias);
-                // add connections
-                for (Neuron s : inputs) {
-                    n.getInputs().add(new NeuralConnection(randWeight(), s));
-                }
-                hidden.add(n);
-                Log.log(Log.DEBUG,"Adding Hidden Layer " + (i + 1) + " Neuron " + j + " with " + inputs.size() + " inputs");
+                Neuron n = new Neuron(i + 1, bias, this);
+                neurons[i + 1][j] = n;
+                Log.log(Log.DEBUG, "Adding Hidden Layer " + (i + 1) + " Neuron " + (j + 1));
             }
         }
         // populate output layer
         for (int i = 0; i < nOutputs; i++) {
             // add neuron
-            Neuron n = new Neuron(hiddenLayers + 1,bias);
-            int conn = 0;
-            for (Neuron s : hidden) {
-                // add connections where applicable
-                if (s.getLayer() == hiddenLayers) {
-                    conn++;
-                    n.getInputs().add(new NeuralConnection(randWeight(), s));
-                }
-            }
-            Log.log(Log.DEBUG,"Adding Output Layer Neuron " + i + " with " + conn + " inputs");
-            outputs.add(n);
+            Neuron n = new Neuron(hiddenLayers + 1, bias, this);
+            neurons[hiddenLayers + 1][i] = n;
+            Log.log(Log.DEBUG, "Adding Output Layer Neuron " + (i + 1));
         }
     }
 
     private float randWeight() {
-        return (float) Math.random()*5 - 2.5f;
+        return (float) Math.random() * 5 - 2.5f;
     }
 
     public void input(float[] values) {
         for (int i = 0; i < values.length; i++) {
-            inputs.get(i).setOutput(values[i]);
+            neurons[0][i].setOutput(values[i]);
         }
     }
 
     public float[] compute() {
-        for (Neuron n : hidden) {
-            n.clearCachedValue();
-        }
-        float[] res = new float[outputs.size()];
-        for (int i = 0; i < outputs.size(); i++) {
-            Neuron n = outputs.get(i);
-            n.clearCachedValue();
-            res[i] = n.compute();
+        float[] res = new float[neurons[neurons.length - 1].length];
+        for (int i = 0; i < neurons[neurons.length - 1].length; i++) {
+            Neuron n = neurons[neurons.length - 1][i];
+            if (n != null) {
+                res[i] = n.compute();
+            }
         }
         return res;
     }
 
-    public void mutate(float mutationFactor) {
-        for(Neuron n : hidden) n.mutate(mutationFactor);
+    public void map(float[][][] map) {
+        // Populate with new neurons
+        for (int j = 0; j < map.length; j++) {
+            for (int i = 0; i < map[j].length; i++) {
+                if (map[j] == null || map[i] == null) {
+                    continue;
+                }
+                neurons[j][i] = new Neuron(j, bias, this);
+                neurons[j][i].setWeights(map[j][i]);
+            }
+        }
+    }
+
+    public float[][][] getMap() {
+        float[][][] res = new float[neurons.length][neurons[1].length][neurons[1].length];
+        for (int i = 0; i < neurons.length; i++) // layers
+        {
+            for (int j = 0; i < neurons[i].length; j++) // neurons per layer
+            {
+                if (neurons[i][j] == null) {
+                    continue;
+                }
+                res[i][j] = neurons[i][j].getWeights();
+            }
+        }
+        return res;
+    }
+
+    public float[][][] mutate(float mutationFactor) {
+        float[][][] res = new float[neurons.length][neurons[1].length][neurons[1].length];
+        for (int i = 0; i < neurons.length; i++) // layers
+        {
+            for (int j = 0; i < neurons[i].length; j++) // neurons per layer
+            {
+                res[i][j] = neurons[i][j].mutate(mutationFactor);
+            }
+        }
+        return res;
+    }
+
+    public Neuron[][] getNeurons() {
+        return neurons;
+    }
+
+    public int howManyInputNeurons() {
+        return nInputs;
     }
 
 }
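One detail worth flagging in the new Brain: the inner loops of getMap() and mutate() test i < neurons[i].length while incrementing j, so the condition never changes; depending on i the inner loop either runs off the end of the row or never runs at all. This looks like a j/i mix-up. Below is a hedged sketch of getMap() with only that condition changed (an assumption about intent, not code from the commit); it reuses the neurons field and Neuron.getWeights() exactly as declared in the diff above. mutate() presumably needs the same fix, and unlike getMap() it also dereferences entries without a null check, so it would hit the unused slots of layers narrower than the widest one.

// Sketch: getMap() as declared above, with the inner-loop condition changed from "i <" to "j <".
public float[][][] getMap() {
    float[][][] res = new float[neurons.length][neurons[1].length][neurons[1].length];
    for (int i = 0; i < neurons.length; i++) // layers
    {
        for (int j = 0; j < neurons[i].length; j++) // neurons per layer
        {
            if (neurons[i][j] == null) {
                continue;
            }
            res[i][j] = neurons[i][j].getWeights();
        }
    }
    return res;
}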
NeuralConnection.java (file removed)
@@ -1,50 +0,0 @@
-/*
- * To change this license header, choose License Headers in Project Properties.
- * To change this template file, choose Tools | Templates
- * and open the template in the editor.
- */
-package logic.neural;
-
-/**
- *
- * @author fazo
- */
-public class NeuralConnection {
-
-    private float weight = 1;
-
-    private final Neuron source;
-    private float cachedValue;
-    private boolean cachedValueValid = false;
-
-    public NeuralConnection(float weight, Neuron source) {
-        this.source = source;
-        this.weight = weight;
-    }
-
-    public float compute() {
-        if (cachedValueValid) {
-            return cachedValue;
-        }
-        // get value from Neuron
-        cachedValueValid = true;
-        return cachedValue = source.compute() * getWeight();
-    }
-
-    public void mutate(float mutationFactor) {
-        float mutation = (float) (Math.random() * mutationFactor - mutationFactor/2);
-        weight += mutation;
-    }
-
-    public void clearCachedValue() {
-        cachedValueValid = false;
-    }
-
-    public float getWeight() {
-        return weight;
-    }
-
-    public void setWeight(float weight) {
-        this.weight = weight;
-    }
-}
Neuron.java
@@ -7,6 +7,9 @@ package logic.neural;
 import com.mygdx.game.Log;
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.logging.Level;
+import java.util.logging.Logger;
 
 /**
  *
@@ -14,38 +17,65 @@ import java.util.ArrayList;
  */
 public class Neuron {
 
-    private ArrayList<NeuralConnection> inputs;
+    private float[] weights;
     private float bias, output;
     private boolean isInputNeuron;
     private int layer;
-    private float cachedValue;
-    private boolean cachedValueValid = false;
+    private Brain brain;
 
-    public Neuron(int layer, float bias) {
+    public Neuron(int layer, float bias, Brain brain) {
+        this(layer, bias, brain, null);
+    }
+
+    public Neuron(int layer, float bias, Brain brain, float[] weights) {
+        this.brain = brain;
         this.layer = layer;
-        inputs = new ArrayList<NeuralConnection>();
+        if (weights == null) {
+            scramble();
+        } else {
+            this.weights = weights;
+        }
+    }
+
+    private void scramble() {
+        // init weights
+        if (layer > 1) {
+            weights = new float[brain.getNeurons()[layer - 1].length];
+        } else if (layer == 1) {
+            weights = new float[brain.howManyInputNeurons()];
+        } else { // layer 0
+            isInputNeuron = true;
+            weights = new float[0];
+        }
+        // Put random weights
+        for (int i = 0; i < weights.length; i++) {
+            weights[i] = (float) (Math.random() * 5 - 2.5f);
+        }
     }
 
     public float compute() {
         if (isInputNeuron) {
             return output;
         }
-        if (cachedValueValid) {
-            return cachedValue;
-        }
         float a = bias * -1; // activation
-        for (NeuralConnection i : inputs) {
-            a += i.compute();
+        for (int i = 0; i < weights.length; i++) {
+            //if(brain == null) System.out.println("BRAINS NULL"); else if(brain.getNeurons() == null) System.out.println("NEURONS NULL");
+            //System.out.println(Arrays.toString(brain.getNeurons()));
+            Neuron n = brain.getNeurons()[layer - 1][i];
+            a += n.compute() * weights[i];
         }
-        cachedValueValid = true;
         // sigmoid function
-        cachedValue = (float) (1 / (1 + Math.pow(Math.E, a * -1)));
-        Log.log(Log.DEBUG,"Computed Value "+cachedValue+" for neuron");
-        return cachedValue;
+        float res = (float) (1 / (1 + Math.pow(Math.E, a * -1)));
+        Log.log(Log.DEBUG, "Computed Value " + res + " for neuron");
+        return res;
     }
 
-    public void mutate(float mutationFactor){
-        for(NeuralConnection n : inputs) n.mutate(mutationFactor);
+    public float[] mutate(float mutationFactor) {
+        float[] mutatedWeights = new float[weights.length];
+        for (int i = 0; i < weights.length; i++) {
+            mutatedWeights[i] = weights[i] + mutationFactor - mutationFactor / 2;
+        }
+        return mutatedWeights;
    }
 
     public void setOutput(float output) {
@@ -53,10 +83,6 @@ public class Neuron {
         this.output = output;
     }
 
-    public ArrayList<NeuralConnection> getInputs() {
-        return inputs;
-    }
-
     public float getBias() {
         return bias;
     }
@@ -77,9 +103,12 @@ public class Neuron {
         this.layer = layer;
     }
 
-    public void clearCachedValue() {
-        cachedValueValid = false;
-        for(NeuralConnection n : inputs) n.clearCachedValue();
+    public float[] getWeights() {
+        return weights;
+    }
+
+    public void setWeights(float[] weights) {
+        this.weights = weights;
     }
 
 }
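The "need to fix lag" in the commit message is consistent with the caching this change removes: the deleted NeuralConnection memoized each connection's value per tick, while the new Neuron.compute() recurses into brain.getNeurons()[layer - 1][i] for every weight of every neuron, so a single forward pass re-evaluates the same upstream neurons over and over (cost grows roughly as width^depth rather than width * depth). Below is a hedged sketch of putting per-pass memoization back into the new Neuron; the field and method names mirror the deleted NeuralConnection, and having Brain.compute() call clearCachedValue() on every neuron before each pass is an assumption, not something this commit does.

// Sketch only: per-pass caching re-added to the weights-based Neuron.
private float cachedValue;
private boolean cachedValueValid = false;

public float compute() {
    if (isInputNeuron) {
        return output;
    }
    if (cachedValueValid) {
        return cachedValue; // reuse the value already computed during this pass
    }
    float a = bias * -1; // activation
    for (int i = 0; i < weights.length; i++) {
        Neuron n = brain.getNeurons()[layer - 1][i];
        a += n.compute() * weights[i];
    }
    // sigmoid function
    cachedValue = (float) (1 / (1 + Math.pow(Math.E, a * -1)));
    cachedValueValid = true;
    return cachedValue;
}

// Brain.compute() would need to call this on every neuron before each pass.
public void clearCachedValue() {
    cachedValueValid = false;
}

Separately, the new Neuron.mutate() adds the constant mutationFactor - mutationFactor / 2 (that is, mutationFactor / 2) to every weight, whereas the deleted NeuralConnection.mutate() drew a random offset with Math.random(); if random mutation is still the intent, the returned weights presumably need a Math.random() term as in the old code.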