// NeuralNetworkGenetic/Genetic.cpp

#include <iostream>
#include <stdio.h>   // fopen, fprintf, fscanf, fclose
#include <stdlib.h>  // rand, srand
#include <time.h>

#define RandomRange 1           // weights and biases are drawn from [-RandomRange, RandomRange]
#define InitialSynapseValue 0.0
#define MutationRate 0.25       // probability that a synapse is re-randomized during mutation
#define CrossOverRate 0.25      // probability that a synapse pair is swapped during crossover
#define PopCrossOverRate 0.7    // fraction of the population rebuilt from the two best networks

class Synapse;
class Neuron;
class Layer;
class Input;
class Output;
class NeuralNetwork;
class Generation;

// Returns a uniform random float in [min, max]. The generator is seeded once on first use.
float RandomFloat(int min, int max)
{
static bool seeded = false;
if(!seeded)
{
srand(time(0));
seeded = true;
}
return (float)min + (float)(max - min) * ((float)rand() / (float)RAND_MAX);
}
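// RandomFloat(-RandomRange, RandomRange) is used below to draw weights and biases,
// and RandomFloat(0, 1) to draw the probabilities for mutation and crossover decisions.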
#pragma region Synapse
class Synapse
{
private:
float weight;
float value;
float bias;
public:
Synapse();
~Synapse();
void SetValue(float);
void SetWeight(float);
void SetBias(float);
float GetWeight();
float GetValue();
float GetBias();
float Fire();
};

Synapse::Synapse()
{
this -> value = this -> weight = this -> bias = InitialSynapseValue;
}
Synapse::~Synapse()
{
}

void Synapse::SetValue(float value)
{
this -> value = value;
}
void Synapse::SetWeight(float weight)
{
this -> weight = weight;
}
void Synapse::SetBias(float bias)
{
this -> bias = bias;
}
float Synapse::GetWeight()
{
return weight;
}
float Synapse::GetValue()
{
return value;
}
float Synapse::GetBias()
{
return bias;
}

float Synapse::Fire()
{
float result = 0.0;
result = this -> value * this -> weight + this -> bias;
return result;
}
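/*
Note: Fire() is a purely affine contribution (value * weight + bias); there is no
activation function anywhere in this network, so every output is an affine combination
of the inputs. Illustrative numbers: a neuron with two incoming synapses holding
(value=1.0, weight=0.5, bias=0.1) and (value=2.0, weight=-0.25, bias=0.0) would report
GetValue() = (1.0*0.5 + 0.1) + (2.0*-0.25 + 0.0) = 0.1.
*/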
#pragma endregion
#pragma region Neuron
class Neuron
{
private:
Synapse *incomings;
Synapse *forwards;
int incomingsSize;
int forwardsSize;
int layerSize;
public:
Neuron();
~Neuron();
void ConnectIncomings(Synapse *, int);
void ConnectForwards(Synapse *, int, int);
void SetValue(float);
void Reset();
float GetValue();
};

Neuron::Neuron()
{
incomings = forwards = NULL;
incomingsSize = forwardsSize = layerSize = 0;
}
Neuron::~Neuron()
{
}

void Neuron::Reset()
{
incomings = forwards = NULL;
incomingsSize = forwardsSize = layerSize = 0;
}

void Neuron::SetValue(float value)
{
int i;
// Forward synapses for this neuron are laid out with a stride of layerSize
// (see Neuron::GetValue), so the same stride has to be used when writing the value.
for (i = 0; i < forwardsSize; i++)
(forwards + i * layerSize) -> SetValue(value);
}

void Neuron::ConnectIncomings(Synapse *incomings, int incomingsSize)
{
this -> incomings = incomings;
this -> incomingsSize = incomingsSize;
}

void Neuron::ConnectForwards(Synapse *forwards, int forwardsSize, int layerSize)
{
this -> forwards = forwards;
this -> forwardsSize = forwardsSize;
this -> layerSize = layerSize;
}

float Neuron::GetValue()
{
int i;
float result = 0.0;
if(!incomings) return result;
for (i = 0; i < incomingsSize; i++)
result += (incomings + i) -> Fire();
if(!forwards) return result;
for (i = 0; i < forwardsSize; i++)
(forwards + i * layerSize) -> SetValue(result);
return result;
}
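/*
Synapse layout (as implied by ConnectPrevious/ConnectForwards): a layer with N neurons
that follows a layer with P neurons owns an array of N*P synapses, where
synapses[t * P + p] connects previous-neuron p to this-neuron t. A neuron therefore
reads its incoming synapses contiguously (incomings + i) and writes its result into the
next layer's array with a stride of layerSize (forwards + i * layerSize).
*/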
#pragma endregion
#pragma region Layer
class Layer
{
protected:
Neuron *neurons;
Synapse *synapses;
int neuronSize;
int synapseSize;
void _SwapSynapses(Synapse *, Synapse *);
Neuron *_CreateNeurons(int);
Synapse *_CreateSynapses(int);
public:
Layer();
Layer(int);
~Layer();
void CopySynapses(Layer *);
void FireLayer();
void Mutate();
void RandomizeValues();
void CrossOverSynapses(Layer *);
friend void WriteToFile(NeuralNetwork *);
bool CreateNeurons(int);
bool ConnectPrevious(Layer *);
bool ConnectForwards(Layer *);
int GetSize();
};
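/*
A Layer owns its Neuron array and the Synapse array coming into it from the previous
layer; ConnectForwards() only hands out pointers into the NEXT layer's synapse array,
so no synapse is owned by two layers at once.
*/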

Layer::Layer()
{
neuronSize = synapseSize = 0;
neurons = NULL;
synapses = NULL;
}

Layer::Layer(int size)
{
neuronSize = synapseSize = 0;
synapses = NULL;
neurons = _CreateNeurons(size);
}

Layer::~Layer()
{
// Both arrays are allocated with new[], so they must be released with delete[].
if(neurons) delete[] neurons;
if(synapses) delete[] synapses;
}

void Layer::_SwapSynapses(Synapse *first, Synapse *second)
{
Synapse temporary = *first;
*first = *second;
*second = temporary;
}

Neuron *Layer::_CreateNeurons(int size)
{
int i;
Neuron *newNeurons = new Neuron[size];
if(newNeurons)
for (i = 0; i < size; i++)
(newNeurons + i) -> Reset();
return newNeurons;
}

Synapse *Layer::_CreateSynapses(int size)
{
return new Synapse[size];
}

void Layer::CopySynapses(Layer *from)
{
int counter;
for (counter = 0; counter < this -> synapseSize; counter++)
*(synapses + counter) = *((from -> synapses) + counter);
}

void Layer::FireLayer()
{
int i;
for (i = 0; i < neuronSize; i++)
(neurons + i) -> GetValue();
}

void Layer::RandomizeValues()
{
float bias;
float weight;
int i;
for (i = 0; i < synapseSize; i++)
{
bias = RandomFloat(-RandomRange, RandomRange);
weight = RandomFloat(-RandomRange, RandomRange);
(synapses + i) -> SetBias(bias);
(synapses + i) -> SetWeight(weight);
}
}

void Layer::Mutate()
{
float bias = 0.0;
float weight = 0.0;
float mutationValue = 0.0;
int i;
for (i = 0; i < synapseSize; i++)
{
mutationValue = RandomFloat(0, 1);
if(mutationValue <= MutationRate)
{
bias = RandomFloat(-RandomRange, RandomRange);
weight = RandomFloat(-RandomRange, RandomRange);
(synapses + i) -> SetBias(bias);
(synapses + i) -> SetWeight(weight);
}
}
}

void Layer::CrossOverSynapses(Layer *other)
{
int thisCounter;
for (thisCounter = 0; thisCounter < synapseSize; thisCounter++)
if(RandomFloat(0, 1) < CrossOverRate)
_SwapSynapses((synapses + thisCounter), (other -> synapses + thisCounter));
}
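/*
Genetic operators at the layer level: Mutate() re-randomizes each synapse's weight and
bias with probability MutationRate, and CrossOverSynapses() performs a uniform crossover,
swapping position-matched synapses between two layers with probability CrossOverRate.
Both operate in place on this layer's synapse array.
*/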

bool Layer::CreateNeurons(int size)
{
if((neurons = _CreateNeurons(size)))
neuronSize = size;
return neurons != NULL;
}
bool Layer::ConnectPrevious(Layer *previous)
{
int previousSize = previous -> GetSize();
int synapseCount = (this -> neuronSize) * previousSize;
int thisNeuron;
Neuron *currentNeuron = NULL;
if(synapses)
{
// Allocated with new[] in _CreateSynapses, so release with delete[].
delete[] synapses;
synapses = NULL;
}
synapses = _CreateSynapses(synapseCount);
if(!synapses) return false;
// Each neuron of this layer reads a contiguous block of previousSize synapses.
for (thisNeuron = 0; thisNeuron < this -> neuronSize; thisNeuron++)
{
currentNeuron = (neurons + thisNeuron);
currentNeuron -> ConnectIncomings((synapses + thisNeuron * previousSize), previousSize);
}
synapseSize = synapseCount;
return previous -> ConnectForwards(this);
}
bool Layer::ConnectForwards(Layer *forwards)
{
int forwardsSize = forwards -> neuronSize;
int thisNeuron;
Neuron *currentNeuron = NULL;
for (thisNeuron = 0; thisNeuron < this -> neuronSize; thisNeuron++)
{
// Neuron thisNeuron writes into the forward layer's synapse array starting at
// offset thisNeuron, with a stride equal to this layer's size (see Neuron::GetValue).
currentNeuron = (neurons + thisNeuron);
currentNeuron -> ConnectForwards(forwards -> synapses + thisNeuron, forwardsSize, this -> neuronSize);
}
return true;
}
int Layer::GetSize()
{
return neuronSize;
}
#pragma region Input-Output
class Input : public Layer
{
public:
Input();
void SetValue(float, int);
};

Input::Input() : Layer() {}

void Input::SetValue(float value, int index = 0)
{
if(index >= this -> neuronSize || index < 0)
return;
(neurons + index) -> SetValue(value);
}
class Output : public Layer
{
public:
Output();
float GetValue(int);
};

Output::Output() : Layer() {}

float Output::GetValue(int index = 0)
{
float result = 0.0;
if(index >= this -> neuronSize || index < 0)
return result;
result = (neurons + index) -> GetValue();
return result;
}
#pragma endregion
#pragma endregion
#pragma region NeuralNetwork
class NeuralNetwork
{
private:
Input *input;
Layer *hidden;
Output *output;
int hiddenSize;
float score;
Input *_CreateInput();
Layer *_CreateLayers(int);
Output *_CreateOutput();
public:
NeuralNetwork();
NeuralNetwork(int);
~NeuralNetwork();
void Copy(const NeuralNetwork &);
void FireNetwork();
void RandomizeValues();
void MutateNetwork();
void Reset();
void CrossOverNetwork(NeuralNetwork *);
friend void WriteToFile(NeuralNetwork *);
bool SetInputNeurons(int);
bool SetHiddenNeurons(int, int);
bool SetOutputNeurons(int);
bool ConnectLayers();
bool SetLayer(int);
float GetOutput(int);
float GetError(int, float);
float GetPrediction(int);
float GetScore();
int GetHiddenSize();
void SetScore(float);
void SetInput(float, int);
};
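/*
A NeuralNetwork is one Input layer, hiddenSize hidden Layers and one Output layer.
The score field is not used by the network itself; it is the fitness value that the
Generation class accumulates and sorts on.
*/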

Input *NeuralNetwork::_CreateInput()
{
return new Input();
}
Layer *NeuralNetwork::_CreateLayers(int size)
{
return new Layer[size];
}
Output *NeuralNetwork::_CreateOutput()
{
return new Output();
}

NeuralNetwork::NeuralNetwork()
{
hiddenSize = 0;
score = 0.0;
input = NULL;
hidden = NULL;
output = NULL;
}

NeuralNetwork::NeuralNetwork(int hiddenSize)
{
this -> hiddenSize = hiddenSize;
score = 0.0;
input = _CreateInput();
hidden = _CreateLayers(hiddenSize);
output = _CreateOutput();
}

NeuralNetwork::~NeuralNetwork()
{
if(input) delete input;
// hidden is an array allocated with new[], so it needs delete[].
if(hidden) delete[] hidden;
if(output) delete output;
}

void NeuralNetwork::Copy(const NeuralNetwork &parameter)
{
int i;
input -> CopySynapses(parameter.input);
for (i = 0; i < hiddenSize; i++)
(hidden + i) -> CopySynapses(parameter.hidden + i);
output -> CopySynapses(parameter.output);
}

void NeuralNetwork::FireNetwork()
{
int i;
for (i = 0; i < hiddenSize; i++)
(hidden + i) -> FireLayer();
output -> FireLayer();
}

void NeuralNetwork::MutateNetwork()
{
int i;
input -> Mutate();
for (i = 0; i < hiddenSize; i++)
(hidden + i) -> Mutate();
output -> Mutate();
}

void NeuralNetwork::CrossOverNetwork(NeuralNetwork *other)
{
int i;
input -> CrossOverSynapses(other -> input);
for (i = 0; i < hiddenSize; i++)
(hidden + i) -> CrossOverSynapses((other -> hidden) + i);
output -> CrossOverSynapses(other -> output);
}

void NeuralNetwork::RandomizeValues()
{
int i;
input -> RandomizeValues();
for (i = 0; i < hiddenSize; i++)
(hidden + i) -> RandomizeValues();
output -> RandomizeValues();
}

void WriteToFile(NeuralNetwork *network)
{
int i;
int j;
Synapse *synapsePtr = network -> input -> synapses;
int count = network -> input -> synapseSize;
std::cout << count << "\n";
FILE *file = fopen("Data/BestSynapses.txt", "w");
if(!file) return;
for (i = 0; i < count; i++)
{
fprintf(file, "%f, %f, ", synapsePtr -> GetWeight(), synapsePtr -> GetBias());
synapsePtr++;
}
for (j = 0; j < network -> hiddenSize; j++)
{
count = (network -> hidden + j) -> synapseSize;
std::cout << count << "\n";
synapsePtr = (network -> hidden + j) -> synapses;
for (i = 0; i < count; i++)
{
fprintf(file, "%f, %f, ", synapsePtr -> GetWeight(), synapsePtr -> GetBias());
synapsePtr++;
}
}
synapsePtr = network -> output -> synapses;
count = network -> output -> synapseSize;
std::cout << count << "\n";
for (i = 0; i < count; i++)
{
fprintf(file, "%f, %f, ", synapsePtr -> GetWeight(), synapsePtr -> GetBias());
synapsePtr++;
}
fclose(file);
}
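/*
File format note: WriteToFile() dumps every synapse of the given network to
Data/BestSynapses.txt as a flat comma-separated list of "weight, bias" pairs, ordered
input layer first, then each hidden layer in order, then the output layer. The counts
printed to std::cout are informational only; a loader would need to know the same layer
sizes to rebuild the network from this file.
*/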

void NeuralNetwork::Reset()
{
input = NULL;
hidden = NULL;
output = NULL;
}

bool NeuralNetwork::SetInputNeurons(int size)
{
return input -> CreateNeurons(size);
}
bool NeuralNetwork::SetHiddenNeurons(int index, int size)
{
return (hidden + index) -> CreateNeurons(size);
}
bool NeuralNetwork::SetOutputNeurons(int size)
{
return output -> CreateNeurons(size);
}
bool NeuralNetwork::ConnectLayers()
{
int i;
if(!hidden -> ConnectPrevious(input))
return false;
for (i = 1; i < hiddenSize; i++)
if(!(hidden + i) -> ConnectPrevious((hidden + i - 1)))
return false;
if(!output -> ConnectPrevious((hidden + hiddenSize - 1)))
return false;
return true;
}

bool NeuralNetwork::SetLayer(int hiddenSize)
{
this -> hiddenSize = hiddenSize;
input = _CreateInput();
hidden = _CreateLayers(hiddenSize);
output = _CreateOutput();
return input && hidden && output;
}

float NeuralNetwork::GetOutput(int index = 0)
{
return output -> GetValue(index);
}

// Returns the absolute error of the selected output against the given target.
float NeuralNetwork::GetError(int index = 0, float target = 0.0)
{
float result = GetOutput(index) - target;
return result < 0.0 ? -result : result;
}

float NeuralNetwork::GetPrediction(int index = 0)
{
return GetOutput(index);
}

float NeuralNetwork::GetScore()
{
return score;
}

int NeuralNetwork::GetHiddenSize()
{
return hiddenSize;
}

void NeuralNetwork::SetInput(float value, int index = 0)
{
input -> SetValue(value, index);
}

void NeuralNetwork::SetScore(float value)
{
score = value;
}

#pragma endregion
#pragma region Generation
class Generation
{
private:
NeuralNetwork *networks;
int size;
int step;
float target;
void _SwapNetworks(NeuralNetwork *, NeuralNetwork *);
NeuralNetwork *_CreateNetworks(int, int);
public:
Generation();
Generation(int, int);
~Generation();
void Randomize();
void Fire();
void SortByScore();
void DisplayScores(int);
void DisplayBest(int);
void SetTarget(float);
void SetInput(float, int);
void NextGeneration();
void WriteBestToFile();
void UpdateScores();
void ResetScores();
bool CreateNetworks(int, int);
bool ConnectNetworks();
bool SetInputNeurons(int);
bool SetHiddenNeurons(int, int);
bool SetOutputNeurons(int);
float GetBestPrediction(int);
float GetError(int);
int GetStep();
};
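/*
Generation holds the whole population: networks is an array of `size` NeuralNetworks,
target is the expected output for the sample currently loaded with SetInput(), and
step counts how many NextGeneration() calls have been made.
*/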
Generation::Generation()
{
step = 0;
networks = NULL;
size = 0;
target = 0.0;
}
Generation::Generation(int size, int hiddenSizes)
{
step = 0;
target = 0.0;
this -> size = size;
networks = _CreateNetworks(size, hiddenSizes);
}
Generation::~Generation()
{
// networks is an array allocated with new[], so it needs delete[].
if(networks) delete[] networks;
}
NeuralNetwork *Generation::_CreateNetworks(int size, int hiddenSizes)
{
int i;
NeuralNetwork *newNetworks = new NeuralNetwork[size];
if(newNetworks)
for (i = 0; i < size; i++)
(newNetworks + i) -> SetLayer(hiddenSizes);
return newNetworks;
}
void Generation::Randomize()
{
int i;
for (i = 0; i < this -> size; i++)
(networks + i) -> RandomizeValues();
}
void Generation::Fire()
{
int i;
for (i = 0; i < this -> size; i++)
(networks + i) -> FireNetwork();
}

void Generation::_SwapNetworks(NeuralNetwork *first, NeuralNetwork *second)
{
NeuralNetwork temp;
temp = *first;
*first = *second;
*second = temp;
// The default assignment only copies pointers, so temp must be reset before it is
// destroyed; otherwise its destructor would free layers still owned by *second.
temp.Reset();
}
void Generation::DisplayScores(int index = 0)
{
int i;
std::cout << "----Scores----\n";
for (i = 0; i < this -> size; i++)
std::cout << i << " -> " << (networks + i) -> GetError(index) << "\n";
}

void Generation::DisplayBest(int index = 0)
{
std::cout << "Target -> " << target << "\tBest -> " << networks -> GetPrediction(index) << "\n";
}

// Accumulates each network's absolute error against the current target into its score
// (lower accumulated score = better network).
void Generation::UpdateScores()
{
float scoreToAdd;
int i;
for (i = 0; i < size; i++)
{
scoreToAdd = (networks + i) -> GetError(0, target);
(networks + i) -> SetScore((networks + i) -> GetScore() + scoreToAdd);
}
}

void Generation::ResetScores()
{
int i;
for (i = 0; i < size; i++)
(networks + i) -> SetScore(0.0);
}

float Generation::GetBestPrediction(int index = 0)
{
// After SortByScore(), networks[0] is the current best network.
return networks -> GetPrediction(index);
}
float Generation::GetError(int index = 0)
{
return (networks + index) -> GetError(0, target);
}

// Simple selection sort: orders the population by ascending accumulated score
// (accumulated absolute error), so the best networks end up at the front.
void Generation::SortByScore()
{
int i;
int j;
for (i = 0; i < size - 1; i++)
for (j = i + 1; j < size; j++)
if((networks + i) -> GetScore() > (networks + j) -> GetScore())
_SwapNetworks((networks + i), (networks + j));
}

void Generation::SetTarget(float target)
{
this -> target = target;
}
void Generation::SetInput(float value, int index = 0)
{
int i;
for (i = 0; i < this -> size; i++)
(networks + i) -> SetInput(value, index);
}

void Generation::WriteBestToFile()
{
WriteToFile(networks);
}

// Builds the next generation: networks 0 and 1 (the two best after SortByScore) are kept
// as-is, the next PopCrossOverRate share of the population is rebuilt from copies of those
// two elites and then either crossed over or mutated, and the remainder is re-randomized.
void Generation::NextGeneration()
{
int i = 2;
int crossOverCount = size * PopCrossOverRate;
if(i + crossOverCount >= size)
return;
NeuralNetwork *first = NULL;
NeuralNetwork *second = NULL;
Fire();
for (i = 2; i < crossOverCount; i += 2)
{
first = (networks + i);
second = (networks + i + 1);
first -> Copy(*(networks + 0));
second -> Copy(*(networks + 1));
if(RandomFloat(0, 1) < 0.5)
first -> CrossOverNetwork(second);
else
{
first -> MutateNetwork();
second -> MutateNetwork();
}
}
for (; i < size; i++)
(networks + i) -> RandomizeValues();
step++;
}
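/*
Typical use of Generation for one training step (see main() below): load a sample with
SetInput()/SetTarget(), call Fire() and UpdateScores() for each sample in the batch,
then SortByScore() followed by NextGeneration() to evolve the population.
*/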

bool Generation::CreateNetworks(int size, int hiddenSizes)
{
if((networks = _CreateNetworks(size, hiddenSizes)))
this -> size = size;
return networks != NULL;
}
bool Generation::ConnectNetworks()
{
int i;
for (i = 0; i < this -> size; i++)
if(!(networks + i) -> ConnectLayers())
return false;
return true;
}
bool Generation::SetInputNeurons(int size)
{
int i;
for (i = 0; i < this -> size; i++)
if(!(networks + i) -> SetInputNeurons(size))
return false;
return true;
}
bool Generation::SetHiddenNeurons(int index, int size)
{
int i;
for (i = 0; i < this -> size; i++)
if(!(networks + i) -> SetHiddenNeurons(index, size))
return false;
return true;
}
bool Generation::SetOutputNeurons(int size)
{
int i;
for (i = 0; i < this -> size; i++)
if(!(networks + i) -> SetOutputNeurons(size))
return false;
return true;
}

int Generation::GetStep()
{
return step;
}

#pragma endregion

int main()
{
FILE *inputFile;
FILE *outputFile;
int decision;
float currentError;
int trainCounter;
int inputCounter;
int floatCounter;
int groupSize;
float trainInputs[30][5];
float testInputs[120][5];
Generation generation(50, 5);

// Each data row holds 4 input features followed by the target value.
inputFile = fopen("Data/train.data", "r");
if(!inputFile)
{
std::cout << "Could not open Data/train.data\n";
return 1;
}
for (inputCounter = 0; inputCounter < 30; inputCounter++)
for (floatCounter = 0; floatCounter < 5; floatCounter++)
fscanf(inputFile, "%f,", &trainInputs[inputCounter][floatCounter]);
fclose(inputFile);

inputFile = fopen("Data/test.data", "r");
if(!inputFile)
{
std::cout << "Could not open Data/test.data\n";
return 1;
}
for (inputCounter = 0; inputCounter < 120; inputCounter++)
for (floatCounter = 0; floatCounter < 5; floatCounter++)
fscanf(inputFile, "%f,", &testInputs[inputCounter][floatCounter]);
fclose(inputFile);

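// Expected data layout (sketch, based on the fscanf format above): every record in
// train.data / test.data is five comma-separated floats, four features followed by the
// target, e.g. 5.1,3.5,1.4,0.2,0.0 (illustrative values only).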
std::cout << "Inputs Are Getting Set: ";
std::cout << (generation.SetInputNeurons(4) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Hidden 1 Are Getting Set: ";
std::cout << (generation.SetHiddenNeurons(0, 2) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Hidden 2 Are Getting Set: ";
std::cout << (generation.SetHiddenNeurons(1, 2) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Hidden 3 Are Getting Set: ";
std::cout << (generation.SetHiddenNeurons(2, 2) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Hidden 4 Are Getting Set: ";
std::cout << (generation.SetHiddenNeurons(3, 2) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Hidden 5 Are Getting Set: ";
std::cout << (generation.SetHiddenNeurons(4, 2) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Outputs Are Getting Set: ";
std::cout << (generation.SetOutputNeurons(1) ? "Successfull!" : "Failed!") << "\n";
2019-12-24 16:49:49 +03:00
std::cout << "Networks Are Getting Connected: ";
2019-12-24 20:40:55 +03:00
std::cout << (generation.ConnectNetworks() ? "Successfull!" : "Failed!") << "\n";
2019-12-15 16:55:17 +03:00
2019-12-16 20:37:58 +03:00
generation.Randomize();
2019-12-18 23:05:18 +03:00
2019-12-24 16:49:49 +03:00
do
{
std::cout << "[-1] Test\n[-2] Best to File\n[-3] Exit\nAny Positive Number for train count\nDecision: ";
std::cin >> decision;
switch (decision)
{
case -3:
std::cout << "Exiting...\n";
break;
case -2:
generation.WriteBestToFile();
break;
default:
for (trainCounter = 0; trainCounter < decision; trainCounter++)
{
std::cout << (trainCounter + 1) << "\n";
for (inputCounter = 0; inputCounter < 10; inputCounter++)
{
generation.ResetScores();
// Score each network on a group of 3 consecutive training samples before evolving.
for (groupSize = 0; groupSize < 3; groupSize++)
{
for (floatCounter = 0; floatCounter < 4; floatCounter++)
generation.SetInput(trainInputs[inputCounter * 3 + groupSize][floatCounter], floatCounter);
generation.SetTarget(trainInputs[inputCounter * 3 + groupSize][4]);
generation.Fire();
generation.UpdateScores();
}
generation.SortByScore();
generation.NextGeneration();
}
}
std::cout << "Best Prediction -> " << generation.GetBestPrediction() << "\n";
std::cout << "Train is Over!\n";
// No break here on purpose: after training, fall through to the test case below.
case -1:
outputFile = fopen("Data/results.data", "w");
if(!outputFile)
{
std::cout << "Could not open Data/results.data\n";
break;
}
for (inputCounter = 0; inputCounter < 120; inputCounter++)
{
for (floatCounter = 0; floatCounter < 4; floatCounter++)
generation.SetInput(testInputs[inputCounter][floatCounter], floatCounter);
generation.SetTarget(testInputs[inputCounter][4]);
generation.Fire();
currentError = testInputs[inputCounter][4] - generation.GetBestPrediction() < 0 ? generation.GetBestPrediction() - testInputs[inputCounter][4] : testInputs[inputCounter][4] - generation.GetBestPrediction();
fprintf(outputFile, "%f,%f,%f\n", testInputs[inputCounter][4], generation.GetBestPrediction(), currentError);
}
fclose(outputFile);
std::cout << "Test is Over!\n";
break;
}
} while (decision != -3);

return 0;
}
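/*
Build/run sketch (assumes g++; any standard C++ compiler should work):
g++ Genetic.cpp -o genetic
The program reads Data/train.data and Data/test.data and writes Data/results.data and
Data/BestSynapses.txt, all relative to the working directory it is run from.
*/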