// NeuralNetworkGenetic/Genetic.cpp

/*
Author: Asrın "Syntriax" Doğan
Mail: asrindogan99@gmail.com
*/
#include <iostream>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
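/*
Tunable parameters:
RandomRange         - weights and biases are initialised/mutated within (-RandomRange, RandomRange)
InitialSynapseValue - starting weight, bias and value of every synapse
MutationRate        - per-synapse probability of being re-randomised during mutation
CrossOverRate       - per-synapse probability of being swapped during cross-over
PopCrossOverRate    - fraction of the population rebuilt from the two best networks each generation
*/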
#define RandomRange 1
#define InitialSynapseValue 0.0
#define MutationRate 0.25
#define CrossOverRate 0.25
#define PopCrossOverRate 0.75
class Synapse;
class Neuron;
class Layer;
class Input;
class Output;
class NeuralNetwork;
class Generation;
double RandomDouble(int min, int max)
{
double result;
long int value;
static unsigned long int counter = time(0);
srand(time(0) * counter++);
value = ((rand() * counter) % ((max - min) * 100000000));
result = (double)value / 100000000.0 + (double)min;
return result;
}
#pragma region Synapse
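/* A Synapse is a single weighted connection; Fire() returns value * weight + bias. */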
class Synapse
{
private:
double weight;
double value;
double bias;
public:
Synapse();
~Synapse();
void SetValue(double);
void SetWeight(double);
void SetBias(double);
double GetWeight();
double GetValue();
double GetBias();
double Fire();
};
Synapse::Synapse()
{
this -> value = this -> weight = this -> bias = InitialSynapseValue;
}
Synapse::~Synapse()
{
}
void Synapse::SetValue(double value)
{
this -> value = value;
}
void Synapse::SetWeight(double weight)
{
this -> weight = weight;
}
void Synapse::SetBias(double bias)
{
this -> bias = bias;
}
double Synapse::GetWeight()
{
return weight;
}
double Synapse::GetValue()
{
return value;
}
double Synapse::GetBias()
{
return bias;
}
double Synapse::Fire()
{
double result = 0.0;
result = this -> value * this -> weight + this -> bias;
return result;
}
#pragma endregion
#pragma region Neuron
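/* A Neuron sums the Fire() results of its incoming synapses and forwards that sum
to its outgoing synapses, one per neuron of the next layer. */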
class Neuron
{
private:
Synapse *incomings;
Synapse *forwards;
int incomingsSize;
int forwardsSize;
int layerSize;
public:
Neuron();
~Neuron();
void ConnectIncomings(Synapse *, int);
void ConnectForwards(Synapse *, int, int);
void SetValue(double);
void Reset();
double GetValue();
};
Neuron::Neuron()
{
incomings = forwards = NULL;
incomingsSize = forwardsSize = layerSize = 0;
}
Neuron::~Neuron()
{
}
void Neuron::Reset()
{
incomings = forwards = NULL;
incomingsSize = forwardsSize = layerSize = 0;
}
void Neuron::SetValue(double value)
{
int i;
/* Outgoing synapses of one neuron are layerSize apart in the next layer's synapse
array (the same stride ConnectForwards and GetValue use). */
for (i = 0; i < forwardsSize; i++)
(forwards + i * layerSize) -> SetValue(value);
}
void Neuron::ConnectIncomings(Synapse *incomings, int incomingsSize)
{
this -> incomings = incomings;
this -> incomingsSize = incomingsSize;
}
void Neuron::ConnectForwards(Synapse *forwards, int forwardsSize, int layerSize)
{
this -> forwards = forwards;
this -> forwardsSize = forwardsSize;
this -> layerSize = layerSize;
}
double Neuron::GetValue()
{
int i;
double result = 0.0;
if(!incomings) return result;
for (i = 0; i < incomingsSize; i++)
result += (incomings + i) -> Fire();
if(!forwards) return result;
for (i = 0; i < forwardsSize; i++)
(forwards + i * layerSize) -> SetValue(result);
return result;
}
#pragma endregion
#pragma region Layer
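/* A Layer owns its neurons and the synapses coming in from the previous layer.
The synapse array is stored neuron-major: the block [n * previousSize, (n + 1) * previousSize)
holds the incoming connections of neuron n. */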
class Layer
{
protected:
Neuron *neurons;
Synapse *synapses;
int neuronSize;
int synapseSize;
void _SwapSynapses(Synapse *, Synapse *);
Neuron *_CreateNeurons(int);
Synapse *_CreateSynapses(int);
public:
Layer();
Layer(int);
~Layer();
void CopySynapses(Layer *);
void FireLayer();
void Mutate();
void RandomizeValues();
void CrossOverSynapses(Layer *);
friend void WriteToFile(NeuralNetwork *);
bool CreateNeurons(int);
bool ConnectPrevious(Layer *);
bool ConnectForwards(Layer *);
int GetSize();
};
Layer::Layer()
{
neuronSize = synapseSize = 0;
neurons = NULL;
synapses = NULL;
}
Layer::Layer(int size)
{
neuronSize = synapseSize = 0;
synapses = NULL;
neurons = _CreateNeurons(size);
}
Layer::~Layer()
{
if(neurons) delete[] neurons;
if(synapses) delete[] synapses;
}
void Layer::_SwapSynapses(Synapse *first, Synapse *second)
{
Synapse temporary = Synapse();
temporary = *first;
*first = *second;
*second = temporary;
}
Neuron *Layer::_CreateNeurons(int size)
{
int i;
Neuron *newNeurons = NULL;
newNeurons = new Neuron[size];
if(newNeurons)
for (i = 0; i < size; i++)
(newNeurons + i) -> Reset();
return newNeurons;
}
Synapse *Layer::_CreateSynapses(int size)
{
Synapse *newSynapses = NULL;
newSynapses = new Synapse[size];
return newSynapses;
}
void Layer::CopySynapses(Layer *from)
{
int counter;
for (counter = 0; counter < this -> synapseSize; counter++)
*(synapses + counter) = *((from -> synapses) + counter);
}
void Layer::FireLayer()
{
int i;
for (i = 0; i < neuronSize; i++)
(neurons + i) -> GetValue();
}
void Layer::RandomizeValues()
{
double bias;
double weight;
int i;
for (i = 0; i < synapseSize; i++)
{
bias = RandomDouble(-RandomRange, RandomRange);
weight = RandomDouble(-RandomRange, RandomRange);
(synapses + i) -> SetBias(bias);
(synapses + i) -> SetWeight(weight);
}
}
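/* Genetic operators on a layer: Mutate() re-randomises each synapse with probability
MutationRate, CrossOverSynapses() swaps synapses with the partner layer with probability
CrossOverRate. */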
void Layer::Mutate()
{
double bias = 0.0;
double weight = 0.0;
double mutationValue = 0.0;
int i;
for (i = 0; i < synapseSize; i++)
{
mutationValue = RandomDouble(0, 1);
if(mutationValue <= MutationRate)
{
bias = RandomDouble(-RandomRange, RandomRange);
weight = RandomDouble(-RandomRange, RandomRange);
(synapses + i) -> SetBias(bias);
(synapses + i) -> SetWeight(weight);
}
}
}
void Layer::CrossOverSynapses(Layer *other)
{
int thisCounter;
for (thisCounter = 0; thisCounter < synapseSize; thisCounter++)
if(RandomDouble(0, 1) < CrossOverRate)
_SwapSynapses((synapses + thisCounter), (other -> synapses + thisCounter));
}
bool Layer::CreateNeurons(int size)
{
if((neurons = _CreateNeurons(size)))
neuronSize = size;
return neurons;
}
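/* Allocates one synapse per (previous neuron, own neuron) pair, hands every neuron of this
layer its contiguous block of incoming synapses, then asks the previous layer to register
those same synapses as its outgoing connections. */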
bool Layer::ConnectPrevious(Layer *previous)
{
int previousSize = previous -> GetSize();
int synapseCount = (this -> neuronSize) * previousSize;
int thisNeuron;
Neuron *currentNeuron = NULL;
if(synapses)
{
delete[] synapses;
synapses = NULL;
}
synapses = _CreateSynapses(synapseCount);
if(!synapses) return false;
for (thisNeuron = 0; thisNeuron < this -> neuronSize; thisNeuron++)
{
currentNeuron = (neurons + thisNeuron);
currentNeuron -> ConnectIncomings((synapses + thisNeuron * previousSize), previousSize);
}
synapseSize = synapseCount;
return previous -> ConnectForwards(this);
}
bool Layer::ConnectForwards(Layer *forwards)
{
int forwardsSize = forwards -> neuronSize;
int thisNeuron;
int forwardNeuron;
Neuron *currentNeuron = NULL;
for (thisNeuron = 0; thisNeuron < this -> neuronSize; thisNeuron++)
{
currentNeuron = (neurons + thisNeuron);
for (forwardNeuron = 0; forwardNeuron < forwardsSize; forwardNeuron++)
currentNeuron -> ConnectForwards(forwards -> synapses + thisNeuron, forwardsSize, this -> neuronSize);
}
return true;
}
int Layer::GetSize()
{
return neuronSize;
}
#pragma region Input-Output
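/* Input and Output are thin Layer wrappers: Input pushes a value into one input neuron
(and so into its outgoing synapses), Output reads back the summed value of one output neuron. */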
class Input : public Layer
{
public:
Input();
void SetValue(double, int);
};
Input::Input() : Layer() {}
void Input::SetValue(double value, int index = 0)
{
if(index >= this -> neuronSize || index < 0)
return;
(neurons + index) -> SetValue(value);
}
class Output : public Layer
{
public:
Output();
double GetValue(int);
};
Output::Output() : Layer() {}
double Output::GetValue(int index = 0)
{
double result = 0.0;
if(index >= this -> neuronSize || index < 0)
return result;
result = (neurons + index) -> GetValue();
return result;
}
#pragma endregion
#pragma endregion
#pragma region NeuralNetwork
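/* A NeuralNetwork is one genome of the genetic algorithm: an Input layer, hiddenSize hidden
Layers and an Output layer, plus a score used for selection. The score is the accumulated
absolute error, so lower is better. */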
class NeuralNetwork
{
private:
Input *input;
Layer *hidden;
Output *output;
int hiddenSize;
double score;
Input *_CreateInput();
Layer *_CreateLayers(int);
Output *_CreateOutput();
public:
NeuralNetwork();
NeuralNetwork(int);
~NeuralNetwork();
void Copy(const NeuralNetwork &);
void FireNetwork();
void RandomizeValues();
void MutateNetwork();
void Reset();
void CrossOverNetwork(NeuralNetwork *);
friend void WriteToFile(NeuralNetwork *);
bool SetInputNeurons(int);
bool SetHiddenNeurons(int, int);
bool SetOutputNeurons(int);
bool ConnectLayers();
bool SetLayer(int);
double GetOutput(int);
double GetError(int, double);
double GetPrediction(int);
double GetScore();
int GetHiddenSize();
void SetScore(double);
void SetInput(double, int);
};
Input *NeuralNetwork::_CreateInput()
{
Input *newInputs = NULL;
newInputs = new Input();
return newInputs;
}
Layer *NeuralNetwork::_CreateLayers(int size)
{
Layer *newLayers = NULL;
newLayers = new Layer[size];
return newLayers;
}
Output *NeuralNetwork::_CreateOutput()
{
Output *newOutputs = NULL;
newOutputs = new Output();
return newOutputs;
}
NeuralNetwork::NeuralNetwork()
{
hiddenSize = 0;
input = NULL;
hidden = NULL;
output = NULL;
}
NeuralNetwork::NeuralNetwork(int hiddenSize)
{
this -> hiddenSize = hiddenSize;
input = _CreateInput();
hidden = _CreateLayers(hiddenSize);
output = _CreateOutput();
}
NeuralNetwork::~NeuralNetwork()
{
if(input) delete input;
if(hidden) delete[] hidden;
if(output) delete output;
}
void NeuralNetwork::Copy(const NeuralNetwork &parameter)
{
int i;
input -> CopySynapses(parameter.input);
for (i = 0; i < hiddenSize; i++)
(hidden + i) -> CopySynapses(parameter.hidden + i);
output -> CopySynapses(parameter.output);
}
void NeuralNetwork::FireNetwork()
{
int i;
for (i = 0; i < hiddenSize; i++)
(hidden + i) -> FireLayer();
output -> FireLayer();
}
void NeuralNetwork::MutateNetwork()
{
int i;
input -> Mutate();
for (i = 0; i < hiddenSize; i++)
(hidden + i) -> Mutate();
output -> Mutate();
}
void NeuralNetwork::CrossOverNetwork(NeuralNetwork *other)
{
int i;
input -> CrossOverSynapses(other -> input);
for (i = 0; i < hiddenSize; i++)
(hidden + i) -> CrossOverSynapses((other -> hidden) + i);
output -> CrossOverSynapses(other -> output);
}
void NeuralNetwork::RandomizeValues()
{
int i;
input -> RandomizeValues();
for (i = 0; i < hiddenSize; i++)
(hidden + i) -> RandomizeValues();
output -> RandomizeValues();
}
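/* Dumps every synapse of the given network to Data/BestSynapses.txt as "weight, bias, "
pairs, layer by layer (input, hidden layers, output), and prints each layer's synapse
count to stdout. */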
void WriteToFile(NeuralNetwork *network)
{
int i;
int j;
Synapse *synapsePtr = network -> input -> synapses;
int count = network -> input -> synapseSize;
std::cout << count << "\n";
FILE *file = fopen("Data/BestSynapses.txt", "w");
if(!file) return;
for (i = 0; i < count; i++)
{
fprintf(file, "%lf, %lf, ", synapsePtr -> GetWeight(), synapsePtr -> GetBias());
synapsePtr++;
}
for (j = 0; j < network -> hiddenSize; j++)
{
count = (network -> hidden + j) -> synapseSize;
std::cout << count << "\n";
synapsePtr = (network -> hidden + j) -> synapses;
for (i = 0; i < count; i++)
{
fprintf(file, "%lf, %lf, ", synapsePtr -> GetWeight(), synapsePtr -> GetBias());
synapsePtr++;
}
}
synapsePtr = network -> output -> synapses;
count = network -> output -> synapseSize;
std::cout << count << "\n";
for (i = 0; i < count; i++)
{
fprintf(file, "%lf, %lf, ", synapsePtr -> GetWeight(), synapsePtr -> GetBias());
synapsePtr++;
}
fclose(file);
}
void NeuralNetwork::Reset()
{
input = NULL;
hidden = NULL;
output = NULL;
}
bool NeuralNetwork::SetInputNeurons(int size)
{
return input -> CreateNeurons(size);
}
bool NeuralNetwork::SetHiddenNeurons(int index, int size)
{
return (hidden + index) -> CreateNeurons(size);
}
bool NeuralNetwork::SetOutputNeurons(int size)
{
return output -> CreateNeurons(size);
}
bool NeuralNetwork::ConnectLayers()
{
int i;
if(!hidden -> ConnectPrevious(input))
return false;
for (i = 1; i < hiddenSize; i++)
if(!(hidden + i) -> ConnectPrevious((hidden + i - 1)))
return false;
if(!output -> ConnectPrevious((hidden + hiddenSize - 1)))
return false;
return true;
}
bool NeuralNetwork::SetLayer(int hiddenSize)
{
this -> hiddenSize = hiddenSize;
input = _CreateInput();
hidden = _CreateLayers(hiddenSize);
output = _CreateOutput();
return input && hidden && output;
}
double NeuralNetwork::GetOutput(int index = 0)
{
return output -> GetValue(index);
}
double NeuralNetwork::GetError(int index = 0, double target = 0.0)
{
double result = GetOutput(index) - target;
return result < 0.0 ? -result : result;
}
double NeuralNetwork::GetPrediction(int index = 0)
{
double result = GetOutput(index);
return result;
}
double NeuralNetwork::GetScore()
{
return score;
}
int NeuralNetwork::GetHiddenSize()
{
return hiddenSize;
}
void NeuralNetwork::SetInput(double value, int index = 0)
{
input -> SetValue(value, index);
}
void NeuralNetwork::SetScore(double value)
{
score = value;
}
#pragma endregion
#pragma region Generation
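/* A Generation holds the whole population. One training step feeds an input, fires every
network, accumulates each network's absolute error as its score, sorts the population
ascending (best first) and breeds the next population in NextGeneration(). */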
class Generation
{
private:
NeuralNetwork *networks;
int size;
int step;
double target;
void _SwapNetworks(NeuralNetwork *, NeuralNetwork *);
NeuralNetwork *_CreateNetworks(int, int);
public:
Generation();
Generation(int, int);
~Generation();
void Randomize();
void Fire();
void SortByScore();
void DisplayScores(int);
void DisplayBest(int);
void SetTarget(double);
void SetInput(double, int);
void NextGeneration();
void WriteBestToFile();
void UpdateScores();
void ResetScores();
bool CreateNetworks(int, int);
bool ConnectNetworks();
bool SetInputNeurons(int);
bool SetHiddenNeurons(int, int);
bool SetOutputNeurons(int);
double GetBestPrediction(int);
double GetError(int);
int GetStep();
};
Generation::Generation()
{
step = 0;
networks = NULL;
size = 0;
target = 0.0;
}
Generation::Generation(int size, int hiddenSizes)
{
step = 0;
target = 0.0;
this -> size = size;
networks = _CreateNetworks(size, hiddenSizes);
}
Generation::~Generation()
{
if(networks) delete[] networks;
}
NeuralNetwork *Generation::_CreateNetworks(int size, int hiddenSizes)
{
int i;
NeuralNetwork *newNetworks = NULL;
newNetworks = new NeuralNetwork[size];
if(newNetworks)
for (i = 0; i < size; i++)
(newNetworks + i) -> SetLayer(hiddenSizes);
return newNetworks;
}
void Generation::Randomize()
{
int i;
for (i = 0; i < this -> size; i++)
(networks + i) -> RandomizeValues();
}
void Generation::Fire()
{
int i;
for (i = 0; i < this -> size; i++)
(networks + i) -> FireNetwork();
}
void Generation::_SwapNetworks(NeuralNetwork *first, NeuralNetwork *second)
{
NeuralNetwork temp;
temp = *first;
*first = *second;
*second = temp;
temp.Reset();
}
void Generation::DisplayScores(int index = 0)
{
int i;
std::cout << "----Scores----\n";
for (i = 0; i < this -> size; i++)
std::cout << i << " -> " << (networks + i) -> GetError(index) << "\n";
}
void Generation::DisplayBest(int index = 0)
{
std::cout << "Target -> " << target << "\tBest -> " << networks -> GetPrediction(index) << "\n";
}
void Generation::UpdateScores()
{
double scoreToAdd;
int i;
for (i = 0; i < size; i++)
{
scoreToAdd = (networks + i) -> GetError(0, target);
(networks + i) -> SetScore((networks + i) -> GetScore() + scoreToAdd);
}
}
void Generation::ResetScores()
{
int i;
for (i = 0; i < size; i++)
(networks + i) -> SetScore(0.0);
}
double Generation::GetBestPrediction(int index = 0)
{
return networks -> GetPrediction(index);
}
double Generation::GetError(int index = 0)
{
return (networks + index) -> GetError(0, target);
}
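/* Simple exchange sort on the accumulated score; since the score is a summed absolute
error, ascending order puts the best network at index 0. */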
void Generation::SortByScore()
{
int i;
int j;
for (i = 0; i < size - 1; i++)
for (j = i + 1; j < size; j++)
if((networks + i) -> GetScore() > (networks + j) -> GetScore())
_SwapNetworks((networks + i), (networks + j));
}
void Generation::SetTarget(double target)
{
this -> target = target;
}
void Generation::SetInput(double value, int index = 0)
{
int i;
for (i = 0; i < this -> size; i++)
(networks + i) -> SetInput(value, index);
}
void Generation::WriteBestToFile()
{
WriteToFile(networks);
}
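/* Breeds the next population in place:
- networks[0] and networks[1] (the two best after SortByScore) are kept as parents,
- networks at indices 2 .. size * PopCrossOverRate - 1 are overwritten in pairs with copies
of the two parents and then either crossed over with each other or mutated (50/50),
- every network after that block is re-randomised. */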
void Generation::NextGeneration()
{
int i = 2;
int crossOverCount = size * PopCrossOverRate;
if(i + crossOverCount >= size)
return;
NeuralNetwork *first = NULL;
NeuralNetwork *second = NULL;
Fire();
for (i = 2; i < crossOverCount; i+=2)
{
first = (networks + i);
second = (networks + i + 1);
first -> Copy(*(networks + 0));
second -> Copy(*(networks + 1));
if(RandomDouble(0, 1) < 0.5)
first -> CrossOverNetwork(second);
else
{
first -> MutateNetwork();
second -> MutateNetwork();
}
}
for (; i < size; i++)
(networks + i) -> RandomizeValues();
step++;
}
bool Generation::CreateNetworks(int size, int hiddenSizes)
{
if((networks = _CreateNetworks(size, hiddenSizes)))
this -> size = size;
return networks;
}
bool Generation::ConnectNetworks()
{
int i;
for (i = 0; i < this -> size; i++)
if(!(networks + i) -> ConnectLayers())
return false;
return true;
}
bool Generation::SetInputNeurons(int size)
{
int i;
for (i = 0; i < this -> size; i++)
if(!(networks + i) -> SetInputNeurons(size))
return false;
return true;
}
bool Generation::SetHiddenNeurons(int index, int size)
{
int i;
for (i = 0; i < this -> size; i++)
if(!(networks + i) -> SetHiddenNeurons(index, size))
return false;
return true;
}
bool Generation::SetOutputNeurons(int size)
{
int i;
for (i = 0; i < this -> size; i++)
if(!(networks + i) -> SetOutputNeurons(size))
return false;
return true;
}
int Generation::GetStep()
{
return step;
}
#pragma endregion
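/*
Expected data layout (as read below): Data/train.data holds 30 and Data/test.data holds
120 comma-separated rows of 5 doubles each; the first 4 values of a row are fed to the
4 input neurons and the 5th value is used as the training/testing target.
*/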
int main()
{
FILE *inputFile;
FILE *outputFile;
int decision;
double currentError;
int trainCounter;
int inputCounter;
int doubleCounter;
int groupSize;
double trainInputs[30][5];
double testInputs[120][5];
Generation generation(50, 5);
inputFile = fopen("Data/train.data", "r");
if(!inputFile)
{
std::cout << "Could not open Data/train.data!\n";
return 1;
}
for (inputCounter = 0; inputCounter < 30; inputCounter++)
for (doubleCounter = 0; doubleCounter < 5; doubleCounter++)
fscanf(inputFile, "%lf,", &trainInputs[inputCounter][doubleCounter]);
fclose(inputFile);
inputFile = fopen("Data/test.data", "r");
if(!inputFile)
{
std::cout << "Could not open Data/test.data!\n";
return 1;
}
for (inputCounter = 0; inputCounter < 120; inputCounter++)
for (doubleCounter = 0; doubleCounter < 5; doubleCounter++)
fscanf(inputFile, "%lf,", &testInputs[inputCounter][doubleCounter]);
fclose(inputFile);
std::cout << "Inputs Are Getting Set: ";
std::cout << (generation.SetInputNeurons(4) ? "Successful!" : "Failed!") << "\n";
std::cout << "Hidden Layer 1 Is Getting Set: ";
std::cout << (generation.SetHiddenNeurons(0, 2) ? "Successful!" : "Failed!") << "\n";
std::cout << "Hidden Layer 2 Is Getting Set: ";
std::cout << (generation.SetHiddenNeurons(1, 2) ? "Successful!" : "Failed!") << "\n";
std::cout << "Hidden Layer 3 Is Getting Set: ";
std::cout << (generation.SetHiddenNeurons(2, 2) ? "Successful!" : "Failed!") << "\n";
std::cout << "Hidden Layer 4 Is Getting Set: ";
std::cout << (generation.SetHiddenNeurons(3, 2) ? "Successful!" : "Failed!") << "\n";
std::cout << "Hidden Layer 5 Is Getting Set: ";
std::cout << (generation.SetHiddenNeurons(4, 2) ? "Successful!" : "Failed!") << "\n";
std::cout << "Outputs Are Getting Set: ";
std::cout << (generation.SetOutputNeurons(1) ? "Successful!" : "Failed!") << "\n";
std::cout << "Networks Are Getting Connected: ";
std::cout << (generation.ConnectNetworks() ? "Successful!" : "Failed!") << "\n";
std::cout << "Networks Are Getting Randomized: ";
generation.Randomize();
std::cout << "Done!\n";
do
{
std::cout << "\n[-1] Test\n[-2] Best to File\n[-3] Exit\nAny Positive Number for training count\nDecision: ";
std::cin >> decision;
switch (decision)
{
case -3:
std::cout << "Exiting...\n";
break;
case -2:
generation.WriteBestToFile();
break;
default:
for (trainCounter = 0; trainCounter < decision; trainCounter++)
{
std::cout << (trainCounter + 1) << "\n";
for (inputCounter = 0; inputCounter < 10; inputCounter++)
{
generation.ResetScores();
for (groupSize = 0; groupSize < 3; groupSize++)
{
for (doubleCounter = 0; doubleCounter < 4; doubleCounter++)
generation.SetInput(trainInputs[inputCounter * 3 + groupSize][doubleCounter], doubleCounter);
generation.SetTarget(trainInputs[inputCounter * 3 + groupSize][4]);
generation.Fire();
generation.UpdateScores();
}
generation.SortByScore();
generation.NextGeneration();
}
}
std::cout << "Best Prediction -> " << generation.GetBestPrediction() << "\n";
std::cout << "Training is Over!\n";
// no break: fall through into the test case once training is done
case -1:
outputFile = fopen("Data/results.data", "w");
for (inputCounter = 0; inputCounter < 120; inputCounter++)
{
for (doubleCounter = 0; doubleCounter < 4; doubleCounter++)
generation.SetInput(testInputs[inputCounter][doubleCounter], doubleCounter);
generation.SetTarget(testInputs[inputCounter][4]);
generation.Fire();
currentError = testInputs[inputCounter][4] - generation.GetBestPrediction() < 0 ? generation.GetBestPrediction() - testInputs[inputCounter][4] : testInputs[inputCounter][4] - generation.GetBestPrediction();
fprintf(outputFile, "%lf,%lf,%lf\n", testInputs[inputCounter][4], generation.GetBestPrediction(), currentError);
}
fclose(outputFile);
std::cout << "Testing is Over!\n";
break;
}
} while (decision != -3);
return 0;
}