Changed Genetic.cpp to hpp

parent ce969af2df
commit f0bf9192dc
.gitignore
@@ -1,3 +1,3 @@
*.*
!main.cpp
-!Genetic.cpp
+!Genetic.hpp
Genetic.cpp → Genetic.hpp
@@ -1,15 +1,11 @@
/*
    Author: Asrın "Syntriax" Doğan
    Mail: asrindogan99@gmail.com
*/
#include <iostream>
#include <time.h>

#define RandomRange 1
#define InitialSynapseValue 0.0
-#define MutationRate 0.25
+#define MutationRate 0.15
#define CrossOverRate 0.25
-#define PopCrossOverRate 0.75
+#define PopCrossOverRate 0.5

class Synapse;
class Neuron;
@@ -192,6 +188,7 @@ double RandomDouble(int min, int max)
void Mutate();
void RandomizeValues();
void CrossOverSynapses(Layer *);
friend void LoadFromFile(NeuralNetwork *, char *);
friend void WriteToFile(NeuralNetwork *);
bool CreateNeurons(int);
bool ConnectPrevious(Layer *);
@@ -283,6 +280,7 @@ double RandomDouble(int min, int max)
double bias = 0.0;
double weight = 0.0;
double mutationValue = 0.0;
bool isMutated = false;
int i;

for (i = 0; i < synapseSize; i++)
@@ -290,12 +288,16 @@ double RandomDouble(int min, int max)
mutationValue = RandomDouble(0, 1);
if(mutationValue <= MutationRate)
{
isMutated = true;
bias = RandomDouble(-RandomRange, RandomRange);
weight = RandomDouble(-RandomRange, RandomRange);
(synapses + i) -> SetBias(bias);
(synapses + i) -> SetWeight(weight);
}
}

if(!isMutated && synapseSize != 0)
Mutate();
}

void Layer::CrossOverSynapses(Layer *other)
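For illustration only (not part of the commit): the isMutated flag added above makes Layer::Mutate() retry until at least one synapse actually changes. The same retry pattern, reduced to a self-contained sketch with an illustrative MutateAtLeastOnce name and a stand-in RandomDouble:

    #include <cstdlib>

    #define MutationRate 0.15
    #define RandomRange 1

    // Stand-in for the RandomDouble(min, max) helper declared in Genetic.hpp.
    static double RandomDouble(int min, int max)
    {
        return min + (max - min) * (std::rand() / (double)RAND_MAX);
    }

    // Mirrors the new retry: if nothing mutated, call again until something does.
    static void MutateAtLeastOnce(double *weights, int size)
    {
        bool isMutated = false;
        for (int i = 0; i < size; i++)
            if (RandomDouble(0, 1) <= MutationRate)
            {
                isMutated = true;
                weights[i] = RandomDouble(-RandomRange, RandomRange);
            }
        if (!isMutated && size != 0)
            MutateAtLeastOnce(weights, size);
    }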
@@ -422,6 +424,7 @@ double RandomDouble(int min, int max)
void MutateNetwork();
void Reset();
void CrossOverNetwork(NeuralNetwork *);
friend void LoadFromFile(NeuralNetwork *, char *);
friend void WriteToFile(NeuralNetwork *);
bool SetInputNeurons(int);
bool SetHiddenNeurons(int, int);
@@ -548,22 +551,20 @@ double RandomDouble(int min, int max)
int j;
Synapse *synapsePtr = network -> input -> synapses;
int count = network -> input -> synapseSize;
std::cout << count << "\n";
FILE *file = fopen("Data/BestSynapses.txt", "w");
for (i = 0; i < count; i++)
{
-fprintf(file, "%lf, %lf, ", synapsePtr -> GetWeight(), synapsePtr -> GetBias());
+fprintf(file, "%f, %f, ", synapsePtr -> GetWeight(), synapsePtr -> GetBias());
synapsePtr++;
}

for (j = 0; j < network -> hiddenSize; j++)
{
count = (network -> hidden + j) -> synapseSize;
std::cout << count << "\n";
synapsePtr = (network -> hidden + j) -> synapses;
for (i = 0; i < count; i++)
{
-fprintf(file, "%lf, %lf, ", synapsePtr -> GetWeight(), synapsePtr -> GetBias());
+fprintf(file, "%f, %f, ", synapsePtr -> GetWeight(), synapsePtr -> GetBias());
synapsePtr++;
}
}
@@ -571,10 +572,9 @@ double RandomDouble(int min, int max)

synapsePtr = network -> output -> synapses;
count = network -> output -> synapseSize;
std::cout << count << "\n";
for (i = 0; i < count; i++)
{
-fprintf(file, "%lf, %lf, ", synapsePtr -> GetWeight(), synapsePtr -> GetBias());
+fprintf(file, "%f, %f, ", synapsePtr -> GetWeight(), synapsePtr -> GetBias());
synapsePtr++;
}
fclose(file);
@@ -587,6 +587,49 @@ double RandomDouble(int min, int max)
output = NULL;
}

void LoadFromFile(NeuralNetwork *network, char *filePath)
{
int i;
int j;
float readWeight;
float readBias;
Synapse *synapsePtr = network -> input -> synapses;
int count = network -> input -> synapseSize;
FILE *file = fopen(filePath, "r");
for (i = 0; i < count; i++)
{
fscanf(file, "%f, %f, ", &readWeight, &readBias);
synapsePtr -> SetWeight(readWeight);
synapsePtr -> SetBias(readBias);
synapsePtr++;
}

for (j = 0; j < network -> hiddenSize; j++)
{
count = (network -> hidden + j) -> synapseSize;
synapsePtr = (network -> hidden + j) -> synapses;
for (i = 0; i < count; i++)
{
fscanf(file, "%f, %f, ", &readWeight, &readBias);
synapsePtr -> SetWeight(readWeight);
synapsePtr -> SetBias(readBias);
synapsePtr++;
}
}


synapsePtr = network -> output -> synapses;
count = network -> output -> synapseSize;
for (i = 0; i < count; i++)
{
fscanf(file, "%f, %f, ", &readWeight, &readBias);
synapsePtr -> SetWeight(readWeight);
synapsePtr -> SetBias(readBias);
synapsePtr++;
}
fclose(file);
}

bool NeuralNetwork::SetInputNeurons(int size)
{
return input -> CreateNeurons(size);
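For reference (not part of the commit): the new LoadFromFile above reads back exactly what WriteToFile produces, a flat run of "weight, bias, " pairs written with "%f, %f, ", input-layer synapses first, then each hidden layer, then the output layer. A self-contained sketch of a reader for that dump format, with illustrative SynapseRecord/ReadSynapseDump names:

    #include <cstdio>
    #include <vector>

    struct SynapseRecord { float weight; float bias; };

    // Reads "weight, bias, " pairs until EOF, matching the "%f, %f, "
    // format WriteToFile uses for Data/BestSynapses.txt.
    static std::vector<SynapseRecord> ReadSynapseDump(const char *path)
    {
        std::vector<SynapseRecord> records;
        FILE *file = std::fopen(path, "r");
        if (!file)
            return records;

        SynapseRecord record;
        while (std::fscanf(file, "%f, %f, ", &record.weight, &record.bias) == 2)
            records.push_back(record);

        std::fclose(file);
        return records;
    }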
@@ -690,6 +733,7 @@ double RandomDouble(int min, int max)
void WriteBestToFile();
void UpdateScores(int);
void ResetScores();
void LoadBestFromFile(char *);
bool CreateNetworks(int, int);
bool ConnectNetworks();
bool SetInputNeurons(int);
@@ -859,6 +903,14 @@ double RandomDouble(int min, int max)
step++;
}

void Generation::LoadBestFromFile(char *filePath)
{
LoadFromFile(networks, filePath);
LoadFromFile(networks + 1, filePath);
this -> NextGeneration();
}


bool Generation::CreateNetworks(int size, int hiddenSizes)
{
if((networks = _CreateNetworks(size, hiddenSizes)))
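The new Generation::LoadBestFromFile above loads the saved synapses into the first two networks and then calls NextGeneration() so the restored weights spread through crossover. A hedged usage sketch, assuming the same topology main.cpp builds before loading; only calls that appear elsewhere in this diff are used:

    #include "Genetic.hpp"

    int main()
    {
        // Same shape as the training program: 4 inputs, five hidden layers of 2, 1 output.
        Generation generation(50, 5);
        generation.SetInputNeurons(4);
        for (int i = 0; i < 5; i++)
            generation.SetHiddenNeurons(i, 2);
        generation.SetOutputNeurons(1);
        generation.ConnectNetworks();

        // Restore the synapses previously saved by WriteBestToFile.
        char path[] = "Data/BestSynapses.txt";
        generation.LoadBestFromFile(path);
        return 0;
    }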
@@ -908,108 +960,3 @@ double RandomDouble(int min, int max)
return step;
}
#pragma endregion

int main()
{
FILE *inputFile;
FILE *outputFile;
int decision;

int trainCounter;
int inputCounter;
int doubleCounter;
int groupCounter;

double trainInputs[30][5];
double testInputs[120][5];
double currentError;
Generation generation(50, 5);

inputFile = fopen("Data/train.data", "r");
for (inputCounter = 0; inputCounter < 30; inputCounter++)
for (doubleCounter = 0; doubleCounter < 5; doubleCounter++)
fscanf(inputFile, "%lf,", &trainInputs[inputCounter][doubleCounter]);
fclose(inputFile);

inputFile = fopen("Data/test.data", "r");
for (inputCounter = 0; inputCounter < 120; inputCounter++)
for (doubleCounter = 0; doubleCounter < 5; doubleCounter++)
fscanf(inputFile, "%lf,", &testInputs[inputCounter][doubleCounter]);
fclose(inputFile);

std::cout << "Inputs Are Getting Set: ";
std::cout << (generation.SetInputNeurons(4) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Hidden 1 Are Getting Set: ";
std::cout << (generation.SetHiddenNeurons(0, 2) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Hidden 2 Are Getting Set: ";
std::cout << (generation.SetHiddenNeurons(1, 2) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Hidden 3 Are Getting Set: ";
std::cout << (generation.SetHiddenNeurons(2, 2) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Hidden 4 Are Getting Set: ";
std::cout << (generation.SetHiddenNeurons(3, 2) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Hidden 5 Are Getting Set: ";
std::cout << (generation.SetHiddenNeurons(4, 2) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Outputs Are Getting Set: ";
std::cout << (generation.SetOutputNeurons(1) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Networks Are Getting Connected: ";
std::cout << (generation.ConnectNetworks() ? "Successfull!" : "Failed!") << "\n";

std::cout << "Networks Are Getting Randomized: ";
generation.Randomize();
std::cout << "Done!\n";

do
{
std::cout << "\n[-1] Test\n[-2] Best to File\n[-3] Exit\nAny Positive Number for train count\nDecision: ";
std::cin >> decision;

switch (decision)
{
case -3:
std::cout << "Exiting...\n";
break;
case -2:
generation.WriteBestToFile();
break;
default:
for (trainCounter = 0; trainCounter < decision; trainCounter++)
{
std::cout << (trainCounter + 1) << "\n";
for (inputCounter = 0; inputCounter < 10; inputCounter++)
{
generation.ResetScores();
for (groupCounter = 0; groupCounter < 3; groupCounter++)
{
for (doubleCounter = 0; doubleCounter < 4; doubleCounter++)
generation.SetInput(trainInputs[inputCounter * 3 + groupCounter][doubleCounter], doubleCounter);
generation.SetTarget(trainInputs[inputCounter * 3 + groupCounter][4]);
generation.Fire();
generation.UpdateScores();
}
generation.SortByScore();
generation.NextGeneration();
}
}
std::cout << "Best Score -> " << generation.GetPredictionOfBestNetwork() << "\n";
std::cout << "Train is Over!\n";
// break; To test it after the train is done
case -1:
outputFile = fopen("Data/results.data", "w");
for (inputCounter = 0; inputCounter < 120; inputCounter++)
{
for (doubleCounter = 0; doubleCounter < 4; doubleCounter++)
generation.SetInput(testInputs[inputCounter][doubleCounter], doubleCounter);
generation.SetTarget(testInputs[inputCounter][4]);

generation.Fire();
currentError = testInputs[inputCounter][4] - generation.GetPredictionOfBestNetwork() < 0 ? generation.GetPredictionOfBestNetwork() - testInputs[inputCounter][4] : testInputs[inputCounter][4] - generation.GetPredictionOfBestNetwork();
fprintf(outputFile, "%lf,%lf,%lf\n", testInputs[inputCounter][4], generation.GetPredictionOfBestNetwork(), currentError);
}
fclose(outputFile);
std::cout << "Test is Over!\n";
break;
}
} while (decision != -3);

return 0;
}
main.cpp (477 changed lines)
@@ -1,391 +1,106 @@
#include <iostream>
#include <time.h>
+#include "Genetic.hpp"

#define InitialSynapseValue 1.0

class Synapse;
class Neuron;
class Layer;
class Input;
class Output;
class NeuralNetwork;

#pragma region Synapse
class Synapse
{
private:
float weight;
float value;
float bias;
public:
Synapse();
void SetValue(float);
void SetWeight(float);
void SetBias(float);
float Fire();
};

Synapse::Synapse()
{
this -> value = this -> weight = this -> bias = InitialSynapseValue;
}

void Synapse::SetValue(float value)
{
this -> value = value;
}

void Synapse::SetWeight(float weight)
{
this -> weight = weight;
}

void Synapse::SetBias(float bias)
{
this -> bias = bias;
}

float Synapse::Fire()
{
float result = 0.0;

result = this -> value * this -> weight + this -> bias;

return result;
}
#pragma endregion
#pragma region Neuron
class Neuron
{
private:
Synapse *incomings;
Synapse *forwards;
int incomingsSize;
int forwardsSize;
int layerSize;
public:
Neuron();
void ConnectIncomings(Synapse *, int);
void ConnectForwards(Synapse *, int, int);
void SetValue(float);
float GetValue();
};

Neuron::Neuron()
{
incomings = forwards = NULL;
incomingsSize = forwardsSize = layerSize = 0;
}

void Neuron::SetValue(float value)
{
for (int i = 0; i < forwardsSize; i++)
(forwards + i) -> SetValue(value);
}

void Neuron::ConnectIncomings(Synapse *incomings, int incomingsSize)
{
this -> incomings = incomings;
this -> incomingsSize = incomingsSize;
}

void Neuron::ConnectForwards(Synapse *forwards, int forwardsSize, int layerSize)
{
this -> forwards = forwards;
this -> forwardsSize = forwardsSize;
this -> layerSize = layerSize;
}

float Neuron::GetValue()
{
float result = 0.0;

if(!incomings) return result;

for (int i = 0; i < incomingsSize; i++)
result += (incomings + i) -> Fire();


if(!forwards) return result;

for (int i = 0; i < forwardsSize; i++)
(forwards + i * layerSize) -> SetValue(result);

return result;
}
#pragma endregion
#pragma region Layer
class Layer
{
protected:
Neuron *neurons;
Synapse *synapses;
int neuronSize;
int synapseSize;
Neuron *_CreateNeurons(int);
public:
Layer();
Layer(int);
~Layer();
void FireLayer();
bool CreateNeurons(int);
bool ConnectPrevious(Layer *);
bool ConnectForwards(Layer *);
int GetSize();
};

Layer::Layer()
{
neuronSize = synapseSize = 0;
neurons = NULL;
synapses = NULL;
}

Layer::Layer(int size)
{
neuronSize = synapseSize = 0;
synapses = NULL;
neurons = _CreateNeurons(size);
}

Layer::~Layer()
{
if(neurons) delete neurons;
if(synapses) delete synapses;
}

Neuron *Layer::_CreateNeurons(int size)
{
Neuron *newNeurons = NULL;
newNeurons = (Neuron *) new char[sizeof(Neuron) * size];

if(newNeurons)
for (int i = 0; i < size; i++)
*(newNeurons + i) = Neuron();

return newNeurons;
}

void Layer::FireLayer()
{
for (int i = 0; i < neuronSize; i++)
(neurons + i) -> GetValue();
}

bool Layer::CreateNeurons(int size)
{
if(neurons = _CreateNeurons(size))
neuronSize = size;
return neurons;
}

bool Layer::ConnectPrevious(Layer *previous)
{
int previousSize = previous -> GetSize();
int synapseCount = (this -> neuronSize) * previousSize;
int currentIndex = 0;
Synapse *currentSynapse = NULL;
Neuron *currentNeuron = NULL;

if(synapses) delete synapses;
synapses = (Synapse *) new char[sizeof(Synapse) * synapseCount];
if(!synapses) return false;

for (int thisNeuron = 0; thisNeuron < this -> neuronSize; thisNeuron++)
{
for (int prevNeuron = 0; prevNeuron < previousSize; prevNeuron++)
{
currentIndex = thisNeuron * previousSize + prevNeuron;
currentSynapse = (synapses + currentIndex);
currentNeuron = (previous -> neurons) + prevNeuron;

*currentSynapse = Synapse();
}

currentNeuron = (neurons + thisNeuron);
currentNeuron -> ConnectIncomings((synapses + thisNeuron * previousSize), previousSize);
}

synapseSize = synapseCount;
return previous -> ConnectForwards(this);
}

bool Layer::ConnectForwards(Layer *forwards)
{
int forwardsSize = forwards -> neuronSize;
Neuron *currentNeuron = NULL;

for (int thisNeuron = 0; thisNeuron < this -> neuronSize; thisNeuron++)
{
currentNeuron = (neurons + thisNeuron);
for (int forwardNeuron = 0; forwardNeuron < forwardsSize; forwardNeuron++)
currentNeuron -> ConnectForwards(forwards -> synapses + thisNeuron, forwardsSize, this -> neuronSize);
}
return true;
}

int Layer::GetSize()
{
return neuronSize;
}
#pragma region Input-Output
class Input : public Layer
{
public:
Input();
void SetValue(int, float);
};

Input::Input() : Layer() {}
void Input::SetValue(int index, float value)
{
if(index >= this -> neuronSize || index < 0)
return;

(neurons + index) -> SetValue(value);
}

class Output : public Layer
{
public:
Output();
float GetValue(int);
};

Output::Output() : Layer() {}
float Output::GetValue(int index)
{
float result = 0.0;

if(index >= this -> neuronSize || index < 0)
return result;

result = (neurons + index) -> GetValue();
return result;
}
#pragma endregion
#pragma endregion
#pragma region NeuralNetwork
class NeuralNetwork
{
private:
Input *input;
Layer *hidden;
Output *output;
int hiddenSize;
public:
NeuralNetwork();
NeuralNetwork(int);
~NeuralNetwork();
void FireNetwork();
bool SetInputNeurons(int);
bool SetHiddenNeurons(int, int);
bool SetOutputNeurons(int);
bool ConnectLayers();
float GetOutput(int);
void SetInput(int, float);
};

NeuralNetwork::NeuralNetwork()
{
hiddenSize = 0;
input = NULL;
hidden = NULL;
output = NULL;
}

NeuralNetwork::NeuralNetwork(int hiddenSize)
{
this -> hiddenSize = hiddenSize;
input = new Input();
hidden = new Layer(hiddenSize);
output = new Output();
}

NeuralNetwork::~NeuralNetwork()
{
if(input) delete input;
if(hidden) delete hidden;
if(output) delete output;
}

void NeuralNetwork::FireNetwork()
{
for (int i = 0; i < hiddenSize; i++)
(hidden + i) -> FireLayer();

output -> FireLayer();
}

bool NeuralNetwork::SetInputNeurons(int size)
{
return input -> CreateNeurons(size);
}

bool NeuralNetwork::SetHiddenNeurons(int index, int size)
{
return (hidden + index) -> CreateNeurons(size);
}

bool NeuralNetwork::SetOutputNeurons(int size)
{
return output -> CreateNeurons(size);
}

bool NeuralNetwork::ConnectLayers()
{
if(!hidden -> ConnectPrevious(input))
return false;

for (int i = 1; i < hiddenSize; i++)
if(!(hidden + i) -> ConnectPrevious((hidden + i - 1)))
return false;

if(output -> ConnectPrevious((hidden + hiddenSize - 1)))
return false;

return true;
}

float NeuralNetwork::GetOutput(int index)
{
return output -> GetValue(index);
}

void NeuralNetwork::SetInput(int index, float value)
{
input -> SetValue(index, value);
}
#pragma endregion


-int main(int argc, char const *argv[])
+int main()
{
NeuralNetwork network(3);
FILE *inputFile;
FILE *outputFile;
int decision;

#pragma region Initialization
network.SetInputNeurons(1);
network.SetHiddenNeurons(0, 2);
network.SetHiddenNeurons(1, 3);
network.SetHiddenNeurons(2, 2);
network.SetOutputNeurons(1);
int trainCounter;
int inputCounter;
int doubleCounter;
int groupCounter;

network.ConnectLayers();
#pragma endregion
double trainInputs[30][5];
double testInputs[120][5];
double currentError;
Generation generation(50, 5);

#pragma region Fixed Bias&Weight
network.SetInput(0, 1);
network.FireNetwork();
std::cout << "Result = " << network.GetOutput(0) << "\n";
inputFile = fopen("Data/train.data", "r");
for (inputCounter = 0; inputCounter < 30; inputCounter++)
for (doubleCounter = 0; doubleCounter < 5; doubleCounter++)
fscanf(inputFile, "%lf,", &trainInputs[inputCounter][doubleCounter]);
fclose(inputFile);

network.SetInput(0, 2);
network.FireNetwork();
std::cout << "Result = " << network.GetOutput(0) << "\n";
inputFile = fopen("Data/test.data", "r");
for (inputCounter = 0; inputCounter < 120; inputCounter++)
for (doubleCounter = 0; doubleCounter < 5; doubleCounter++)
fscanf(inputFile, "%lf,", &testInputs[inputCounter][doubleCounter]);
fclose(inputFile);

network.SetInput(0, 3);
network.FireNetwork();
std::cout << "Result = " << network.GetOutput(0) << "\n";
#pragma endregion
std::cout << "Inputs Are Getting Set: ";
std::cout << (generation.SetInputNeurons(4) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Hidden 1 Are Getting Set: ";
std::cout << (generation.SetHiddenNeurons(0, 2) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Hidden 2 Are Getting Set: ";
std::cout << (generation.SetHiddenNeurons(1, 2) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Hidden 3 Are Getting Set: ";
std::cout << (generation.SetHiddenNeurons(2, 2) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Hidden 4 Are Getting Set: ";
std::cout << (generation.SetHiddenNeurons(3, 2) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Hidden 5 Are Getting Set: ";
std::cout << (generation.SetHiddenNeurons(4, 2) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Outputs Are Getting Set: ";
std::cout << (generation.SetOutputNeurons(1) ? "Successfull!" : "Failed!") << "\n";
std::cout << "Networks Are Getting Connected: ";
std::cout << (generation.ConnectNetworks() ? "Successfull!" : "Failed!") << "\n";

std::cout << "Networks Are Getting Randomized: ";
generation.Randomize();
std::cout << "Done!\n";

do
{
std::cout << "\n[-1] Test\n[-2] Best to File\n[-3] Exit\nAny Positive Number for train count\nDecision: ";
std::cin >> decision;

switch (decision)
{
case -3:
std::cout << "Exiting...\n";
break;
case -2:
generation.WriteBestToFile();
break;
default:
for (trainCounter = 0; trainCounter < decision; trainCounter++)
{
std::cout << (trainCounter + 1) << "\n";
for (inputCounter = 0; inputCounter < 10; inputCounter++)
{
generation.ResetScores();
for (groupCounter = 0; groupCounter < 3; groupCounter++)
{
for (doubleCounter = 0; doubleCounter < 4; doubleCounter++)
generation.SetInput(trainInputs[inputCounter * 3 + groupCounter][doubleCounter], doubleCounter);
generation.SetTarget(trainInputs[inputCounter * 3 + groupCounter][4]);
generation.Fire();
generation.UpdateScores();
}
generation.SortByScore();
generation.NextGeneration();
}
}
std::cout << "Best Score -> " << generation.GetPredictionOfBestNetwork() << "\n";
std::cout << "Train is Over!\n";
// break; To test it after the train is done
case -1:
outputFile = fopen("Data/results.data", "w");
for (inputCounter = 0; inputCounter < 120; inputCounter++)
{
for (doubleCounter = 0; doubleCounter < 4; doubleCounter++)
generation.SetInput(testInputs[inputCounter][doubleCounter], doubleCounter);
generation.SetTarget(testInputs[inputCounter][4]);

generation.Fire();
currentError = testInputs[inputCounter][4] - generation.GetPredictionOfBestNetwork() < 0 ? generation.GetPredictionOfBestNetwork() - testInputs[inputCounter][4] : testInputs[inputCounter][4] - generation.GetPredictionOfBestNetwork();
fprintf(outputFile, "%lf,%lf,%lf\n", testInputs[inputCounter][4], generation.GetPredictionOfBestNetwork(), currentError);
}
fclose(outputFile);
std::cout << "Test is Over!\n";
break;
}
} while (decision != -3);

return 0;
}