WIP MLP class still in development.

Author: davidjacnogueira
Date:   2016-01-22 02:15:36 +00:00
parent 5223a4fc50
commit a9e908bcbf
9 changed files with 252 additions and 3 deletions

Changed file: Visual Studio project file (.vcxproj, name not shown in this view)

@@ -142,7 +142,9 @@
     </Link>
   </ItemDefinitionGroup>
   <ItemGroup>
+    <ClInclude Include="..\src\Layer.h" />
     <ClInclude Include="..\src\MLP.h" />
+    <ClInclude Include="..\src\Node.h" />
     <ClInclude Include="..\src\Sample.h" />
     <ClInclude Include="..\src\Utils.h" />
   </ItemGroup>

Changed file: Visual Studio project filters file (.vcxproj.filters, name not shown in this view)

@@ -24,6 +24,12 @@
     <ClInclude Include="..\src\MLP.h">
       <Filter>Header Files</Filter>
     </ClInclude>
+    <ClInclude Include="..\src\Layer.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
+    <ClInclude Include="..\src\Node.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
   </ItemGroup>
   <ItemGroup>
     <ClCompile Include="..\src\Main.cpp">

src/Layer.h (new file, 39 lines)

@@ -0,0 +1,39 @@
//============================================================================
// Name : Layer.h
// Author : David Nogueira
//============================================================================
#ifndef LAYER_H
#define LAYER_H
#include "Node.h"
#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#include <sstream>
#include <fstream>
#include <vector>
#include <algorithm>
class Layer {
public:
  Layer() {
    m_num_nodes = 0;
    m_nodes.clear();
  };
  Layer(int num_nodes, int num_inputs_per_node) {
    m_num_nodes = num_nodes;
    m_nodes = std::vector<Node>(num_nodes, Node(num_inputs_per_node));
  };
  ~Layer() {
  };
protected:
  int m_num_nodes;
  std::vector<Node> m_nodes;
};
#endif //LAYER_H
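
A Layer is just a fixed-size collection of Node objects. A minimal usage sketch (illustration only, not part of the commit), assuming only the two constructors shown above:

// Requires "Layer.h"; both lines rely solely on the constructors above.
Layer hidden(4, 3);  // four nodes, each built as Node(3), i.e. three weights per node
Layer empty;         // default-constructed: zero nodes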

src/MLP.cpp (new file, 14 lines)

@@ -0,0 +1,14 @@
//============================================================================
// Name : MLP.cpp
// Author : David Nogueira
//============================================================================
#include "MLP.h"
#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#include <sstream>
#include <fstream>
#include <vector>
#include <algorithm>

src/MLP.h (new file, 75 lines)

@@ -0,0 +1,75 @@
//============================================================================
// Name : MLP.h
// Author : David Nogueira
//============================================================================
#ifndef MLP_H
#define MLP_H
#include "Layer.h"
#include "Sample.h"
#include "Utils.h"
#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#include <sstream>
#include <fstream>
#include <vector>
#include <algorithm>
class MLP {
public:
  MLP(int num_inputs,
      int num_outputs,
      int num_hidden_layers,
      int num_nodes_per_hidden_layer,
      double learning_rate,
      int max_iterations,
      double threshold) {
    m_num_inputs = num_inputs;
    m_num_outputs = num_outputs;
    m_num_hidden_layers = num_hidden_layers;
    m_num_nodes_per_hidden_layer = num_nodes_per_hidden_layer;
    m_learning_rate = learning_rate;
    m_max_iterations = max_iterations;
    m_threshold = threshold;
  };
  ~MLP() {
    m_layers.clear();
  };
  void CreateMLP() {
    if (m_num_hidden_layers > 0) {
      //first layer
      m_layers.emplace_back(Layer(m_num_nodes_per_hidden_layer, m_num_inputs));
      //subsequent layers
      for (int i = 0; i < m_num_hidden_layers - 1; i++) {
        m_layers.emplace_back(Layer(m_num_nodes_per_hidden_layer,
                                    m_num_nodes_per_hidden_layer));
      }
      //last layer
      m_layers.emplace_back(Layer(m_num_outputs, m_num_nodes_per_hidden_layer));
    } else {
      m_layers.emplace_back(Layer(m_num_outputs, m_num_inputs));
    }
  }
private:
  int m_num_inputs;
  int m_num_outputs;
  int m_num_hidden_layers;
  int m_num_nodes_per_hidden_layer;
  double m_learning_rate;
  int m_max_iterations;
  double m_threshold;
  std::vector<Layer> m_layers;
};
#endif //MLP_H
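
For orientation, a sketch of the topology CreateMLP builds (illustration only, not part of the commit; the configuration values below are assumptions, only the constructor and CreateMLP shown above are used):

// Requires "MLP.h".
MLP mlp(2, 1, 2, 3, 0.1, 100, 0.5);  // 2 inputs, 1 output, 2 hidden layers of 3 nodes
mlp.CreateMLP();
// m_layers now holds three Layer objects:
//   Layer(3, 2)  -> first hidden layer: 3 nodes, 2 weights each
//   Layer(3, 3)  -> second hidden layer: 3 nodes, 3 weights each
//   Layer(1, 3)  -> output layer: 1 node, 3 weights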

src/Main.cpp

@@ -2,7 +2,6 @@
 // Name : Main.cpp
 // Author : David Nogueira
 //============================================================================
 #include "MLP.h"
 #include <stdio.h>
 #include <stdlib.h>
@@ -101,6 +100,28 @@ void LearnNOR() {
   std::cout << std::endl;
 }
+void LearnXOR() {
+  std::cout << "Train XOR function with mlp." << std::endl;
+  std::vector<TrainingSample> training_set =
+  {
+    { { 1, 0, 0 },{ 1,0 } },
+    { { 1, 0, 1 },{ 0,1 } },
+    { { 1, 1, 0 },{ 0,1 } },
+    { { 1, 1, 1 },{ 1,0 } }
+  };
+  MLP my_mlp(0.1, 100, 0.5);
+  my_mlp.Train(training_set, 1, 1);
+  assert(my_mlp.GetOutput({ 1, 0, 0 }) == 0);
+  assert(my_mlp.GetOutput({ 1, 0, 1 }) == 1);
+  assert(my_mlp.GetOutput({ 1, 1, 0 }) == 1);
+  assert(my_mlp.GetOutput({ 1, 1, 1 }) == 0);
+  std::cout << "Trained with success." << std::endl;
+  std::cout << std::endl;
+}
 void LearnNOT() {
   std::cout << "Train NOT function with mlp." << std::endl;
@@ -124,6 +145,7 @@ int main() {
   LearnNAND();
   LearnOR();
   LearnNOR();
+  LearnXOR();
   LearnNOT();
   return 0;
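
A note on the LearnXOR hunk (editorial, not part of the commit): the first element of each input looks like a constant bias term and the targets use a two-element one-hot encoding, while the three-argument MLP construction and the Train/GetOutput calls do not yet match the seven-parameter MLP class added in this commit, which so far exposes only its constructor and CreateMLP; the commit message flags the class as work in progress. A hedged sketch of how the construction might eventually line up with the header above, with purely illustrative values:

// Hypothetical follow-up; requires "MLP.h" and uses only members that exist in it.
MLP my_mlp(3,     // num_inputs  (bias + two XOR inputs)
           2,     // num_outputs (one-hot {1,0} / {0,1} targets)
           1,     // num_hidden_layers (XOR is not linearly separable, so at least one)
           2,     // num_nodes_per_hidden_layer
           0.1,   // learning_rate
           100,   // max_iterations
           0.5);  // threshold
my_mlp.CreateMLP();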

src/Node.h (new file, 82 lines)

@@ -0,0 +1,82 @@
//============================================================================
// Name : Node.h
// Author : David Nogueira
//============================================================================
#ifndef NODE_H
#define NODE_H
#include "Utils.h" // needed for utils::gen_rand used in the weight initialization below
#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#include <sstream>
#include <fstream>
#include <vector>
#include <algorithm>
#define ZERO_WEIGHT_INITIALIZATION 1
class Node {
public:
  Node() {
    m_bias = 0.0;
    //m_old_bias = 0.0;
    m_num_inputs = 0;
    m_weights.clear();
    //m_old_weights.clear();
  };
  Node(int num_inputs) {
    m_bias = 0.0;
    //m_old_bias = 0.0;
    m_num_inputs = num_inputs;
    m_weights.clear();
    //m_old_weights.clear();
    m_weights = std::vector<double>(num_inputs);
    //m_old_weights = std::vector<double>(num_inputs);
    //initialize weight vector
    std::generate_n(m_weights.begin(),
                    num_inputs,
                    (ZERO_WEIGHT_INITIALIZATION) ?
                      utils::gen_rand(0) : utils::gen_rand());
  };
  ~Node() {
    m_weights.clear();
    //m_old_weights.clear();
  };
  int GetInputSize() {
    return m_num_inputs;
  }
  void SetInputSize(int num_inputs) {
    m_num_inputs = num_inputs;
  }
  double GetBias() {
    return m_bias;
  }
  //double GetOldBias() {
  //  return m_old_bias;
  //}
  void SetBias(double bias) {
    m_bias = bias;
  }
  //void SetOldBias(double old_bias) {
  //  m_old_bias = old_bias;
  //}
  std::vector<double> & GetWeights() {
    return m_weights;
  }
  //std::vector<double> & GetOldWeights() {
  //  return m_old_weights;
  //}
  uint32_t GetWeightsVectorSize() const {
    return m_weights.size();
  }
protected:
  int m_num_inputs;
  double m_bias;
  //double m_old_bias;
  std::vector<double> m_weights;
  //std::vector<double> m_old_weights;
};
#endif //NODE_H
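
A side note on the weight initialization in Node(int) (illustration only, not in the commit): the ternary selects which functor std::generate_n runs. With ZERO_WEIGHT_INITIALIZATION set to 1, utils::gen_rand(0) has a factor and offset of zero, so every weight starts at 0.0; with it set to 0, utils::gen_rand() (default r = 2.0 after this commit) yields pseudo-random weights. A condensed sketch of the two cases:

// Inside a function; requires <vector>, <algorithm>, and utils::gen_rand from Utils.h below.
std::vector<double> w(5);
std::generate_n(w.begin(), w.size(), utils::gen_rand(0)); // all weights 0.0 (zero init)
std::generate_n(w.begin(), w.size(), utils::gen_rand());  // pseudo-random values in [-1, 1]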

src/Sample.h

@@ -1,3 +1,7 @@
+//============================================================================
+// Name : Sample.h
+// Author : David Nogueira
+//============================================================================
 #ifndef TRAININGSAMPLE_H
 #define TRAININGSAMPLE_H

src/Utils.h

@@ -1,3 +1,7 @@
+//============================================================================
+// Name : Utils.h
+// Author : David Nogueira
+//============================================================================
 #ifndef UTILS_H
 #define UTILS_H
@@ -15,10 +19,11 @@ namespace utils {
 struct gen_rand {
   double factor;
+  double offset;
 public:
-  gen_rand(double r = 1.0) : factor(r / RAND_MAX) {}
+  gen_rand(double r = 2.0) : factor(r / RAND_MAX), offset(r / 2) {}
   double operator()() {
-    return rand() * factor;
+    return rand() * factor - offset;
   }
 };
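
The practical effect of this change (editorial note, not in the commit): with the old default r = 1.0, gen_rand produced values in [0, 1]; with the new default r = 2.0 and the offset of r / 2, it produces values centered on zero, in [-1, 1]. Worked at the extremes:

// rand() == 0        -> 0 * (2.0 / RAND_MAX) - 1.0 = -1.0
// rand() == RAND_MAX -> RAND_MAX * (2.0 / RAND_MAX) - 1.0 = +1.0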