diff --git a/MLP_MVS/MLP_MVS.vcxproj b/MLP_MVS/MLP_MVS.vcxproj
index 4b79bac..151acca 100644
--- a/MLP_MVS/MLP_MVS.vcxproj
+++ b/MLP_MVS/MLP_MVS.vcxproj
@@ -142,7 +142,9 @@
+    <ClInclude Include="..\src\Layer.h" />
+    <ClInclude Include="..\src\Node.h" />
diff --git a/MLP_MVS/MLP_MVS.vcxproj.filters b/MLP_MVS/MLP_MVS.vcxproj.filters
index 5e3ac27..8822b46 100644
--- a/MLP_MVS/MLP_MVS.vcxproj.filters
+++ b/MLP_MVS/MLP_MVS.vcxproj.filters
@@ -24,6 +24,12 @@
       <Filter>Header Files</Filter>
     </ClInclude>
+    <ClInclude Include="..\src\Layer.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
+    <ClInclude Include="..\src\Node.h">
+      <Filter>Header Files</Filter>
+    </ClInclude>
diff --git a/src/Layer.h b/src/Layer.h
new file mode 100644
index 0000000..4eb1b65
--- /dev/null
+++ b/src/Layer.h
@@ -0,0 +1,39 @@
+//============================================================================
+// Name : Layer.h
+// Author : David Nogueira
+//============================================================================
+#ifndef LAYER_H
+#define LAYER_H
+
+#include "Node.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <math.h>
+#include <algorithm>
+#include <cassert>
+#include <vector>
+#include <iostream>
+
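+// A Layer is an ordered collection of Nodes that all share the same inputs;
+// the network is built as a stack of Layers.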
+class Layer {
+public:
+ Layer() {
+ m_num_nodes = 0;
+ m_nodes.clear();
+ };
+
+
+ Layer(int num_nodes, int num_inputs_per_node) {
+ m_num_nodes = num_nodes;
+ m_nodes = std::vector<Node>(num_nodes, Node(num_inputs_per_node));
+ };
+
+ ~Layer() {
+
+ };
+protected:
+ int m_num_nodes;
+ std::vector<Node> m_nodes;
+};
+
+#endif //LAYER_H
\ No newline at end of file
diff --git a/src/MLP.cpp b/src/MLP.cpp
new file mode 100644
index 0000000..b158b40
--- /dev/null
+++ b/src/MLP.cpp
@@ -0,0 +1,14 @@
+//============================================================================
+// Name : MLP.cpp
+// Author : David Nogueira
+//============================================================================
+#include "MLP.h"
+#include <stdio.h>
+#include <stdlib.h>
+#include <math.h>
+#include <algorithm>
+#include <cassert>
+#include <vector>
+#include <iostream>
+
+
diff --git a/src/MLP.h b/src/MLP.h
new file mode 100644
index 0000000..ade9b17
--- /dev/null
+++ b/src/MLP.h
@@ -0,0 +1,75 @@
+//============================================================================
+// Name : MLP.h
+// Author : David Nogueira
+//============================================================================
+#ifndef MLP_H
+#define MLP_H
+
+#include "Layer.h"
+#include "Sample.h"
+#include "Utils.h"
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <math.h>
+#include <algorithm>
+#include <cassert>
+#include <vector>
+#include <iostream>
+
+class MLP {
+public:
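+ // Stores the topology and training hyper-parameters; the layers
+ // themselves are only built when CreateMLP() is called.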
+ MLP(int num_inputs,
+ int num_outputs,
+ int num_hidden_layers,
+ int num_nodes_per_hidden_layer,
+ double learning_rate,
+ int max_iterations,
+ double threshold) {
+
+ m_num_inputs = num_inputs;
+ m_num_outputs = num_outputs;
+ m_num_hidden_layers = num_hidden_layers;
+ m_num_nodes_per_hidden_layer = num_nodes_per_hidden_layer;
+
+ m_learning_rate = learning_rate;
+ m_max_iterations = max_iterations;
+ m_threshold = threshold;
+ };
+
+ ~MLP() {
+ m_layers.clear();
+ };
+
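+ // Builds the network: the first hidden layer reads the raw inputs, each
+ // further hidden layer reads the previous one, and the output layer reads
+ // the last hidden layer. With no hidden layers, the outputs connect
+ // directly to the inputs (a single-layer perceptron).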
+ void CreateMLP() {
+ if (m_num_hidden_layers > 0) {
+ //first layer
+ m_layers.emplace_back(Layer(m_num_nodes_per_hidden_layer, m_num_inputs));
+ //subsequent layers
+ for (int i = 0; i < m_num_hidden_layers - 1; i++) {
+ m_layers.emplace_back(Layer(m_num_nodes_per_hidden_layer,
+ m_num_nodes_per_hidden_layer));
+ }
+ //last layer
+ m_layers.emplace_back(Layer(m_num_outputs, m_num_nodes_per_hidden_layer));
+ } else {
+ m_layers.emplace_back(Layer(m_num_outputs, m_num_inputs));
+ }
+ }
+
+
+private:
+
+ int m_num_inputs;
+ int m_num_outputs;
+ int m_num_hidden_layers;
+ int m_num_nodes_per_hidden_layer;
+
+ double m_learning_rate;
+ int m_max_iterations;
+ double m_threshold;
+
+ std::vector<Layer> m_layers;
+};
+
+#endif //MLP_H
\ No newline at end of file
diff --git a/src/Main.cpp b/src/Main.cpp
index 05b9074..2618b38 100644
--- a/src/Main.cpp
+++ b/src/Main.cpp
@@ -2,7 +2,6 @@
// Name : Main.cpp
// Author : David Nogueira
//============================================================================
-
#include "MLP.h"
#include <iostream>
#include <cassert>
@@ -101,6 +100,28 @@ void LearnNOR() {
std::cout << std::endl;
}
+void LearnXOR() {
+ std::cout << "Train XOR function with mlp." << std::endl;
+
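+ // XOR is not linearly separable, so a single-layer perceptron cannot
+ // learn it; the first component of each input (always 1) acts as the
+ // bias term.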
+ std::vector<TrainingSample> training_set =
+ {
+ { { 1, 0, 0 }, { 1, 0 } },
+ { { 1, 0, 1 }, { 0, 1 } },
+ { { 1, 1, 0 }, { 0, 1 } },
+ { { 1, 1, 1 }, { 1, 0 } }
+ };
+
+ MLP my_mlp(0.1, 100, 0.5);
+ my_mlp.Train(training_set, 1, 1);
+
+ assert(my_mlp.GetOutput({ 1, 0, 0 }) == 0);
+ assert(my_mlp.GetOutput({ 1, 0, 1 }) == 1);
+ assert(my_mlp.GetOutput({ 1, 1, 0 }) == 1);
+ assert(my_mlp.GetOutput({ 1, 1, 1 }) == 0);
+ std::cout << "Trained with success." << std::endl;
+ std::cout << std::endl;
+}
+
void LearnNOT() {
std::cout << "Train NOT function with mlp." << std::endl;
@@ -124,6 +145,7 @@ int main() {
LearnNAND();
LearnOR();
LearnNOR();
+ LearnXOR();
LearnNOT();
return 0;
diff --git a/src/Node.h b/src/Node.h
new file mode 100644
index 0000000..f3ea89c
--- /dev/null
+++ b/src/Node.h
@@ -0,0 +1,82 @@
+//============================================================================
+// Name : Node.h
+// Author : David Nogueira
+//============================================================================
+#ifndef NODE_H
+#define NODE_H
+
+#include "Utils.h"
+#include <stdio.h>
+#include <stdlib.h>
+#include <stdint.h>
+#include <algorithm>
+#include <cassert>
+#include <vector>
+
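+// When 1, all weights start at zero; when 0, they are initialized with
+// random values from utils::gen_rand().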
+#define ZERO_WEIGHT_INITIALIZATION 1
+
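+// A Node models a single neuron: one weight per input plus a bias.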
+class Node {
+public:
+ Node() {
+ m_bias = 0.0;
+ //m_old_bias = 0.0;
+ m_num_inputs = 0;
+ m_weights.clear();
+ //m_old_weights.clear();
+ };
+ Node(int num_inputs) {
+ m_bias = 0.0;
+ //m_old_bias = 0.0;
+ m_num_inputs = num_inputs;
+ m_weights.clear();
+ //m_old_weights.clear();
+ m_weights = std::vector<double>(num_inputs);
+ //m_old_weights = std::vector<double>(num_inputs);
+
+ //initialize weight vector
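+ // gen_rand(0) always returns 0.0, while the default gen_rand() draws
+ // uniformly from [-1, 1] (see Utils.h)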
+ std::generate_n(m_weights.begin(),
+ num_inputs,
+ (ZERO_WEIGHT_INITIALIZATION) ?
+ utils::gen_rand(0) : utils::gen_rand());
+ };
+ ~Node() {
+ m_weights.clear();
+ //m_old_weights.clear();
+ };
+ int GetInputSize() {
+ return m_num_inputs;
+ }
+ void SetInputSize(int num_inputs) {
+ m_num_inputs = num_inputs;
+ }
+ double GetBias() {
+ return m_bias;
+ }
+ //double GetOldBias() {
+ // return m_old_bias;
+ //}
+ void SetBias(double bias) {
+ m_bias = bias;
+ }
+ //void SetOldBias(double old_bias) {
+ // m_old_bias = old_bias;
+ //}
+ std::vector<double> & GetWeights() {
+ return m_weights;
+ }
+ //std::vector<double> & GetOldWeights() {
+ // return m_old_weights;
+ //}
+ uint32_t GetWeightsVectorSize() const {
+ return static_cast<uint32_t>(m_weights.size());
+ }
+
+protected:
+ int m_num_inputs;
+ double m_bias;
+ //double m_old_bias;
+ std::vector<double> m_weights;
+ //std::vector<double> m_old_weights;
+};
+
+#endif //NODE_H
\ No newline at end of file
diff --git a/src/Sample.h b/src/Sample.h
index f4ca12e..c1de3bf 100644
--- a/src/Sample.h
+++ b/src/Sample.h
@@ -1,3 +1,7 @@
+//============================================================================
+// Name : Sample.h
+// Author : David Nogueira
+//============================================================================
#ifndef TRAININGSAMPLE_H
#define TRAININGSAMPLE_H
diff --git a/src/Utils.h b/src/Utils.h
index c663c41..d2af857 100644
--- a/src/Utils.h
+++ b/src/Utils.h
@@ -1,3 +1,7 @@
+//============================================================================
+// Name : Utils.h
+// Author : David Nogueira
+//============================================================================
#ifndef UTILS_H
#define UTILS_H
@@ -15,10 +19,11 @@ namespace utils {
struct gen_rand {
double factor;
+ double offset;
public:
- gen_rand(double r = 1.0) : factor(r / RAND_MAX) {}
+ gen_rand(double r = 2.0) : factor(r / RAND_MAX), offset(r / 2) {}
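+ // draws uniform samples in [-offset, r - offset]; the default r = 2.0
+ // gives [-1, 1]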
double operator()() {
- return rand() * factor;
+ return rand() * factor - offset;
}
};