Mirror of https://github.com/davidalbertonogueira/MLP.git (synced 2025-12-17 04:14:41 +03:00)
Almost complete.
Works with zero hidden layers, but not yet with one or more; the correctness of the backpropagation phase still needs to be verified.
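The backpropagation concern noted above is usually settled with a finite-difference gradient check: perturb one weight, recompute the cost, and compare the numerical slope with the analytic gradient. A self-contained sketch of the technique (not part of this commit; it uses a standalone sigmoid neuron rather than the repository's Node and Layer classes):

#include <cmath>
#include <cstdio>
#include <vector>

// Standalone single sigmoid neuron: o = sigmoid(w . x), E = (t - o)^2.
static double sigmoid(double z) { return 1.0 / (1.0 + std::exp(-z)); }

static double cost(const std::vector<double>& w, const std::vector<double>& x, double t) {
  double net = 0.0;
  for (size_t i = 0; i < w.size(); ++i) net += w[i] * x[i];
  double o = sigmoid(net);
  return (t - o) * (t - o);
}

int main() {
  std::vector<double> w = {0.5, 0.5, 0.5};   // weights (last one acts on the bias input)
  std::vector<double> x = {1.0, 0.0, 1.0};   // input with bias value 1 appended
  double t = 1.0;                            // target

  double net = 0.0;
  for (size_t i = 0; i < w.size(); ++i) net += w[i] * x[i];
  double o = sigmoid(net);

  for (size_t i = 0; i < w.size(); ++i) {
    // Analytic gradient via the chain rule: dE/dw_i = -2(t - o) * o(1 - o) * x_i.
    double analytic = -2.0 * (t - o) * o * (1.0 - o) * x[i];

    // Numerical gradient via central differences.
    const double eps = 1e-6;
    std::vector<double> wp = w, wm = w;
    wp[i] += eps; wm[i] -= eps;
    double numeric = (cost(wp, x, t) - cost(wm, x, t)) / (2.0 * eps);

    std::printf("w[%zu]: analytic=%.8f numeric=%.8f diff=%.2e\n",
                i, analytic, numeric, std::abs(analytic - numeric));
  }
  return 0;
}

If the analytic and numeric values disagree by much more than the finite-difference error itself, the backprop formulas are suspect.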
.gitignore (vendored, 1 line changed)
@@ -78,6 +78,7 @@ ipch/
*.opensdf
*.sdf
*.cachefile
*.db

# Visual Studio profiler
*.psess
MLP_MVS/LayerTest/LayerTest.vcxproj (new file, 162 lines)
@@ -0,0 +1,162 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project DefaultTargets="Build" ToolsVersion="14.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<ItemGroup Label="ProjectConfigurations">
|
||||
<ProjectConfiguration Include="Debug|Win32">
|
||||
<Configuration>Debug</Configuration>
|
||||
<Platform>Win32</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Release|Win32">
|
||||
<Configuration>Release</Configuration>
|
||||
<Platform>Win32</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Debug|x64">
|
||||
<Configuration>Debug</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Release|x64">
|
||||
<Configuration>Release</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
</ItemGroup>
|
||||
<PropertyGroup Label="Globals">
|
||||
<ProjectGuid>{10A8D77B-A596-4B06-87DA-B28492D77905}</ProjectGuid>
|
||||
<Keyword>Win32Proj</Keyword>
|
||||
<RootNamespace>LayerTest</RootNamespace>
|
||||
<WindowsTargetPlatformVersion>8.1</WindowsTargetPlatformVersion>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>true</UseDebugLibraries>
|
||||
<PlatformToolset>v140</PlatformToolset>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>false</UseDebugLibraries>
|
||||
<PlatformToolset>v140</PlatformToolset>
|
||||
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>true</UseDebugLibraries>
|
||||
<PlatformToolset>v140</PlatformToolset>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>false</UseDebugLibraries>
|
||||
<PlatformToolset>v140</PlatformToolset>
|
||||
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
|
||||
<ImportGroup Label="ExtensionSettings">
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="Shared">
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<PropertyGroup Label="UserMacros" />
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||
<LinkIncremental>true</LinkIncremental>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<LinkIncremental>true</LinkIncremental>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||
<LinkIncremental>false</LinkIncremental>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<LinkIncremental>false</LinkIncremental>
|
||||
</PropertyGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||
<ClCompile>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
<Optimization>Disabled</Optimization>
|
||||
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<ClCompile>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
<Optimization>Disabled</Optimization>
|
||||
<PreprocessorDefinitions>_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<AdditionalIncludeDirectories>$(SolutionDir)..\deps</AdditionalIncludeDirectories>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||
<ClCompile>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<Optimization>MaxSpeed</Optimization>
|
||||
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||
<OptimizeReferences>true</OptimizeReferences>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<ClCompile>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<Optimization>MaxSpeed</Optimization>
|
||||
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||
<PreprocessorDefinitions>NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<AdditionalIncludeDirectories>$(SolutionDir)..\deps</AdditionalIncludeDirectories>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||
<OptimizeReferences>true</OptimizeReferences>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemGroup>
|
||||
<ClCompile Include="..\..\src\LayerTest.cpp">
|
||||
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</ExcludedFromBuild>
|
||||
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">false</ExcludedFromBuild>
|
||||
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">true</ExcludedFromBuild>
|
||||
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">false</ExcludedFromBuild>
|
||||
</ClCompile>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\MLP_MVS.vcxproj">
|
||||
<Project>{6bfa9d94-b136-4985-83a1-ee76fff6f374}</Project>
|
||||
</ProjectReference>
|
||||
</ItemGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
|
||||
<ImportGroup Label="ExtensionTargets">
|
||||
</ImportGroup>
|
||||
</Project>
|
||||
MLP_MVS/LayerTest/LayerTest.vcxproj.filters (new file, 22 lines)
@@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <ItemGroup>
    <Filter Include="Source Files">
      <UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
      <Extensions>cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx</Extensions>
    </Filter>
    <Filter Include="Header Files">
      <UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
      <Extensions>h;hh;hpp;hxx;hm;inl;inc;xsd</Extensions>
    </Filter>
    <Filter Include="Resource Files">
      <UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
      <Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
    </Filter>
  </ItemGroup>
  <ItemGroup>
    <ClCompile Include="..\..\src\LayerTest.cpp">
      <Filter>Source Files</Filter>
    </ClCompile>
  </ItemGroup>
</Project>
MLP_MVS/MLPTest/MLPTest.vcxproj (new file, 162 lines)
@@ -0,0 +1,162 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project DefaultTargets="Build" ToolsVersion="14.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<ItemGroup Label="ProjectConfigurations">
|
||||
<ProjectConfiguration Include="Debug|Win32">
|
||||
<Configuration>Debug</Configuration>
|
||||
<Platform>Win32</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Release|Win32">
|
||||
<Configuration>Release</Configuration>
|
||||
<Platform>Win32</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Debug|x64">
|
||||
<Configuration>Debug</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Release|x64">
|
||||
<Configuration>Release</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
</ItemGroup>
|
||||
<PropertyGroup Label="Globals">
|
||||
<ProjectGuid>{BCC41AEC-201D-4861-A756-2E10F95F76C5}</ProjectGuid>
|
||||
<Keyword>Win32Proj</Keyword>
|
||||
<RootNamespace>MLPTest</RootNamespace>
|
||||
<WindowsTargetPlatformVersion>8.1</WindowsTargetPlatformVersion>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>true</UseDebugLibraries>
|
||||
<PlatformToolset>v140</PlatformToolset>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>false</UseDebugLibraries>
|
||||
<PlatformToolset>v140</PlatformToolset>
|
||||
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>true</UseDebugLibraries>
|
||||
<PlatformToolset>v140</PlatformToolset>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>false</UseDebugLibraries>
|
||||
<PlatformToolset>v140</PlatformToolset>
|
||||
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
|
||||
<ImportGroup Label="ExtensionSettings">
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="Shared">
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<PropertyGroup Label="UserMacros" />
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||
<LinkIncremental>true</LinkIncremental>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<LinkIncremental>true</LinkIncremental>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||
<LinkIncremental>false</LinkIncremental>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<LinkIncremental>false</LinkIncremental>
|
||||
</PropertyGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||
<ClCompile>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
<Optimization>Disabled</Optimization>
|
||||
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<ClCompile>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
<Optimization>Disabled</Optimization>
|
||||
<PreprocessorDefinitions>_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<AdditionalIncludeDirectories>$(SolutionDir)..\deps</AdditionalIncludeDirectories>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||
<ClCompile>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<Optimization>MaxSpeed</Optimization>
|
||||
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||
<OptimizeReferences>true</OptimizeReferences>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<ClCompile>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<Optimization>MaxSpeed</Optimization>
|
||||
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||
<PreprocessorDefinitions>NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<AdditionalIncludeDirectories>$(SolutionDir)..\deps</AdditionalIncludeDirectories>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||
<OptimizeReferences>true</OptimizeReferences>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemGroup>
|
||||
<ClCompile Include="..\..\src\MLPTest.cpp">
|
||||
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">true</ExcludedFromBuild>
|
||||
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">false</ExcludedFromBuild>
|
||||
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">true</ExcludedFromBuild>
|
||||
<ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">false</ExcludedFromBuild>
|
||||
</ClCompile>
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\MLP_MVS.vcxproj">
|
||||
<Project>{6bfa9d94-b136-4985-83a1-ee76fff6f374}</Project>
|
||||
</ProjectReference>
|
||||
</ItemGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
|
||||
<ImportGroup Label="ExtensionTargets">
|
||||
</ImportGroup>
|
||||
</Project>
|
||||
MLP_MVS/MLPTest/MLPTest.vcxproj.filters (new file, 22 lines)
@@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <ItemGroup>
    <Filter Include="Source Files">
      <UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
      <Extensions>cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx</Extensions>
    </Filter>
    <Filter Include="Header Files">
      <UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
      <Extensions>h;hh;hpp;hxx;hm;inl;inc;xsd</Extensions>
    </Filter>
    <Filter Include="Resource Files">
      <UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
      <Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
    </Filter>
  </ItemGroup>
  <ItemGroup>
    <ClCompile Include="..\..\src\MLPTest.cpp">
      <Filter>Source Files</Filter>
    </ClCompile>
  </ItemGroup>
</Project>
@@ -1,9 +1,15 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 14
VisualStudioVersion = 14.0.23107.0
VisualStudioVersion = 14.0.25420.1
MinimumVisualStudioVersion = 10.0.40219.1
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "MLP_MVS", "MLP_MVS.vcxproj", "{6BFA9D94-B136-4985-83A1-EE76FFF6F374}"
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "MLP_lib", "MLP_MVS.vcxproj", "{6BFA9D94-B136-4985-83A1-EE76FFF6F374}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "NodeTest", "NodeTest\NodeTest.vcxproj", "{7CAD640F-63A0-4065-A86B-E65143A8329C}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "MLPTest", "MLPTest\MLPTest.vcxproj", "{BCC41AEC-201D-4861-A756-2E10F95F76C5}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "LayerTest", "LayerTest\LayerTest.vcxproj", "{10A8D77B-A596-4B06-87DA-B28492D77905}"
EndProject
Global
  GlobalSection(SolutionConfigurationPlatforms) = preSolution
@@ -21,6 +27,30 @@ Global
    {6BFA9D94-B136-4985-83A1-EE76FFF6F374}.Release|x64.Build.0 = Release|x64
    {6BFA9D94-B136-4985-83A1-EE76FFF6F374}.Release|x86.ActiveCfg = Release|Win32
    {6BFA9D94-B136-4985-83A1-EE76FFF6F374}.Release|x86.Build.0 = Release|Win32
    {7CAD640F-63A0-4065-A86B-E65143A8329C}.Debug|x64.ActiveCfg = Debug|x64
    {7CAD640F-63A0-4065-A86B-E65143A8329C}.Debug|x64.Build.0 = Debug|x64
    {7CAD640F-63A0-4065-A86B-E65143A8329C}.Debug|x86.ActiveCfg = Debug|Win32
    {7CAD640F-63A0-4065-A86B-E65143A8329C}.Debug|x86.Build.0 = Debug|Win32
    {7CAD640F-63A0-4065-A86B-E65143A8329C}.Release|x64.ActiveCfg = Release|x64
    {7CAD640F-63A0-4065-A86B-E65143A8329C}.Release|x64.Build.0 = Release|x64
    {7CAD640F-63A0-4065-A86B-E65143A8329C}.Release|x86.ActiveCfg = Release|Win32
    {7CAD640F-63A0-4065-A86B-E65143A8329C}.Release|x86.Build.0 = Release|Win32
    {BCC41AEC-201D-4861-A756-2E10F95F76C5}.Debug|x64.ActiveCfg = Debug|x64
    {BCC41AEC-201D-4861-A756-2E10F95F76C5}.Debug|x64.Build.0 = Debug|x64
    {BCC41AEC-201D-4861-A756-2E10F95F76C5}.Debug|x86.ActiveCfg = Debug|Win32
    {BCC41AEC-201D-4861-A756-2E10F95F76C5}.Debug|x86.Build.0 = Debug|Win32
    {BCC41AEC-201D-4861-A756-2E10F95F76C5}.Release|x64.ActiveCfg = Release|x64
    {BCC41AEC-201D-4861-A756-2E10F95F76C5}.Release|x64.Build.0 = Release|x64
    {BCC41AEC-201D-4861-A756-2E10F95F76C5}.Release|x86.ActiveCfg = Release|Win32
    {BCC41AEC-201D-4861-A756-2E10F95F76C5}.Release|x86.Build.0 = Release|Win32
    {10A8D77B-A596-4B06-87DA-B28492D77905}.Debug|x64.ActiveCfg = Debug|x64
    {10A8D77B-A596-4B06-87DA-B28492D77905}.Debug|x64.Build.0 = Debug|x64
    {10A8D77B-A596-4B06-87DA-B28492D77905}.Debug|x86.ActiveCfg = Debug|Win32
    {10A8D77B-A596-4B06-87DA-B28492D77905}.Debug|x86.Build.0 = Debug|Win32
    {10A8D77B-A596-4B06-87DA-B28492D77905}.Release|x64.ActiveCfg = Release|x64
    {10A8D77B-A596-4B06-87DA-B28492D77905}.Release|x64.Build.0 = Release|x64
    {10A8D77B-A596-4B06-87DA-B28492D77905}.Release|x86.ActiveCfg = Release|Win32
    {10A8D77B-A596-4B06-87DA-B28492D77905}.Release|x86.Build.0 = Release|Win32
  EndGlobalSection
  GlobalSection(SolutionProperties) = preSolution
    HideSolutionNode = FALSE
@@ -23,29 +23,30 @@
    <Keyword>Win32Proj</Keyword>
    <RootNamespace>MLP_MVS</RootNamespace>
    <WindowsTargetPlatformVersion>8.1</WindowsTargetPlatformVersion>
    <ProjectName>MLP_lib</ProjectName>
  </PropertyGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
    <ConfigurationType>Application</ConfigurationType>
    <ConfigurationType>StaticLibrary</ConfigurationType>
    <UseDebugLibraries>true</UseDebugLibraries>
    <PlatformToolset>v140</PlatformToolset>
    <CharacterSet>Unicode</CharacterSet>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
    <ConfigurationType>Application</ConfigurationType>
    <ConfigurationType>StaticLibrary</ConfigurationType>
    <UseDebugLibraries>false</UseDebugLibraries>
    <PlatformToolset>v140</PlatformToolset>
    <WholeProgramOptimization>true</WholeProgramOptimization>
    <CharacterSet>Unicode</CharacterSet>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
    <ConfigurationType>Application</ConfigurationType>
    <ConfigurationType>StaticLibrary</ConfigurationType>
    <UseDebugLibraries>true</UseDebugLibraries>
    <PlatformToolset>v140</PlatformToolset>
    <CharacterSet>Unicode</CharacterSet>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
    <ConfigurationType>Application</ConfigurationType>
    <ConfigurationType>StaticLibrary</ConfigurationType>
    <UseDebugLibraries>false</UseDebugLibraries>
    <PlatformToolset>v140</PlatformToolset>
    <WholeProgramOptimization>true</WholeProgramOptimization>
@@ -74,12 +75,14 @@
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
    <LinkIncremental>true</LinkIncremental>
    <TargetName>MLP</TargetName>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
    <LinkIncremental>false</LinkIncremental>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
    <LinkIncremental>false</LinkIncremental>
    <TargetName>MLP</TargetName>
  </PropertyGroup>
  <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
    <ClCompile>
@@ -149,15 +152,30 @@
  </ItemDefinitionGroup>
  <ItemGroup>
    <ClInclude Include="..\deps\microunit.h" />
    <ClInclude Include="..\src\Layer.h" />
    <ClInclude Include="..\src\MLP.h" />
    <ClInclude Include="..\src\Chrono.h" />
    <ClInclude Include="..\src\Layer.h">
      <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">false</ExcludedFromBuild>
      <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">false</ExcludedFromBuild>
      <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">false</ExcludedFromBuild>
      <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">false</ExcludedFromBuild>
    </ClInclude>
    <ClInclude Include="..\src\MLP.h">
      <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">false</ExcludedFromBuild>
      <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">false</ExcludedFromBuild>
      <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">false</ExcludedFromBuild>
      <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">false</ExcludedFromBuild>
    </ClInclude>
    <ClInclude Include="..\src\Node.h" />
    <ClInclude Include="..\src\Sample.h" />
    <ClInclude Include="..\src\Utils.h" />
  </ItemGroup>
  <ItemGroup>
    <ClCompile Include="..\src\Main.cpp" />
    <ClCompile Include="..\src\MLP.cpp" />
    <ClCompile Include="..\src\MLP.cpp">
      <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">false</ExcludedFromBuild>
      <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|x64'">false</ExcludedFromBuild>
      <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">false</ExcludedFromBuild>
      <ExcludedFromBuild Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">false</ExcludedFromBuild>
    </ClCompile>
  </ItemGroup>
  <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
  <ImportGroup Label="ExtensionTargets">
@@ -33,11 +33,11 @@
    <ClInclude Include="..\deps\microunit.h">
      <Filter>Header Files</Filter>
    </ClInclude>
    <ClInclude Include="..\src\Chrono.h">
      <Filter>Header Files</Filter>
    </ClInclude>
  </ItemGroup>
  <ItemGroup>
    <ClCompile Include="..\src\Main.cpp">
      <Filter>Source Files</Filter>
    </ClCompile>
    <ClCompile Include="..\src\MLP.cpp">
      <Filter>Source Files</Filter>
    </ClCompile>
MLP_MVS/NodeTest/NodeTest.vcxproj (new file, 157 lines)
@@ -0,0 +1,157 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project DefaultTargets="Build" ToolsVersion="14.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<ItemGroup Label="ProjectConfigurations">
|
||||
<ProjectConfiguration Include="Debug|Win32">
|
||||
<Configuration>Debug</Configuration>
|
||||
<Platform>Win32</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Release|Win32">
|
||||
<Configuration>Release</Configuration>
|
||||
<Platform>Win32</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Debug|x64">
|
||||
<Configuration>Debug</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
<ProjectConfiguration Include="Release|x64">
|
||||
<Configuration>Release</Configuration>
|
||||
<Platform>x64</Platform>
|
||||
</ProjectConfiguration>
|
||||
</ItemGroup>
|
||||
<PropertyGroup Label="Globals">
|
||||
<ProjectGuid>{7CAD640F-63A0-4065-A86B-E65143A8329C}</ProjectGuid>
|
||||
<Keyword>Win32Proj</Keyword>
|
||||
<RootNamespace>NodeTest</RootNamespace>
|
||||
<WindowsTargetPlatformVersion>8.1</WindowsTargetPlatformVersion>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>true</UseDebugLibraries>
|
||||
<PlatformToolset>v140</PlatformToolset>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>false</UseDebugLibraries>
|
||||
<PlatformToolset>v140</PlatformToolset>
|
||||
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>true</UseDebugLibraries>
|
||||
<PlatformToolset>v140</PlatformToolset>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
|
||||
<ConfigurationType>Application</ConfigurationType>
|
||||
<UseDebugLibraries>false</UseDebugLibraries>
|
||||
<PlatformToolset>v140</PlatformToolset>
|
||||
<WholeProgramOptimization>true</WholeProgramOptimization>
|
||||
<CharacterSet>Unicode</CharacterSet>
|
||||
</PropertyGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
|
||||
<ImportGroup Label="ExtensionSettings">
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="Shared">
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
||||
</ImportGroup>
|
||||
<PropertyGroup Label="UserMacros" />
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||
<LinkIncremental>true</LinkIncremental>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<LinkIncremental>true</LinkIncremental>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||
<LinkIncremental>false</LinkIncremental>
|
||||
</PropertyGroup>
|
||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<LinkIncremental>false</LinkIncremental>
|
||||
</PropertyGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
||||
<ClCompile>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
<Optimization>Disabled</Optimization>
|
||||
<PreprocessorDefinitions>WIN32;_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
|
||||
<ClCompile>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
<Optimization>Disabled</Optimization>
|
||||
<PreprocessorDefinitions>_DEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<AdditionalIncludeDirectories>$(SolutionDir)..\deps</AdditionalIncludeDirectories>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
||||
<ClCompile>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<Optimization>MaxSpeed</Optimization>
|
||||
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||
<PreprocessorDefinitions>WIN32;NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||
<OptimizeReferences>true</OptimizeReferences>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
|
||||
<ClCompile>
|
||||
<WarningLevel>Level3</WarningLevel>
|
||||
<PrecompiledHeader>
|
||||
</PrecompiledHeader>
|
||||
<Optimization>MaxSpeed</Optimization>
|
||||
<FunctionLevelLinking>true</FunctionLevelLinking>
|
||||
<IntrinsicFunctions>true</IntrinsicFunctions>
|
||||
<PreprocessorDefinitions>NDEBUG;_CONSOLE;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
||||
<AdditionalIncludeDirectories>$(SolutionDir)..\deps</AdditionalIncludeDirectories>
|
||||
</ClCompile>
|
||||
<Link>
|
||||
<SubSystem>Console</SubSystem>
|
||||
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
||||
<OptimizeReferences>true</OptimizeReferences>
|
||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
||||
</Link>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemGroup>
|
||||
<ClCompile Include="..\..\src\NodeTest.cpp" />
|
||||
</ItemGroup>
|
||||
<ItemGroup>
|
||||
<ProjectReference Include="..\MLP_MVS.vcxproj">
|
||||
<Project>{6bfa9d94-b136-4985-83a1-ee76fff6f374}</Project>
|
||||
</ProjectReference>
|
||||
</ItemGroup>
|
||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
|
||||
<ImportGroup Label="ExtensionTargets">
|
||||
</ImportGroup>
|
||||
</Project>
|
||||
MLP_MVS/NodeTest/NodeTest.vcxproj.filters (new file, 22 lines)
@@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <ItemGroup>
    <Filter Include="Source Files">
      <UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
      <Extensions>cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx</Extensions>
    </Filter>
    <Filter Include="Header Files">
      <UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
      <Extensions>h;hh;hpp;hxx;hm;inl;inc;xsd</Extensions>
    </Filter>
    <Filter Include="Resource Files">
      <UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
      <Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
    </Filter>
  </ItemGroup>
  <ItemGroup>
    <ClCompile Include="..\..\src\NodeTest.cpp">
      <Filter>Source Files</Filter>
    </ClCompile>
  </ItemGroup>
</Project>
src/Chrono.h (new file, 106 lines)
@@ -0,0 +1,106 @@
/**
 * @file chrono.h
 * @author David Alberto Nogueira (dan)
 * @brief std::chrono wrapper.
 *
 * USAGE:
 * @code{.cpp}
 * chronowrap::Chronometer chrono; //Declare a Chronometer
 * chrono.GetTime(); //Start timer
 * {
 *   ... //do your code
 * }
 * chrono.StopTime(); //Stop timer
 * std::cout << "Time: " << chrono.GetElapsedTime()
 *           << " sec." << std::endl; //Print duration
 * @endcode
 *
 * @copyright Copyright (c) 2016, David Alberto Nogueira.
 * All rights reserved. See licence below.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * (1) Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 *
 * (2) Redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in
 * the documentation and/or other materials provided with the
 * distribution.
 *
 * (3) The name of the author may not be used to
 * endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
 * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef CHRONO_H
#define CHRONO_H
#include <iostream>
#include <chrono>
#ifdef _WIN32
#include <time.h>
#else
#include <sys/time.h>
#endif

namespace chronowrap {
class Chronometer {
public:
  Chronometer() {
    time_span = std::chrono::steady_clock::duration::zero();
  };
  virtual ~Chronometer() {};

  void GetTime() {
    clock_begin = std::chrono::steady_clock::now();
  }
  void StopTime() {
    std::chrono::steady_clock::time_point clock_end =
      std::chrono::steady_clock::now();
    time_span += clock_end - clock_begin;
  }
  //Return elapsed time in seconds
  double GetElapsedTime() {
    return double(time_span.count()) * resolution;
  }
  void Reset() {
    time_span = std::chrono::steady_clock::duration::zero();
  }
  //in us
  double GetClockResolutionUS() {
    return resolution*1e6;
  }
  void PrintClockResolution() {
    std::cout << "clock::period: " << GetClockResolutionUS() << " us.\n";
  }
  bool IsClockSteady() {
    return std::chrono::steady_clock::is_steady;
  }
  void PrintClockSteady() {
    printf("clock::is_steady: %s\n", IsClockSteady() ? "yes" : "no");
  }

protected:
  std::chrono::steady_clock::time_point clock_begin;
  std::chrono::steady_clock::duration time_span;
  const double resolution = double(std::chrono::steady_clock::period::num) /
    double(std::chrono::steady_clock::period::den);
};
}

#endif // CHRONO_H
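A minimal usage sketch of the wrapper above, following the @code example in the header (the timed loop is just a placeholder workload):

#include "Chrono.h"
#include <iostream>

int main() {
  chronowrap::Chronometer chrono;
  chrono.PrintClockResolution();
  chrono.GetTime();                                     // start timer
  volatile double acc = 0.0;
  for (int i = 0; i < 1000000; ++i) acc += i * 1e-6;    // placeholder workload
  chrono.StopTime();                                    // stop timer (accumulates into time_span)
  std::cout << "Time: " << chrono.GetElapsedTime() << " sec." << std::endl;
  return 0;
}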
src/Layer.h (65 lines changed)
@@ -23,39 +23,80 @@ public:
    m_nodes.clear();
  };

  Layer(int num_nodes, int num_inputs_per_node) {
  Layer(int num_nodes,
        int num_inputs_per_node,
        bool use_constant_weight_init = true,
        double constant_weight_init = 0.5) {
    m_num_nodes = num_nodes;
    m_num_inputs_per_node = num_inputs_per_node;
    m_nodes = std::vector<Node>(num_nodes, Node(num_inputs_per_node));
    m_nodes.resize(num_nodes,
                   std::move(Node(num_inputs_per_node,
                                  use_constant_weight_init,
                                  constant_weight_init)));
  };

  ~Layer() {
    m_num_nodes = 0;
    m_num_inputs_per_node = 0;
    m_nodes.clear();
  };

  void GetOutput(const std::vector<double> &input, std::vector<double> * output) const {
  //std::vector<Node> & GetNodes() {
  //  return m_nodes;
  //}

  const std::vector<Node> & GetNodes() const {
    return m_nodes;
  }

  void GetOutputAfterSigmoid(const std::vector<double> &input, std::vector<double> * output) const {
    assert(input.size() == m_num_inputs_per_node);

    output->resize(m_num_nodes);

    for (int i = 0; i < m_num_nodes; ++i) {
      (*output)[i] = m_nodes[i].GetOutput(input);
      m_nodes[i].GetOutputAfterSigmoid(input, &((*output)[i]));
    }
  }

  void UpdateWeights(const std::vector<double> &x,
  void UpdateWeights(const std::vector<double> &input_layer_activation,
                     const std::vector<double> &deriv_error,
                     double m_learning_rate,
                     double error) {
    assert(x.size() == m_num_inputs_per_node);
                     std::vector<double> * deltas) {
    assert(input_layer_activation.size() == m_num_inputs_per_node);
    assert(deriv_error.size() == m_nodes.size());

    for (size_t i = 0; i < m_nodes.size(); i++)
      m_nodes[i].UpdateWeights(x, m_learning_rate, error);
    deltas->resize(m_num_inputs_per_node, 0);

    for (size_t i = 0; i < m_nodes.size(); i++) {
      double net_sum;
      m_nodes[i].GetInputInnerProdWithWeights(input_layer_activation, &net_sum);

      //dE/dwij = dE/doj . doj/dnetj . dnetj/dwij
      double dE_doj = 0.0;
      double doj_dnetj = 0.0;
      double dnetj_dwij = 0.0;

      dE_doj = deriv_error[i];
      doj_dnetj = utils::deriv_sigmoid(net_sum);

      for (int j = 0; j < m_num_inputs_per_node; j++) {
        (*deltas)[j] += dE_doj * doj_dnetj * m_nodes[i].GetWeights()[j];

        dnetj_dwij = input_layer_activation[j];

        m_nodes[i].UpdateWeight(j,
                                -(dE_doj * doj_dnetj * dnetj_dwij),
                                m_learning_rate);
      }
    }
  };

protected:
  int m_num_nodes;
  int m_num_inputs_per_node;
  int m_num_nodes{ 0 };
  int m_num_inputs_per_node{ 0 };
  std::vector<Node> m_nodes;
};
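In the notation of the in-code comment (w_ij is the weight from input i to node j), the gradient applied by UpdateWeights and the delta it accumulates for the previous layer are, assuming utils::deriv_sigmoid is the derivative of the standard logistic sigmoid:

\frac{\partial E}{\partial w_{ij}}
  = \frac{\partial E}{\partial o_j}\cdot\frac{\partial o_j}{\partial \mathrm{net}_j}\cdot\frac{\partial \mathrm{net}_j}{\partial w_{ij}}
  = \underbrace{\frac{\partial E}{\partial o_j}\,\sigma'(\mathrm{net}_j)}_{\delta_j}\; x_i,
\qquad
\sigma'(z) = \sigma(z)\bigl(1-\sigma(z)\bigr),
\qquad
\text{delta for input } i = \sum_{j}\delta_j\, w_{ij}.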
src/LayerTest.cpp (new file, 22 lines)
@@ -0,0 +1,22 @@
//============================================================================
// Name : LayerTest.cpp
// Author : David Nogueira
//============================================================================

#include "Layer.h"
#include "Sample.h"
#include "Utils.h"

#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#include <sstream>
#include <fstream>
#include <vector>
#include <algorithm>
#include "microunit.h"

int main() {
  microunit::UnitTester::Run();
  return 0;
}
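LayerTest.cpp currently only runs the test harness; no UNIT cases are registered yet. A sketch of the kind of case it could host, assuming Node holds one weight per input (all initialized to the constant value) and that the activation is the standard logistic sigmoid:

#include "Layer.h"
#include "microunit.h"
#include <cmath>
#include <vector>

UNIT(LayerForwardPass) {
  // One node, two inputs, every weight initialized to 0.5.
  Layer layer(1, 2, true, 0.5);
  std::vector<double> output;
  layer.GetOutputAfterSigmoid({1.0, 1.0}, &output);
  // net = 0.5*1 + 0.5*1 = 1.0, sigmoid(1.0) ~= 0.7310586
  ASSERT_TRUE(std::fabs(output[0] - 0.7310586) < 1e-6);
}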
src/MLP.cpp (183 lines changed)
@@ -11,4 +11,187 @@
#include <vector>
#include <algorithm>

bool MLP::ExportNNWeights(std::vector<double> *weights) const {
  return true;
};
bool MLP::ImportNNWeights(const std::vector<double> & weights) {
  return true;
};

void MLP::GetOutput(const std::vector<double> &input,
                    std::vector<double> * output,
                    std::vector<std::vector<double>> * all_layers_activations,
                    bool apply_softmax) const {
  assert(input.size() == m_num_inputs);
  int temp_size;
  if (m_num_hidden_layers == 0)
    temp_size = m_num_outputs;
  else
    temp_size = m_num_nodes_per_hidden_layer;

  std::vector<double> temp_in(m_num_inputs, 0.0);
  std::vector<double> temp_out(temp_size, 0.0);
  temp_in = input;

  //m_layers.size() equals (m_num_hidden_layers + 1)
  for (int i = 0; i < (m_num_hidden_layers + 1); ++i) {
    if (i > 0) {
      //Store this layer activation
      if (all_layers_activations != nullptr)
        all_layers_activations->emplace_back(std::move(temp_in));

      temp_in.clear();
      temp_in = temp_out;
      temp_out.clear();
      temp_out.resize((i == m_num_hidden_layers) ?
                      m_num_outputs :
                      m_num_nodes_per_hidden_layer);
    }
    m_layers[i].GetOutputAfterSigmoid(temp_in, &temp_out);
  }

  if (apply_softmax && temp_out.size() > 1)
    utils::Softmax(&temp_out);
  *output = temp_out;

  //Add last layer activation
  if (all_layers_activations != nullptr)
    all_layers_activations->emplace_back(std::move(temp_in));
}

void MLP::GetOutputClass(const std::vector<double> &output, size_t * class_id) const {
  utils::GetIdMaxElement(output, class_id);
}

void MLP::UpdateWeights(const std::vector<std::vector<double>> & all_layers_activations,
                        const std::vector<double> &deriv_error,
                        double learning_rate) {

  std::vector<double> temp_deriv_error = deriv_error;
  std::vector<double> deltas{};
  //m_layers.size() equals (m_num_hidden_layers + 1)
  for (int i = m_num_hidden_layers; i >= 0; --i) {
    m_layers[i].UpdateWeights(all_layers_activations[i], temp_deriv_error, learning_rate, &deltas);
    if (i > 0) {
      temp_deriv_error.clear();
      temp_deriv_error = std::move(deltas);
      deltas.clear();
    }
  }
};

void MLP::UpdateMiniBatch(const std::vector<TrainingSample> &training_sample_set_with_bias,
                          double learning_rate,
                          int max_iterations,
                          double min_error_cost) {
  int num_examples = training_sample_set_with_bias.size();
  int num_features = training_sample_set_with_bias[0].GetInputVectorSize();

  {
    int layer_i = -1;
    int node_i = -1;
    std::cout << "Starting weights:" << std::endl;
    for (const auto & layer : m_layers) {
      layer_i++;
      node_i = -1;
      std::cout << "Layer " << layer_i << " :" << std::endl;
      for (const auto & node : layer.GetNodes()) {
        node_i++;
        std::cout << "\tNode " << node_i << " :\t";
        for (auto m_weightselement : node.GetWeights()) {
          std::cout << m_weightselement << "\t";
        }
        std::cout << std::endl;
      }
    }
  }

  for (int i = 0; i < max_iterations; i++) {
    std::cout << "******************************" << std::endl;
    std::cout << "******** ITER " << i << std::endl;
    std::cout << "******************************" << std::endl;
    double current_iteration_cost_function = 0.0;
    for (auto & training_sample_with_bias : training_sample_set_with_bias) {
      std::vector<double> predicted_output;
      std::vector< std::vector<double> > all_layers_activations;
      GetOutput(training_sample_with_bias.input_vector(),
                &predicted_output,
                &all_layers_activations);
      const std::vector<double> & correct_output =
        training_sample_with_bias.output_vector();

      assert(correct_output.size() == predicted_output.size());
      std::vector<double> deriv_error_output(predicted_output.size());

      std::cout << training_sample_with_bias << "\t\t";
      {
        std::cout << "Predicted output: [";
        for (int i = 0; i < predicted_output.size(); i++) {
          if (i != 0)
            std::cout << ", ";
          std::cout << predicted_output[i];
        }
        std::cout << "]" << std::endl;
      }

      for (int j = 0; j < predicted_output.size(); j++) {
        current_iteration_cost_function +=
          (std::pow)((correct_output[j] - predicted_output[j]), 2);
        deriv_error_output[j] =
          -2 * (correct_output[j] - predicted_output[j]);
      }

      UpdateWeights(all_layers_activations,
                    deriv_error_output,
                    learning_rate);
    }

    std::cout << "Iteration cost function f(error): "
      << current_iteration_cost_function << std::endl;
    if (current_iteration_cost_function < min_error_cost)
      break;

    //{
    //  int layer_i = -1;
    //  int node_i = -1;
    //  std::cout << "Current weights:" << std::endl;
    //  for (const auto & layer : m_layers) {
    //    layer_i++;
    //    node_i = -1;
    //    std::cout << "Layer " << layer_i << " :" << std::endl;
    //    for (const auto & node : layer.GetNodes()) {
    //      node_i++;
    //      std::cout << "\tNode " << node_i << " :\t";
    //      for (auto m_weightselement : node.GetWeights()) {
    //        std::cout << m_weightselement << "\t";
    //      }
    //      std::cout << std::endl;
    //    }
    //  }
    //}
  }

  std::cout << "******************************" << std::endl;
  std::cout << "******* TRAINING ENDED *******" << std::endl;
  std::cout << "******************************" << std::endl;
  {
    int layer_i = -1;
    int node_i = -1;
    std::cout << "Final weights:" << std::endl;
    for (const auto & layer : m_layers) {
      layer_i++;
      node_i = -1;
      std::cout << "Layer " << layer_i << " :" << std::endl;
      for (const auto & node : layer.GetNodes()) {
        node_i++;
        std::cout << "\tNode " << node_i << " :\t";
        for (auto m_weightselement : node.GetWeights()) {
          std::cout << m_weightselement << "\t";
        }
        std::cout << std::endl;
      }
    }
  }
};
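The quantity accumulated in current_iteration_cost_function and the derivative written into deriv_error_output above correspond to the plain squared-error cost per sample and its gradient with respect to each output:

E = \sum_{j}\bigl(t_j - o_j\bigr)^2,
\qquad
\frac{\partial E}{\partial o_j} = -2\,\bigl(t_j - o_j\bigr),

where t_j is correct_output[j] and o_j is predicted_output[j].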
src/MLP.h (76 lines changed)
@@ -23,57 +23,77 @@ public:
      int num_outputs,
      int num_hidden_layers,
      int num_nodes_per_hidden_layer,
      double learning_rate) {
      bool use_constant_weight_init = true,
      double constant_weight_init = 0.5) {

    m_num_inputs = num_inputs;
    m_num_outputs = num_outputs;
    m_num_hidden_layers = num_hidden_layers;
    m_num_nodes_per_hidden_layer = num_nodes_per_hidden_layer;

    m_learning_rate = learning_rate;
  };
    CreateMLP(use_constant_weight_init,
              constant_weight_init);
  }

  ~MLP() {
    m_num_inputs = 0;
    m_num_outputs = 0;
    m_num_hidden_layers = 0;
    m_num_nodes_per_hidden_layer = 0;
    m_layers.clear();
  };

  void CreateMLP() {
  bool ExportNNWeights(std::vector<double> *weights)const;
  bool ImportNNWeights(const std::vector<double> & weights);

  void GetOutput(const std::vector<double> &input,
                 std::vector<double> * output,
                 std::vector<std::vector<double>> * all_layers_activations = nullptr,
                 bool apply_softmax = false) const;
  void GetOutputClass(const std::vector<double> &output, size_t * class_id) const;

  void UpdateMiniBatch(const std::vector<TrainingSample> &training_sample_set_with_bias,
                       double learning_rate,
                       int max_iterations = 5000,
                       double min_error_cost = 0.001);
protected:
  void UpdateWeights(const std::vector<std::vector<double>> & all_layers_activations,
                     const std::vector<double> &error,
                     double learning_rate);
private:
  void CreateMLP(bool use_constant_weight_init,
                 double constant_weight_init = 0.5) {
    if (m_num_hidden_layers > 0) {
      //first layer
      m_layers.emplace_back(Layer(m_num_nodes_per_hidden_layer, m_num_inputs));
      m_layers.emplace_back(Layer(m_num_nodes_per_hidden_layer,
                                  m_num_inputs,
                                  use_constant_weight_init,
                                  constant_weight_init));
      //subsequent layers
      for (int i = 0; i < m_num_hidden_layers - 1; i++) {
        m_layers.emplace_back(Layer(m_num_nodes_per_hidden_layer,
                                    m_num_nodes_per_hidden_layer));
                                    m_num_nodes_per_hidden_layer,
                                    use_constant_weight_init,
                                    constant_weight_init));
      }
      //last layer
      m_layers.emplace_back(Layer(m_num_outputs, m_num_nodes_per_hidden_layer));
      m_layers.emplace_back(Layer(m_num_outputs,
                                  m_num_nodes_per_hidden_layer,
                                  use_constant_weight_init,
                                  constant_weight_init));
    } else {
      m_layers.emplace_back(Layer(m_num_outputs, m_num_inputs));
      m_layers.emplace_back(Layer(m_num_outputs,
                                  m_num_inputs,
                                  use_constant_weight_init,
                                  constant_weight_init));
    }
  }

  size_t GetWeightMatrixCardinality()const;
  bool ExportWeights(std::vector<double> *weights)const;
  bool ImportWeights(const std::vector<double> & weights);

  void GetOutput(const std::vector<double> &input, std::vector<double> * output) const;
  void GetOutputClass(const std::vector<double> &output, size_t * class_id) const;

  void Train(const std::vector<TrainingSample> &training_sample_set,
             int max_iterations);
protected:
  void UpdateWeights(const std::vector<double> &x,
                     double error);
private:

  int m_num_inputs;
  int m_num_outputs;
  int m_num_hidden_layers;
  int m_num_nodes_per_hidden_layer;

  double m_learning_rate;
  int m_max_iterations;
  int m_num_inputs{ 0 };
  int m_num_outputs{ 0 };
  int m_num_hidden_layers{ 0 };
  int m_num_nodes_per_hidden_layer{ 0 };

  std::vector<Layer> m_layers;
};
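For reference, a minimal construction and forward-pass sketch against the public interface above, mirroring the configuration the unit tests use (zero hidden layers, so CreateMLP builds a single Layer(num_outputs, num_inputs)); the input values are arbitrary:

#include "MLP.h"
#include <iostream>
#include <vector>

int main() {
  // 3 inputs (2 features plus the bias value the samples append),
  // 1 output, 0 hidden layers, constant weight init at 0.5.
  // The nodes-per-hidden-layer argument (5) is unused with 0 hidden layers.
  MLP net(3, 1, 0, 5, true, 0.5);

  std::vector<double> output;
  net.GetOutput({1.0, 1.0, 1.0}, &output);   // forward pass only, no training
  std::cout << "output[0] = " << output[0] << std::endl;
  return 0;
}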
src/MLPTest.cpp (new file, 232 lines)
@@ -0,0 +1,232 @@
//============================================================================
// Name : Main.cpp
// Author : David Nogueira
//============================================================================
#include "MLP.h"
#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#include <sstream>
#include <fstream>
#include <vector>
#include <algorithm>
#include "microunit.h"

UNIT(LearnAND) {
  std::cout << "Train AND function with mlp." << std::endl;

  std::vector<TrainingSample> training_set =
  {
    {{ 0, 0 },{0.0}},
    {{ 0, 1 },{0.0}},
    {{ 1, 0 },{0.0}},
    {{ 1, 1 },{1.0}}
  };
  bool bias_already_in = false;
  std::vector<TrainingSample> training_sample_set_with_bias(training_set);
  //set up bias
  if (!bias_already_in) {
    for (auto & training_sample_with_bias : training_sample_set_with_bias) {
      training_sample_with_bias.AddBiasValue(1);
    }
  }

  size_t num_examples = training_sample_set_with_bias.size();
  size_t num_features = training_sample_set_with_bias[0].GetInputVectorSize();
  MLP my_mlp(num_features, 1, 0, 5, true, 0.5);
  //Train MLP
  my_mlp.UpdateMiniBatch(training_sample_set_with_bias, 2, 1000, 0.245);

  for (const auto & training_sample : training_sample_set_with_bias) {
    std::vector<double> output;
    my_mlp.GetOutput(training_sample.input_vector(), &output);
    bool predicted_output = output[0]> 0.5 ? true : false;
    bool correct_output = training_sample.output_vector()[0] > 0.5 ? true : false;
    ASSERT_TRUE(predicted_output == correct_output);
  }
  std::cout << "Trained with success." << std::endl;
  std::cout << std::endl;
}

UNIT(LearnNAND) {
  std::cout << "Train NAND function with mlp." << std::endl;

  std::vector<TrainingSample> training_set =
  {
    {{ 0, 0 },{1.0}},
    {{ 0, 1 },{1.0}},
    {{ 1, 0 },{1.0}},
    {{ 1, 1 },{0.0}}
  };
  bool bias_already_in = false;
  std::vector<TrainingSample> training_sample_set_with_bias(training_set);
  //set up bias
  if (!bias_already_in) {
    for (auto & training_sample_with_bias : training_sample_set_with_bias) {
      training_sample_with_bias.AddBiasValue(1);
    }
  }

  size_t num_examples = training_sample_set_with_bias.size();
  size_t num_features = training_sample_set_with_bias[0].GetInputVectorSize();
  MLP my_mlp(num_features, 1, 0, 5, true, 0.5);
  //Train MLP
  my_mlp.UpdateMiniBatch(training_sample_set_with_bias, 2, 1000, 0.245);

  for (const auto & training_sample : training_sample_set_with_bias) {
    std::vector<double> output;
    my_mlp.GetOutput(training_sample.input_vector(), &output);
    bool predicted_output = output[0]> 0.5 ? true : false;
    bool correct_output = training_sample.output_vector()[0] > 0.5 ? true : false;
    ASSERT_TRUE(predicted_output == correct_output);
  }
  std::cout << "Trained with success." << std::endl;
  std::cout << std::endl;
}

UNIT(LearnOR) {
  std::cout << "Train OR function with mlp." << std::endl;

  std::vector<TrainingSample> training_set =
  {
    {{ 0, 0 },{0.0}},
    {{ 0, 1 },{1.0}},
    {{ 1, 0 },{1.0}},
    {{ 1, 1 },{1.0}}
  };
  bool bias_already_in = false;
  std::vector<TrainingSample> training_sample_set_with_bias(training_set);
  //set up bias
  if (!bias_already_in) {
    for (auto & training_sample_with_bias : training_sample_set_with_bias) {
      training_sample_with_bias.AddBiasValue(1);
    }
  }

  size_t num_examples = training_sample_set_with_bias.size();
  size_t num_features = training_sample_set_with_bias[0].GetInputVectorSize();
  MLP my_mlp(num_features, 1, 0, 5, true, 0.5);
  //Train MLP
  my_mlp.UpdateMiniBatch(training_sample_set_with_bias, 2, 1000, 0.245);

  for (const auto & training_sample : training_sample_set_with_bias) {
    std::vector<double> output;
    my_mlp.GetOutput(training_sample.input_vector(), &output);
    bool predicted_output = output[0]> 0.5 ? true : false;
    bool correct_output = training_sample.output_vector()[0] > 0.5 ? true : false;
    ASSERT_TRUE(predicted_output == correct_output);
  }
  std::cout << "Trained with success." << std::endl;
|
||||
std::cout << std::endl;
|
||||
}
|
||||
|
||||
UNIT(LearnNOR) {
|
||||
std::cout << "Train NOR function with mlp." << std::endl;
|
||||
|
||||
std::vector<TrainingSample> training_set =
|
||||
{
|
||||
{{ 0, 0 },{1.0}},
|
||||
{{ 0, 1 },{0.0}},
|
||||
{{ 1, 0 },{0.0}},
|
||||
{{ 1, 1 },{0.0}}
|
||||
};
|
||||
bool bias_already_in = false;
|
||||
std::vector<TrainingSample> training_sample_set_with_bias(training_set);
|
||||
//set up bias
|
||||
if (!bias_already_in) {
|
||||
for (auto & training_sample_with_bias : training_sample_set_with_bias) {
|
||||
training_sample_with_bias.AddBiasValue(1);
|
||||
}
|
||||
}
|
||||
|
||||
size_t num_examples = training_sample_set_with_bias.size();
|
||||
size_t num_features = training_sample_set_with_bias[0].GetInputVectorSize();
|
||||
MLP my_mlp(num_features, 1, 0, 5, true, 0.5);
|
||||
//Train MLP
|
||||
my_mlp.UpdateMiniBatch(training_sample_set_with_bias, 2, 1000, 0.245);
|
||||
|
||||
for (const auto & training_sample : training_sample_set_with_bias) {
|
||||
std::vector<double> output;
|
||||
my_mlp.GetOutput(training_sample.input_vector(), &output);
|
||||
bool predicted_output = output[0]> 0.5 ? true : false;
|
||||
bool correct_output = training_sample.output_vector()[0] > 0.5 ? true : false;
|
||||
ASSERT_TRUE(predicted_output == correct_output);
|
||||
}
|
||||
std::cout << "Trained with success." << std::endl;
|
||||
std::cout << std::endl;
|
||||
}
|
||||
|
||||
//UNIT(LearnXOR) {
|
||||
// std::cout << "Train XOR function with mlp." << std::endl;
|
||||
//
|
||||
// std::vector<TrainingSample> training_set =
|
||||
// {
|
||||
// { { 0, 0 },{ 0.0 } },
|
||||
// { { 0, 1 },{ 1.0 } },
|
||||
// { { 1, 0 },{ 1.0 } },
|
||||
// { { 1, 1 },{ 0.0 } }
|
||||
// };
|
||||
// bool bias_already_in = false;
|
||||
// std::vector<TrainingSample> training_sample_set_with_bias(training_set);
|
||||
// //set up bias
|
||||
// if (!bias_already_in) {
|
||||
// for (auto & training_sample_with_bias : training_sample_set_with_bias) {
|
||||
// training_sample_with_bias.AddBiasValue(1);
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// size_t num_examples = training_sample_set_with_bias.size();
|
||||
// size_t num_features = training_sample_set_with_bias[0].GetInputVectorSize();
|
||||
// MLP my_mlp(num_features, 1, 0, 5, true, 0.5);
|
||||
// //Train MLP
|
||||
// my_mlp.UpdateMiniBatch(training_sample_set_with_bias, 2, 1000, 0.245);
|
||||
//
|
||||
// for (const auto & training_sample : training_sample_set_with_bias) {
|
||||
// std::vector<double> output;
|
||||
// my_mlp.GetOutput(training_sample.input_vector(), &output);
|
||||
// bool predicted_output = output[0]> 0.5 ? true : false;
|
||||
// bool correct_output = training_sample.output_vector()[0] > 0.5 ? true : false;
|
||||
// ASSERT_TRUE(predicted_output == correct_output);
|
||||
// }
|
||||
// std::cout << "Trained with success." << std::endl;
|
||||
// std::cout << std::endl;
|
||||
//}
|
||||
|
||||
UNIT(LearnNOT) {
|
||||
std::cout << "Train NOT function with mlp." << std::endl;
|
||||
|
||||
std::vector<TrainingSample> training_set =
|
||||
{
|
||||
{{ 0},{1.0 }},
|
||||
{{ 1},{0.0 }}
|
||||
};
|
||||
bool bias_already_in = false;
|
||||
std::vector<TrainingSample> training_sample_set_with_bias(training_set);
|
||||
//set up bias
|
||||
if (!bias_already_in) {
|
||||
for (auto & training_sample_with_bias : training_sample_set_with_bias) {
|
||||
training_sample_with_bias.AddBiasValue(1);
|
||||
}
|
||||
}
|
||||
|
||||
size_t num_examples = training_sample_set_with_bias.size();
|
||||
size_t num_features = training_sample_set_with_bias[0].GetInputVectorSize();
|
||||
MLP my_mlp(num_features, 1, 0, 5, true, 0.5);
|
||||
//Train MLP
|
||||
my_mlp.UpdateMiniBatch(training_sample_set_with_bias, 2, 1000, 0.245);
|
||||
|
||||
for (const auto & training_sample : training_sample_set_with_bias) {
|
||||
std::vector<double> output;
|
||||
my_mlp.GetOutput(training_sample.input_vector(), &output);
|
||||
bool predicted_output = output[0]> 0.5 ? true : false;
|
||||
bool correct_output = training_sample.output_vector()[0] > 0.5 ? true : false;
|
||||
ASSERT_TRUE(predicted_output == correct_output);
|
||||
}
|
||||
std::cout << "Trained with success." << std::endl;
|
||||
std::cout << std::endl;
|
||||
}
|
||||
|
||||
int main() {
|
||||
microunit::UnitTester::Run();
|
||||
return 0;
|
||||
}
|
||||
172  src/Main.cpp
@@ -1,172 +0,0 @@
//============================================================================
// Name : Main.cpp
// Author : David Nogueira
//============================================================================
#include "MLP.h"
#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#include <sstream>
#include <fstream>
#include <vector>
#include <algorithm>
#include "microunit.h"

UNIT(LearnAND) {
  std::cout << "Train AND function with mlp." << std::endl;

  std::vector<TrainingSample> training_set =
  {
    {{ 0, 0 },{1,0}},
    {{ 0, 1 },{1,0}},
    {{ 1, 0 },{1,0}},
    {{ 1, 1 },{0,1}}
  };

  MLP my_mlp(2, 2, 1, 5, 0.1);
  my_mlp.Train(training_set, 100);

  for (const auto & training_sample : training_set){
    size_t class_id;
    my_mlp.GetOutputClass(training_sample.input_vector(), &class_id);
    ASSERT_TRUE(class_id ==
                std::distance(training_sample.output_vector().begin(),
                              std::max_element(training_sample.output_vector().begin(),
                                               training_sample.output_vector().end()) ));
  }
  std::cout << "Trained with success." << std::endl;
  std::cout << std::endl;
}

UNIT(LearnNAND) {
  std::cout << "Train NAND function with mlp." << std::endl;

  std::vector<TrainingSample> training_set =
  {
    {{ 0, 0 },{0,1}},
    {{ 0, 1 },{0,1}},
    {{ 1, 0 },{0,1}},
    {{ 1, 1 },{1,0}}
  };

  MLP my_mlp(2, 2, 1, 5, 0.1);
  my_mlp.Train(training_set, 100);

  for (const auto & training_sample : training_set) {
    size_t class_id;
    my_mlp.GetOutputClass(training_sample.input_vector(), &class_id);
    ASSERT_TRUE(class_id ==
                std::distance(training_sample.output_vector().begin(),
                              std::max_element(training_sample.output_vector().begin(),
                                               training_sample.output_vector().end())));
  }
  std::cout << "Trained with success." << std::endl;
  std::cout << std::endl;
}

UNIT(LearnOR) {
  std::cout << "Train OR function with mlp." << std::endl;

  std::vector<TrainingSample> training_set =
  {
    {{ 0, 0 },{1,0}},
    {{ 0, 1 },{0,1}},
    {{ 1, 0 },{0,1}},
    {{ 1, 1 },{0,1}}
  };

  MLP my_mlp(2, 2, 1, 5, 0.1);
  my_mlp.Train(training_set, 100);

  for (const auto & training_sample : training_set) {
    size_t class_id;
    my_mlp.GetOutputClass(training_sample.input_vector(), &class_id);
    ASSERT_TRUE(class_id ==
                std::distance(training_sample.output_vector().begin(),
                              std::max_element(training_sample.output_vector().begin(),
                                               training_sample.output_vector().end())));
  }
  std::cout << "Trained with success." << std::endl;
  std::cout << std::endl;
}

UNIT(LearnNOR) {
  std::cout << "Train NOR function with mlp." << std::endl;

  std::vector<TrainingSample> training_set =
  {
    {{ 0, 0 },{0,1}},
    {{ 0, 1 },{1,0}},
    {{ 1, 0 },{1,0}},
    {{ 1, 1 },{1,0}}
  };

  MLP my_mlp(2, 2, 1, 5, 0.1);
  my_mlp.Train(training_set, 100);

  for (const auto & training_sample : training_set) {
    size_t class_id;
    my_mlp.GetOutputClass(training_sample.input_vector(), &class_id);
    ASSERT_TRUE(class_id ==
                std::distance(training_sample.output_vector().begin(),
                              std::max_element(training_sample.output_vector().begin(),
                                               training_sample.output_vector().end())));
  }
  std::cout << "Trained with success." << std::endl;
  std::cout << std::endl;
}

UNIT(LearnXOR) {
  std::cout << "Train XOR function with mlp." << std::endl;

  std::vector<TrainingSample> training_set =
  {
    { { 0, 0 },{ 1,0 } },
    { { 0, 1 },{ 0,1 } },
    { { 1, 0 },{ 0,1 } },
    { { 1, 1 },{ 1,0 } }
  };

  MLP my_mlp(2, 2, 1, 5, 0.1);
  my_mlp.Train(training_set, 100);

  for (const auto & training_sample : training_set) {
    size_t class_id;
    my_mlp.GetOutputClass(training_sample.input_vector(), &class_id);
    ASSERT_TRUE(class_id ==
                std::distance(training_sample.output_vector().begin(),
                              std::max_element(training_sample.output_vector().begin(),
                                               training_sample.output_vector().end())));
  }
  std::cout << "Trained with success." << std::endl;
  std::cout << std::endl;
}

UNIT(LearnNOT) {
  std::cout << "Train NOT function with mlp." << std::endl;

  std::vector<TrainingSample> training_set =
  {
    {{ 0},{0,1}},
    {{ 1},{1,1}}
  };

  MLP my_mlp(1, 2, 1, 5, 0.1);
  my_mlp.Train(training_set, 100);

  for (const auto & training_sample : training_set) {
    size_t class_id;
    my_mlp.GetOutputClass(training_sample.input_vector(), &class_id);
    ASSERT_TRUE(class_id ==
                std::distance(training_sample.output_vector().begin(),
                              std::max_element(training_sample.output_vector().begin(),
                                               training_sample.output_vector().end())));
  }
  std::cout << "Trained with success." << std::endl;
  std::cout << std::endl;
}

int main() {
  microunit::UnitTester::Run();
  return 0;
}

89  src/Node.h
@@ -5,6 +5,8 @@
#ifndef NODE_H
#define NODE_H

#include "Utils.h"

#include <stdio.h>
#include <stdlib.h>
#include <iostream>
@@ -14,42 +16,58 @@
#include <algorithm>
#include <cassert> // for assert()

#define ZERO_WEIGHT_INITIALIZATION 1
#define USE_SIGMOID 1
#define CONSTANT_WEIGHT_INITIALIZATION 0

class Node {
public:
  Node() {
    m_bias = 0.0;
    m_num_inputs = 0;
    m_bias = 0.0;
    m_weights.clear();
  };
  Node(int num_inputs) {
  Node(int num_inputs,
       bool use_constant_weight_init = true,
       double constant_weight_init = 0.5) {
    m_num_inputs = num_inputs;
    m_bias = 0.0;
    m_num_inputs = num_inputs + 1;
    m_weights.clear();
    m_weights = std::vector<double>(m_num_inputs);

    //initialize weight vector
    WeightInitialization(m_num_inputs,
                         use_constant_weight_init,
                         constant_weight_init);
  };

  ~Node() {
    m_num_inputs = 0;
    m_bias = 0.0;
    m_weights.clear();
  };

  void WeightInitialization(int m_num_inputs,
                            bool use_constant_weight_init = true,
                            double constant_weight_init = 0.5) {
    //initialize weight vector
    if (use_constant_weight_init) {
      m_weights.resize(m_num_inputs, constant_weight_init);
    } else {
      m_weights.resize(m_num_inputs);
      std::generate_n(m_weights.begin(),
                      m_num_inputs,
                      (ZERO_WEIGHT_INITIALIZATION) ?
                      utils::gen_rand(0) : utils::gen_rand());
  };
  ~Node() {
    m_weights.clear();
    //m_old_weights.clear();
  };
                      utils::gen_rand());
    }
  }

  int GetInputSize() const {
    return m_num_inputs;
  }

  void SetInputSize(int num_inputs) {
    m_num_inputs = num_inputs;
  }

  double GetBias() const {
    return m_bias;
  }

  void SetBias(double bias) {
    m_bias = bias;
  }
@@ -66,7 +84,8 @@ public:
    return m_weights.size();
  }

  void GetOutput(const std::vector<double> &input, double * output) const {
  void GetInputInnerProdWithWeights(const std::vector<double> &input,
                                    double * output) const {
    assert(input.size() == m_weights.size());
    double inner_prod = std::inner_product(begin(input),
                                           end(input),
@@ -75,27 +94,37 @@ public:
    *output = inner_prod;
  }

  void GetFilteredOutput(const std::vector<double> &input, double * bool_output) {
    double inner_prod;
    GetOutput(input, &inner_prod);
#if USE_SIGMOID == 1
    double y = utils::sigmoid(inner_prod);
    *bool_output = (y > 0) ? true : false;
#else
    *bool_output = (inner_prod > 0) ? true : false;
#endif
  void GetOutputAfterSigmoid(const std::vector<double> &input,
                             double * output) const {
    double inner_prod = 0.0;
    GetInputInnerProdWithWeights(input, &inner_prod);
    *output = utils::sigmoid(inner_prod);
  }

  void GetBooleanOutput(const std::vector<double> &input,
                        bool * bool_output) const {
    double value;
    GetOutputAfterSigmoid(input, &value);
    *bool_output = (value > 0.5) ? true : false;
  };

  void UpdateWeights(const std::vector<double> &x,
                     double m_learning_rate,
                     double error) {
                     double error,
                     double learning_rate) {
    assert(x.size() == m_weights.size());
    for (size_t i = 0; i < m_weights.size(); i++)
      m_weights[i] += x[i] * m_learning_rate * error;
      m_weights[i] += x[i] * learning_rate * error;
  };

  void UpdateWeight(int weight_id,
                    double increment,
                    double learning_rate) {
    m_weights[weight_id] += learning_rate*increment;
  }

protected:
  int m_num_inputs;
  double m_bias;
  int m_num_inputs{ 0 };
  double m_bias{ 0.0 };
  std::vector<double> m_weights;
};

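The weight update used by Node::UpdateWeights above is the classic perceptron delta rule: each weight moves by input times learning rate times error. A minimal standalone sketch of that rule (illustration only, not code from this commit) is shown here, with a worked step for the sample (1, 1) and a bias input of 1.

// Standalone sketch of the delta rule applied in Node.h above.
#include <cassert>
#include <iostream>
#include <vector>

void UpdateWeights(std::vector<double> &w, const std::vector<double> &x,
                   double learning_rate, double error) {
  assert(w.size() == x.size());
  for (size_t i = 0; i < w.size(); ++i)
    w[i] += x[i] * learning_rate * error;  // w_i += x_i * lr * error
}

int main() {
  std::vector<double> w = { 0.5, 0.5, 0.5 };  // bias + two inputs, constant init 0.5
  std::vector<double> x = { 1.0, 1.0, 1.0 };  // sample (1, 1) with bias value 1
  UpdateWeights(w, x, 0.1, 1.0);              // target 1, predicted 0 -> error = 1
  for (double wi : w) std::cout << wi << " "; // prints 0.6 0.6 0.6
  std::cout << std::endl;
  return 0;
}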
262  src/NodeTest.cpp  Normal file
@@ -0,0 +1,262 @@
//============================================================================
// Name : NodeTest.cpp
// Author : David Nogueira
//============================================================================
#include "Node.h"
#include "Sample.h"
#include "Utils.h"
#include <stdio.h>
#include <stdlib.h>
#include <iostream>
#include <sstream>
#include <fstream>
#include <vector>
#include <algorithm>
#include "microunit.h"

namespace {
void Train(Node & node,
           const std::vector<TrainingSample> &training_sample_set_with_bias,
           double learning_rate,
           int max_iterations,
           bool use_constant_weight_init = true,
           double constant_weight_init = 0.5) {

  //initialize weight vector
  node.WeightInitialization(training_sample_set_with_bias[0].GetInputVectorSize(),
                            use_constant_weight_init,
                            constant_weight_init);

  std::cout << "Starting weights:\t";
  for (auto m_weightselement : node.GetWeights())
    std::cout << m_weightselement << "\t";
  std::cout << std::endl;

  for (int i = 0; i < max_iterations; i++) {
    int error_count = 0;
    for (auto & training_sample_with_bias : training_sample_set_with_bias) {
      bool prediction;
      node.GetBooleanOutput(training_sample_with_bias.input_vector(), &prediction);
      bool correct_output = training_sample_with_bias.output_vector()[0] > 0.5 ? true : false;
      if (prediction != correct_output) {
        error_count++;
        double error = (correct_output ? 1 : 0) - (prediction ? 1 : 0);
        node.UpdateWeights(training_sample_with_bias.input_vector(),
                           learning_rate,
                           error);
      }
    }
    if (error_count == 0) break;
  }

  std::cout << "Final weights:\t\t";
  for (auto m_weightselement : node.GetWeights())
    std::cout << m_weightselement << "\t";
  std::cout << std::endl;
};
}

UNIT(LearnAND) {
  std::cout << "Train AND function with Node." << std::endl;

  std::vector<TrainingSample> training_set =
  {
    { { 0, 0 },{0.0} },
    { { 0, 1 },{0.0} },
    { { 1, 0 },{0.0} },
    { { 1, 1 },{1.0} }
  };
  bool bias_already_in = false;
  std::vector<TrainingSample> training_sample_set_with_bias(training_set);
  //set up bias
  if (!bias_already_in) {
    for (auto & training_sample_with_bias : training_sample_set_with_bias) {
      training_sample_with_bias.AddBiasValue(1);
    }
  }

  size_t num_examples = training_sample_set_with_bias.size();
  size_t num_features = training_sample_set_with_bias[0].GetInputVectorSize();
  Node my_node(num_features);
  Train(my_node, training_sample_set_with_bias, 0.1, 100);

  for (const auto & training_sample : training_sample_set_with_bias) {
    bool class_id;
    my_node.GetBooleanOutput(training_sample.input_vector(), &class_id);
    bool correct_output = training_sample.output_vector()[0] > 0 ? true : false;
    ASSERT_TRUE(class_id == correct_output);
  }
  std::cout << "Trained with success." << std::endl;
  std::cout << std::endl;
}

UNIT(LearnNAND) {
  std::cout << "Train NAND function with Node." << std::endl;

  std::vector<TrainingSample> training_set =
  {
    { { 0, 0 },{1.0} },
    { { 0, 1 },{1.0} },
    { { 1, 0 },{1.0} },
    { { 1, 1 },{0.0} }
  };
  bool bias_already_in = false;
  std::vector<TrainingSample> training_sample_set_with_bias(training_set);
  //set up bias
  if (!bias_already_in) {
    for (auto & training_sample_with_bias : training_sample_set_with_bias) {
      training_sample_with_bias.AddBiasValue(1);
    }
  }
  size_t num_examples = training_sample_set_with_bias.size();
  size_t num_features = training_sample_set_with_bias[0].GetInputVectorSize();
  Node my_node(num_features);
  Train(my_node, training_sample_set_with_bias, 0.1, 100);

  for (const auto & training_sample : training_sample_set_with_bias) {
    bool class_id;
    my_node.GetBooleanOutput(training_sample.input_vector(), &class_id);
    bool correct_output = training_sample.output_vector()[0] > 0 ? true : false;
    ASSERT_TRUE(class_id == correct_output);
  }
  std::cout << "Trained with success." << std::endl;
  std::cout << std::endl;
}

UNIT(LearnOR) {
  std::cout << "Train OR function with Node." << std::endl;

  std::vector<TrainingSample> training_set =
  {
    { { 0, 0 },{0.0} },
    { { 0, 1 },{1.0} },
    { { 1, 0 },{1.0} },
    { { 1, 1 },{1.0} }
  };
  bool bias_already_in = false;
  std::vector<TrainingSample> training_sample_set_with_bias(training_set);
  //set up bias
  if (!bias_already_in) {
    for (auto & training_sample_with_bias : training_sample_set_with_bias) {
      training_sample_with_bias.AddBiasValue(1);
    }
  }
  size_t num_examples = training_sample_set_with_bias.size();
  size_t num_features = training_sample_set_with_bias[0].GetInputVectorSize();
  Node my_node(num_features);
  Train(my_node, training_sample_set_with_bias, 0.1, 100);

  for (const auto & training_sample : training_sample_set_with_bias) {
    bool class_id;
    my_node.GetBooleanOutput(training_sample.input_vector(), &class_id);
    bool correct_output = training_sample.output_vector()[0] > 0 ? true : false;
    ASSERT_TRUE(class_id == correct_output);
  }
  std::cout << "Trained with success." << std::endl;
  std::cout << std::endl;
}
UNIT(LearnNOR) {
  std::cout << "Train NOR function with Node." << std::endl;

  std::vector<TrainingSample> training_set =
  {
    { { 0, 0 },{1.0} },
    { { 0, 1 },{0.0} },
    { { 1, 0 },{0.0} },
    { { 1, 1 },{0.0} }
  };
  bool bias_already_in = false;
  std::vector<TrainingSample> training_sample_set_with_bias(training_set);
  //set up bias
  if (!bias_already_in) {
    for (auto & training_sample_with_bias : training_sample_set_with_bias) {
      training_sample_with_bias.AddBiasValue(1);
    }
  }
  size_t num_examples = training_sample_set_with_bias.size();
  size_t num_features = training_sample_set_with_bias[0].GetInputVectorSize();
  Node my_node(num_features);
  Train(my_node, training_sample_set_with_bias, 0.1, 100);

  for (const auto & training_sample : training_sample_set_with_bias) {
    bool class_id;
    my_node.GetBooleanOutput(training_sample.input_vector(), &class_id);
    bool correct_output = training_sample.output_vector()[0] > 0 ? true : false;
    ASSERT_TRUE(class_id == correct_output);
  }
  std::cout << "Trained with success." << std::endl;
  std::cout << std::endl;
}

UNIT(LearnNOT) {
  std::cout << "Train NOT function with Node." << std::endl;

  std::vector<TrainingSample> training_set =
  {
    { { 0 },{1.0} },
    { { 1 },{0.0}}
  };
  bool bias_already_in = false;
  std::vector<TrainingSample> training_sample_set_with_bias(training_set);
  //set up bias
  if (!bias_already_in) {
    for (auto & training_sample_with_bias : training_sample_set_with_bias) {
      training_sample_with_bias.AddBiasValue(1);
    }
  }
  size_t num_examples = training_sample_set_with_bias.size();
  size_t num_features = training_sample_set_with_bias[0].GetInputVectorSize();
  Node my_node(num_features);
  Train(my_node, training_sample_set_with_bias, 0.1, 100);

  for (const auto & training_sample : training_sample_set_with_bias) {
    bool class_id;
    my_node.GetBooleanOutput(training_sample.input_vector(), &class_id);
    bool correct_output = training_sample.output_vector()[0] > 0 ? true : false;
    ASSERT_TRUE(class_id == correct_output);
  }
  std::cout << "Trained with success." << std::endl;
  std::cout << std::endl;
}

UNIT(LearnXOR) {
  std::cout << "Train XOR function with Node." << std::endl;

  std::vector<TrainingSample> training_set =
  {
    { { 0, 0 },{0.0} },
    { { 0, 1 },{1.0} },
    { { 1, 0 },{1.0} },
    { { 1, 1 },{0.0} }
  };
  bool bias_already_in = false;
  std::vector<TrainingSample> training_sample_set_with_bias(training_set);
  //set up bias
  if (!bias_already_in) {
    for (auto & training_sample_with_bias : training_sample_set_with_bias) {
      training_sample_with_bias.AddBiasValue(1);
    }
  }
  size_t num_examples = training_sample_set_with_bias.size();
  size_t num_features = training_sample_set_with_bias[0].GetInputVectorSize();
  Node my_node(num_features);
  Train(my_node, training_sample_set_with_bias, 0.1, 100);

  for (const auto & training_sample : training_sample_set_with_bias) {
    bool class_id;
    my_node.GetBooleanOutput(training_sample.input_vector(), &class_id);
    bool correct_output = training_sample.output_vector()[0] > 0 ? true : false;
    if (class_id != correct_output) {
      std::cout << "Failed to train. " <<
        " A simple perceptron cannot learn the XOR function." << std::endl;
      FAIL();
    }
  }
  std::cout << "Trained with success." << std::endl;
  std::cout << std::endl;
}

int main() {
  microunit::UnitTester::Run();
  return 0;
}

35  src/Sample.h
@@ -23,7 +23,21 @@ public:
  void AddBiasValue(double bias_value) {
    m_input_vector.insert(m_input_vector.begin(), bias_value);
  }
  friend std::ostream & operator<<(std::ostream &stream, Sample const & obj) {
    obj.PrintMyself(stream);
    return stream;
  };
protected:
  virtual void PrintMyself(std::ostream& stream) const {
    stream << "Input vector: [";
    for (int i = 0; i < m_input_vector.size(); i++) {
      if (i != 0)
        stream << ", ";
      stream << m_input_vector[i];
    }
    stream << "]";
  }

  std::vector<double> m_input_vector;
};

@@ -41,7 +55,28 @@ public:
  size_t GetOutputVectorSize() const {
    return m_output_vector.size();
  }

protected:
  virtual void PrintMyself(std::ostream& stream) const {
    stream << "Input vector: [";
    for (int i = 0; i < m_input_vector.size(); i++) {
      if (i != 0)
        stream << ", ";
      stream << m_input_vector[i];
    }
    stream << "]";

    stream << "; ";

    stream << "Output vector: [";
    for (int i = 0; i < m_output_vector.size(); i++) {
      if (i != 0)
        stream << ", ";
      stream << m_output_vector[i];
    }
    stream << "]";
  }

  std::vector<double> m_output_vector;
};

30  src/Utils.h
@@ -5,6 +5,7 @@
#ifndef UTILS_H
#define UTILS_H

#include "Chrono.h"
#include <stdlib.h>
#include <math.h>
#include <numeric>
@@ -47,7 +48,7 @@ inline double deriv_sigmoid(double x) {
  return sigmoid(x)*(1 - sigmoid(x));
};

void Softmax(std::vector<double> *output) {
inline void Softmax(std::vector<double> *output) {
  size_t num_elements = output->size();
  std::vector<double> exp_output(num_elements);
  double exp_total = 0.0;
@@ -60,35 +61,10 @@ void Softmax(std::vector<double> *output) {
  }
}

void GetIdMaxElement(const std::vector<double> &output, size_t * class_id) {
inline void GetIdMaxElement(const std::vector<double> &output, size_t * class_id) {
  *class_id = std::distance(output.begin(),
                            std::max_element(output.begin(),
                                             output.end()));
}

class Chronometer {
public:
  Chronometer() {
    time_span = std::chrono::steady_clock::duration::zero();
  };
  virtual ~Chronometer() {};

  void GetTime() {
    clock_begin = std::chrono::steady_clock::now();
  }
  void StopTime() {
    std::chrono::steady_clock::time_point clock_end = std::chrono::steady_clock::now();
    time_span += clock_end - clock_begin;
  }
  //Return elapsed time in seconds
  double GetElapsedTime() {
    return double(time_span.count()) *
      std::chrono::steady_clock::period::num / std::chrono::steady_clock::period::den;
  }
protected:
  std::chrono::steady_clock::time_point clock_begin;
  std::chrono::steady_clock::duration time_span;
};

}
#endif // UTILS_H
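The hunk above marks Softmax and GetIdMaxElement as inline so Utils.h can be included from multiple translation units. For reference, a minimal standalone sketch of the same exponentiate-and-normalize computation is shown below; it is not the repo's implementation, and the max-subtraction step is a common numerical-stability tweak added here as an assumption, not something this commit necessarily does.

// Minimal softmax sketch: exp each score, normalize by the total.
#include <algorithm>
#include <cmath>
#include <iostream>
#include <vector>

void Softmax(std::vector<double> *output) {
  double max_val = *std::max_element(output->begin(), output->end());
  double exp_total = 0.0;
  for (double &v : *output) {
    v = std::exp(v - max_val);  // shift by max for numerical stability (added here)
    exp_total += v;
  }
  for (double &v : *output) v /= exp_total;  // probabilities summing to 1
}

int main() {
  std::vector<double> scores = { 1.0, 2.0, 3.0 };
  Softmax(&scores);
  for (double p : scores) std::cout << p << " ";  // ~0.09 0.24 0.67
  std::cout << std::endl;
  return 0;
}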