-
-
Notifications
You must be signed in to change notification settings - Fork 27
Expand file tree
/
Copy pathProgram.cs
More file actions
106 lines (95 loc) · 3.94 KB
/
Program.cs
File metadata and controls
106 lines (95 loc) · 3.94 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
using Accord.Neuro;
using Accord.Neuro.ActivationFunctions;
using Accord.Neuro.Learning;
using Accord.Neuro.Networks;
using Accord.Math;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using AForge.Neuro.Learning;
using System.IO;
namespace DeepLearning
{
    /// <summary>
    /// Demo: trains an Accord.NET Deep Belief Network on an ASCII-digits
    /// dataset. Hidden layers are pre-trained unsupervised with contrastive
    /// divergence, the whole network is then fine-tuned with supervised
    /// back-propagation, and classification accuracy is reported on a
    /// held-out test split.
    /// </summary>
    class Program
    {
        static void Main(string[] args)
        {
            // Load ascii digits dataset (DataManager is project-local; it
            // returns inputs and fills the matching expected outputs).
            double[][] outputs;
            double[][] inputs = DataManager.Load(@"../../../data/data.txt", out outputs);

            // The first 500 data rows will be for training. The rest will be for testing.
            double[][] testInputs = inputs.Skip(500).ToArray();
            double[][] testOutputs = outputs.Skip(500).ToArray();
            inputs = inputs.Take(500).ToArray();
            outputs = outputs.Take(500).ToArray();

            // Setup the deep belief network (input size -> 10 -> 10) and
            // initialize with small random Gaussian weights.
            DeepBeliefNetwork network = new DeepBeliefNetwork(inputs.First().Length, 10, 10);
            new GaussianWeights(network, 0.1).Randomize();
            network.UpdateVisibleWeights();

            // Setup the layer-wise unsupervised (pre-training) algorithm.
            DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(network)
            {
                Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
                {
                    LearningRate = 0.1,
                    Momentum = 0.5,
                    Decay = 0.001,
                }
            };

            // Create mini-batches (~100 rows each) to speed learning.
            int batchCount = Math.Max(1, inputs.Length / 100);
            int[] groups = Accord.Statistics.Tools.RandomGroups(inputs.Length, batchCount);
            double[][][] batches = inputs.Subgroups(groups);

            // Unsupervised learning on each hidden layer, except for the output layer.
            for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
            {
                teacher.LayerIndex = layerIndex;
                double[][][] layerData = teacher.GetLayerInput(batches);
                for (int i = 0; i < 200; i++)
                {
                    double error = teacher.RunEpoch(layerData) / inputs.Length;
                    if (i % 10 == 0)
                    {
                        Console.WriteLine(i + ", Error = " + error);
                    }
                }
            }

            // Supervised learning on the entire network, to provide output classification.
            var teacher2 = new BackPropagationLearning(network)
            {
                LearningRate = 0.1,
                Momentum = 0.5
            };

            for (int i = 0; i < 500; i++)
            {
                double error = teacher2.RunEpoch(inputs, outputs) / inputs.Length;
                if (i % 10 == 0)
                {
                    Console.WriteLine(i + ", Error = " + error);
                }
            }

            // Test the resulting accuracy on the held-out rows.
            // BUG FIX: the original loop bound and the reported denominator
            // were inputs.Length (the 500 *training* rows) while the body
            // indexed testInputs/testOutputs. That overruns the test arrays
            // whenever the dataset has fewer than 1000 rows, and otherwise
            // reports accuracy against the wrong total. Use the test split's
            // own length for both.
            int correct = 0;
            for (int i = 0; i < testInputs.Length; i++)
            {
                double[] outputValues = network.Compute(testInputs[i]);
                if (DataManager.FormatOutputResult(outputValues) == DataManager.FormatOutputResult(testOutputs[i]))
                {
                    correct++;
                }
            }

            Console.WriteLine("Correct " + correct + "/" + testInputs.Length + ", "
                + Math.Round((double)correct / testInputs.Length * 100, 2) + "%");
            Console.Write("Press any key to quit ..");
            Console.ReadKey();
        }
    }
}