You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
trainer.Train(x, new Volume(new[] { 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 }, new Shape(1, 1, 10, 1))); // train the network, specifying that x is class zero
82
-
83
-
var prob2 = net.Forward(x);
84
-
Console.WriteLine("probability that x is class 0: " + prob2.Get(0));
85
-
// now prints 0.50374, slightly higher than the previous 0.50101: the network's
86
-
// weights have been adjusted by the Trainer to give a higher probability to
87
-
// the class we trained the network with (zero)
52
+
using ConvNetSharp.Core.Training.Double;
53
+
using ConvNetSharp.Volume;
54
+
using ConvNetSharp.Volume.Double;
55
+
56
+
namespace MinimalExample
57
+
{
58
+
internal class Program
59
+
{
60
+
private static void Main()
61
+
{
62
+
// specifies a 2-layer neural network with one hidden layer of 20 neurons
63
+
var net = new Net&lt;double&gt;();
64
+
65
+
// input layer declares size of input. here: 2-D data
66
+
// ConvNetJS works on 3-dimensional volumes (width, height, depth), but if you're not dealing with images
67
+
// then the first two dimensions (width, height) will always be kept at size 1
68
+
net.AddLayer(new InputLayer(1, 1, 2));
69
+
70
+
// declare 20 neurons
71
+
net.AddLayer(new FullyConnLayer(20));
72
+
73
+
// declare a ReLU (rectified linear unit non-linearity)
74
+
net.AddLayer(new ReluLayer());
75
+
76
+
// declare a fully connected layer that will be used by the softmax layer
77
+
net.AddLayer(new FullyConnLayer(10));
78
+
79
+
// declare the linear classifier on top of the previous hidden layer
80
+
net.AddLayer(new SoftmaxLayer(10));
81
+
82
+
// forward a random data point through the network
trainer.Train(x, BuilderInstance.Volume.From(new[] { 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 }, new Shape(1, 1, 10, 1))); // train the network, specifying that x is class zero
92
+
93
+
var prob2 = net.Forward(x);
94
+
Console.WriteLine("probability that x is class 0: " + prob2.Get(0));
95
+
// now prints 0.50374, slightly higher than the previous 0.50101: the network's
96
+
// weights have been adjusted by the Trainer to give a higher probability to
97
+
// the class we trained the network with (zero)
98
+
}
99
+
}
100
+
}
88
101
```
89
102
90
103
## Fluent API (see [FluentMnistDemo](https://github.com/cbovar/ConvNetSharp/tree/master/Examples/FluentMnistDemo))
0 commit comments