
Commit 9d28974

update minimal example
1 parent e271a44 commit 9d28974

1 file changed

README.md

Lines changed: 49 additions & 36 deletions
@@ -49,42 +49,55 @@ Tag [v0.2.0](https://github.com/cbovar/ConvNetSharp/tree/v0.2.0) was created jus
 Here's a minimal example of defining a **2-layer neural network** and training
 it on a single data point:
 ```c#
-// specifies a 2-layer neural network with one hidden layer of 20 neurons
-var net = new Net<double>();
-
-// input layer declares size of input. here: 2-D data
-// ConvNetSharp works on 3-dimensional volumes (width, height, depth), but if you're not dealing with images
-// then the first two dimensions (width, height) will always be kept at size 1
-net.AddLayer(new InputLayer(1, 1, 2));
-
-// declare 20 neurons
-net.AddLayer(new FullyConnLayer(20));
-
-// declare a ReLU (rectified linear unit non-linearity)
-net.AddLayer(new ReluLayer());
-
-// declare a fully connected layer that will be used by the softmax layer
-net.AddLayer(new FullyConnLayer(10));
-
-// declare the linear classifier on top of the previous hidden layer
-net.AddLayer(new SoftmaxLayer(10));
-
-// forward a random data point through the network
-var x = new Volume(new[] { 0.3, -0.5 }, new Shape(2));
-
-var prob = net.Forward(x);
-
-// prob is a Volume. Volumes have a property Weights that stores the raw data, and WeightGradients that stores gradients
-Console.WriteLine("probability that x is class 0: " + prob.Get(0)); // prints e.g. 0.50101
-
-var trainer = new SgdTrainer(net) { LearningRate = 0.01, L2Decay = 0.001 };
-trainer.Train(x, new Volume(new[] { 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 }, new Shape(1, 1, 10, 1))); // train the network, specifying that x is class zero
-
-var prob2 = net.Forward(x);
-Console.WriteLine("probability that x is class 0: " + prob2.Get(0));
-// now prints 0.50374, slightly higher than the previous 0.50101: the network's
-// weights have been adjusted by the Trainer to give a higher probability to
-// the class we trained the network with (zero)
+using ConvNetSharp.Core.Training.Double;
+using ConvNetSharp.Volume;
+using ConvNetSharp.Volume.Double;
+
+namespace MinimalExample
+{
+    internal class Program
+    {
+        private static void Main()
+        {
+            // specifies a 2-layer neural network with one hidden layer of 20 neurons
+            var net = new Net<double>();
+
+            // input layer declares size of input. here: 2-D data
+            // ConvNetSharp works on 3-dimensional volumes (width, height, depth), but if you're not dealing with images
+            // then the first two dimensions (width, height) will always be kept at size 1
+            net.AddLayer(new InputLayer(1, 1, 2));
+
+            // declare 20 neurons
+            net.AddLayer(new FullyConnLayer(20));
+
+            // declare a ReLU (rectified linear unit non-linearity)
+            net.AddLayer(new ReluLayer());
+
+            // declare a fully connected layer that will be used by the softmax layer
+            net.AddLayer(new FullyConnLayer(10));
+
+            // declare the linear classifier on top of the previous hidden layer
+            net.AddLayer(new SoftmaxLayer(10));
+
+            // forward a random data point through the network
+            var x = BuilderInstance.Volume.From(new[] { 0.3, -0.5 }, new Shape(2));
+
+            var prob = net.Forward(x);
+
+            // prob is a Volume. Volumes have a property Weights that stores the raw data, and WeightGradients that stores gradients
+            Console.WriteLine("probability that x is class 0: " + prob.Get(0)); // prints e.g. 0.50101
+
+            var trainer = new SgdTrainer(net) { LearningRate = 0.01, L2Decay = 0.001 };
+            trainer.Train(x, BuilderInstance.Volume.From(new[] { 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0 }, new Shape(1, 1, 10, 1))); // train the network, specifying that x is class zero
+
+            var prob2 = net.Forward(x);
+            Console.WriteLine("probability that x is class 0: " + prob2.Get(0));
+            // now prints 0.50374, slightly higher than the previous 0.50101: the network's
+            // weights have been adjusted by the Trainer to give a higher probability to
+            // the class we trained the network with (zero)
+        }
+    }
+}
 ```

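As committed, the updated listing only pulls in the training and volume namespaces, so it presumably needs a few more using directives before it compiles as a console program. The sketch below is one plausible, self-contained setup, not part of this commit: `System` (for `Console`), `ConvNetSharp.Core` (for `Net<T>`), and `ConvNetSharp.Core.Layers.Double` (for the double-precision layer types) are assumptions about where those types live.

```c#
// Sketch only: the first three usings are assumptions; the last three come from the diff above.
using System;                             // Console
using ConvNetSharp.Core;                  // Net<T> (assumed namespace)
using ConvNetSharp.Core.Layers.Double;    // InputLayer, FullyConnLayer, ReluLayer, SoftmaxLayer (assumed namespace)
using ConvNetSharp.Core.Training.Double;  // SgdTrainer
using ConvNetSharp.Volume;                // Shape
using ConvNetSharp.Volume.Double;         // BuilderInstance for double-precision volumes

namespace MinimalExample
{
    internal class Program
    {
        private static void Main()
        {
            // Volumes are now built through BuilderInstance instead of by
            // constructing Volume directly, as the old listing did.
            var x = BuilderInstance.Volume.From(new[] { 0.3, -0.5 }, new Shape(2));
            Console.WriteLine(x.Get(0)); // should print 0.3
        }
    }
}
```

The rest of the updated example (the layers, the trainer, and the two forward passes) drops into `Main` unchanged.
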
## Fluent API (see [FluentMnistDemo](https://github.com/cbovar/ConvNetSharp/tree/master/Examples/FluentMnistDemo))
