Skip to content

Commit a8dae8d

Browse files
authored
Merge pull request #119 from cbovar/Simplify_shape
Simplify shape
2 parents 7dcdab8 + 2634de7 commit a8dae8d

46 files changed

Lines changed: 731 additions & 656 deletions

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

src/ConvNetSharp.Core.Tests/FullyConnLayerTests.cs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -49,10 +49,10 @@ public void Forward()
4949
layer.Init(inputWidth, inputHeight, inputDepth);
5050

5151
// Make sure the filter shape has a flattened input shape
52-
Assert.AreEqual(1, layer.Filters.Shape.GetDimension(0));
53-
Assert.AreEqual(1, layer.Filters.Shape.GetDimension(1));
54-
Assert.AreEqual(8, layer.Filters.Shape.GetDimension(2));
55-
Assert.AreEqual(2, layer.Filters.Shape.GetDimension(3));
52+
Assert.AreEqual(1, layer.Filters.Shape.Dimensions[0]);
53+
Assert.AreEqual(1, layer.Filters.Shape.Dimensions[1]);
54+
Assert.AreEqual(8, layer.Filters.Shape.Dimensions[2]);
55+
Assert.AreEqual(2, layer.Filters.Shape.Dimensions[3]);
5656

5757
for (var i = 0; i < 8; i++)
5858
{

src/ConvNetSharp.Core.Tests/PoolLayerTests.cs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -29,10 +29,10 @@ public void Forward()
2929
var input = BuilderInstance.Volume.From(data, new Shape(inputWidth, inputHeight, inputDepth, inputBatchSize));
3030
layer.DoForward(input);
3131

32-
Assert.AreEqual(2, layer.OutputActivation.Shape.GetDimension(0));
33-
Assert.AreEqual(2, layer.OutputActivation.Shape.GetDimension(1));
34-
Assert.AreEqual(4, layer.OutputActivation.Shape.GetDimension(2));
35-
Assert.AreEqual(4, layer.OutputActivation.Shape.GetDimension(3));
32+
Assert.AreEqual(2, layer.OutputActivation.Shape.Dimensions[0]);
33+
Assert.AreEqual(2, layer.OutputActivation.Shape.Dimensions[1]);
34+
Assert.AreEqual(4, layer.OutputActivation.Shape.Dimensions[2]);
35+
Assert.AreEqual(4, layer.OutputActivation.Shape.Dimensions[3]);
3636

3737
Assert.AreEqual(5.0, layer.OutputActivation.Get(0,0,0,0));
3838
Assert.AreEqual(21.0, layer.OutputActivation.Get(0, 0, 1, 0));

src/ConvNetSharp.Core.Tests/SoftMaxLayerTests.cs

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -30,10 +30,10 @@ public SoftmaxLayerTests()
3030
public void OutputIsNormalized()
3131
{
3232
var output = this.layer.DoForward(input, true);
33-
Assert.AreEqual(1, output.Shape.GetDimension(0));
34-
Assert.AreEqual(1, output.Shape.GetDimension(1));
35-
Assert.AreEqual(4, output.Shape.GetDimension(2));
36-
Assert.AreEqual(3, output.Shape.GetDimension(3));
33+
Assert.AreEqual(1, output.Shape.Dimensions[0]);
34+
Assert.AreEqual(1, output.Shape.Dimensions[1]);
35+
Assert.AreEqual(4, output.Shape.Dimensions[2]);
36+
Assert.AreEqual(3, output.Shape.Dimensions[3]);
3737

3838
var values = output.ToArray();
3939
Assert.AreEqual(0.25, values[0]);

src/ConvNetSharp.Core/Fluent/FluentNet.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -92,8 +92,8 @@ public int[] GetPrediction()
9292
}
9393

9494
var activation = softmaxLayer.OutputActivation;
95-
var N = activation.Shape.GetDimension(3);
96-
var C = activation.Shape.GetDimension(2);
95+
var N = activation.Shape.Dimensions[3];
96+
var C = activation.Shape.Dimensions[2];
9797
var result = new int[N];
9898

9999
for (var n = 0; n < N; n++)

src/ConvNetSharp.Core/Layers/FullyConnLayer.cs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -63,8 +63,8 @@ public override void Backward(Volume<T> outputGradient)
6363
this.OutputActivationGradients = outputGradient;
6464

6565
// compute gradient wrt weights and data
66-
using (var reshapedInput = this.InputActivation.ReShape(1, 1, -1, this.InputActivation.Shape.GetDimension(3)))
67-
using (var reshapedInputGradients = this.InputActivationGradients.ReShape(1, 1, -1, this.InputActivationGradients.Shape.GetDimension(3)))
66+
using (var reshapedInput = this.InputActivation.ReShape(1, 1, -1, this.InputActivation.Shape.Dimensions[3]))
67+
using (var reshapedInputGradients = this.InputActivationGradients.ReShape(1, 1, -1, this.InputActivationGradients.Shape.Dimensions[3]))
6868
{
6969
reshapedInput.ConvolveGradient(
7070
this.Filters, this.OutputActivationGradients,
@@ -77,7 +77,7 @@ public override void Backward(Volume<T> outputGradient)
7777

7878
protected override Volume<T> Forward(Volume<T> input, bool isTraining = false)
7979
{
80-
using (var reshapedInput = input.ReShape(1, 1, -1, input.Shape.GetDimension(3)))
80+
using (var reshapedInput = input.ReShape(1, 1, -1, input.Shape.Dimensions[3]))
8181
{
8282
reshapedInput.DoConvolution(this.Filters, 0, 1, this.OutputActivation);
8383
this.OutputActivation.DoAdd(this.Bias, this.OutputActivation);

src/ConvNetSharp.Core/Layers/LayerBase.cs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,7 @@ public virtual Volume<T> DoForward(Volume<T> input, bool isTraining = false)
6767

6868
this.InputActivation = input;
6969

70-
var outputShape = new Shape(this.OutputWidth, this.OutputHeight, this.OutputDepth, input.Shape.DimensionCount == 4 ? input.Shape.GetDimension(3) : 1);
70+
var outputShape = new Shape(this.OutputWidth, this.OutputHeight, this.OutputDepth, input.Shape.Dimensions[3]);
7171

7272
if (this.OutputActivation == null ||
7373
!this.OutputActivation.Shape.Equals(outputShape))

src/ConvNetSharp.Core/Layers/RegressionLayer.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ public override void Backward(Volume<T> outputGradient)
4040
public override void Backward(Volume<T> y, out T loss)
4141
{
4242
var reshape = y.ReShape(new Shape(1, 1, -1, Shape.Keep));
43-
var dy = this.InputActivationGradients.ReShape(this.OutputActivation.Shape.Dimensions.ToArray());
43+
var dy = this.InputActivationGradients.ReShape(this.OutputActivation.Shape.Dimensions);
4444
reshape.DoSubtractFrom(this.OutputActivation, dy);
4545

4646
if (this._result == null)
@@ -54,7 +54,7 @@ public override void Backward(Volume<T> y, out T loss)
5454
var half = (T)Convert.ChangeType(0.5, typeof(T));
5555
this._result.DoMultiply(this._result, half); // dy * dy * 0.5
5656
this._result.DoSum(this._sum); // sum over all batch
57-
var batchSize = y.Shape.GetDimension(3);
57+
var batchSize = y.Shape.Dimensions[3];
5858
loss = Ops<T>.Divide(this._sum.Get(0), Ops<T>.Cast(batchSize)); // average
5959
}
6060

src/ConvNetSharp.Core/Layers/SoftMaxLayer.cs

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -26,17 +26,17 @@ public SoftmaxLayer(int classCount)
2626
public override void Backward(Volume<T> y, out T loss)
2727
{
2828
// input gradient = pi - yi
29-
y.DoSubtractFrom(this.OutputActivation, this.InputActivationGradients.ReShape(this.OutputActivation.Shape.Dimensions.ToArray()));
29+
y.DoSubtractFrom(this.OutputActivation, this.InputActivationGradients.ReShape(this.OutputActivation.Shape.Dimensions));
3030

3131
//loss is the class negative log likelihood
3232
loss = Ops<T>.Zero;
33-
for (var n = 0; n < y.Shape.GetDimension(3); n++)
33+
for (var n = 0; n < y.Shape.Dimensions[3]; n++)
3434
{
35-
for (var d = 0; d < y.Shape.GetDimension(2); d++)
35+
for (var d = 0; d < y.Shape.Dimensions[2]; d++)
3636
{
37-
for (var h = 0; h < y.Shape.GetDimension(1); h++)
37+
for (var h = 0; h < y.Shape.Dimensions[1]; h++)
3838
{
39-
for (var w = 0; w < y.Shape.GetDimension(0); w++)
39+
for (var w = 0; w < y.Shape.Dimensions[0]; w++)
4040
{
4141
var expected = y.Get(w, h, d, n);
4242
var actual = this.OutputActivation.Get(w, h, d, n);

src/ConvNetSharp.Core/Net.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -68,8 +68,8 @@ public int[] GetPrediction()
6868
}
6969

7070
var activation = softmaxLayer.OutputActivation;
71-
var N = activation.Shape.GetDimension(3);
72-
var C = activation.Shape.GetDimension(2);
71+
var N = activation.Shape.Dimensions[3];
72+
var C = activation.Shape.Dimensions[2];
7373
var result = new int[N];
7474

7575
for (var n = 0; n < N; n++)

src/ConvNetSharp.Core/Training/TrainerBase.cs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -27,15 +27,15 @@ protected virtual void Backward(Volume<T> y)
2727
{
2828
var chrono = Stopwatch.StartNew();
2929

30-
var batchSize = y.Shape.GetDimension(3);
30+
var batchSize = y.Shape.Dimensions[3];
3131
this.Loss = Ops<T>.Divide(this.Net.Backward(y), Ops<T>.Cast(batchSize));
3232
this.BackwardTimeMs = chrono.Elapsed.TotalMilliseconds/batchSize;
3333
}
3434

3535
private void Forward(Volume<T> x)
3636
{
3737
var chrono = Stopwatch.StartNew();
38-
var batchSize = x.Shape.GetDimension(3);
38+
var batchSize = x.Shape.Dimensions[3];
3939
this.Net.Forward(x, true); // also set the flag that lets the net know we're just training
4040
this.ForwardTimeMs = chrono.Elapsed.TotalMilliseconds/batchSize;
4141
}
@@ -46,7 +46,7 @@ public virtual void Train(Volume<T> x, Volume<T> y)
4646

4747
Backward(y);
4848

49-
var batchSize = x.Shape.GetDimension(3);
49+
var batchSize = x.Shape.Dimensions[3];
5050
var chrono = Stopwatch.StartNew();
5151
TrainImplem();
5252
this.UpdateWeightsTimeMs = chrono.Elapsed.TotalMilliseconds/batchSize;

0 commit comments

Comments (0)