
Commit a8dae8d

Merge pull request #119 from cbovar/Simplify_shape
Simplify shape
2 parents: 7dcdab8 + 2634de7


46 files changed: +731 -656 lines changed
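The change is mechanical across the 46 files: every shape.GetDimension(i) read becomes an indexer read of shape.Dimensions[i], and the .ToArray() conversions that the old accessor forced onto ReShape calls disappear. Below is a minimal sketch of the pattern, assuming Shape lives in the ConvNetSharp.Volume namespace, keeps the four-int constructor used in the tests, and exposes an integer-indexable Dimensions member as the diff suggests; the variable names are illustrative only.

// Minimal sketch of the calling-convention change applied throughout this commit.
// Assumptions: Shape is in ConvNetSharp.Volume, has a (w, h, depth, batch) constructor,
// and Dimensions supports integer indexing. Names below are illustrative only.
using ConvNetSharp.Volume;

var shape = new Shape(28, 28, 1, 32); // width, height, depth, batch size

// Before this commit: var batchSize = shape.GetDimension(3);
// After this commit:
var batchSize = shape.Dimensions[3];

System.Console.WriteLine(batchSize); // prints 32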

src/ConvNetSharp.Core.Tests/FullyConnLayerTests.cs

Lines changed: 4 additions & 4 deletions

@@ -49,10 +49,10 @@ public void Forward()
 layer.Init(inputWidth, inputHeight, inputDepth);

 // Make sure filter shape had flatten input shape
-Assert.AreEqual(1, layer.Filters.Shape.GetDimension(0));
-Assert.AreEqual(1, layer.Filters.Shape.GetDimension(1));
-Assert.AreEqual(8, layer.Filters.Shape.GetDimension(2));
-Assert.AreEqual(2, layer.Filters.Shape.GetDimension(3));
+Assert.AreEqual(1, layer.Filters.Shape.Dimensions[0]);
+Assert.AreEqual(1, layer.Filters.Shape.Dimensions[1]);
+Assert.AreEqual(8, layer.Filters.Shape.Dimensions[2]);
+Assert.AreEqual(2, layer.Filters.Shape.Dimensions[3]);

 for (var i = 0; i < 8; i++)
 {

src/ConvNetSharp.Core.Tests/PoolLayerTests.cs

Lines changed: 4 additions & 4 deletions

@@ -29,10 +29,10 @@ public void Forward()
 var input = BuilderInstance.Volume.From(data, new Shape(inputWidth, inputHeight, inputDepth, inputBatchSize));
 layer.DoForward(input);

-Assert.AreEqual(2, layer.OutputActivation.Shape.GetDimension(0));
-Assert.AreEqual(2, layer.OutputActivation.Shape.GetDimension(1));
-Assert.AreEqual(4, layer.OutputActivation.Shape.GetDimension(2));
-Assert.AreEqual(4, layer.OutputActivation.Shape.GetDimension(3));
+Assert.AreEqual(2, layer.OutputActivation.Shape.Dimensions[0]);
+Assert.AreEqual(2, layer.OutputActivation.Shape.Dimensions[1]);
+Assert.AreEqual(4, layer.OutputActivation.Shape.Dimensions[2]);
+Assert.AreEqual(4, layer.OutputActivation.Shape.Dimensions[3]);

 Assert.AreEqual(5.0, layer.OutputActivation.Get(0,0,0,0));
 Assert.AreEqual(21.0, layer.OutputActivation.Get(0, 0, 1, 0));

src/ConvNetSharp.Core.Tests/SoftMaxLayerTests.cs

Lines changed: 4 additions & 4 deletions

@@ -30,10 +30,10 @@ public SoftmaxLayerTests()
 public void OutputIsNormalized()
 {
 var output = this.layer.DoForward(input, true);
-Assert.AreEqual(1, output.Shape.GetDimension(0));
-Assert.AreEqual(1, output.Shape.GetDimension(1));
-Assert.AreEqual(4, output.Shape.GetDimension(2));
-Assert.AreEqual(3, output.Shape.GetDimension(3));
+Assert.AreEqual(1, output.Shape.Dimensions[0]);
+Assert.AreEqual(1, output.Shape.Dimensions[1]);
+Assert.AreEqual(4, output.Shape.Dimensions[2]);
+Assert.AreEqual(3, output.Shape.Dimensions[3]);

 var values = output.ToArray();
 Assert.AreEqual(0.25, values[0]);

src/ConvNetSharp.Core/Fluent/FluentNet.cs

Lines changed: 2 additions & 2 deletions

@@ -92,8 +92,8 @@ public int[] GetPrediction()
 }

 var activation = softmaxLayer.OutputActivation;
-var N = activation.Shape.GetDimension(3);
-var C = activation.Shape.GetDimension(2);
+var N = activation.Shape.Dimensions[3];
+var C = activation.Shape.Dimensions[2];
 var result = new int[N];

 for (var n = 0; n < N; n++)

src/ConvNetSharp.Core/Layers/FullyConnLayer.cs

Lines changed: 3 additions & 3 deletions

@@ -63,8 +63,8 @@ public override void Backward(Volume<T> outputGradient)
 this.OutputActivationGradients = outputGradient;

 // compute gradient wrt weights and data
-using (var reshapedInput = this.InputActivation.ReShape(1, 1, -1, this.InputActivation.Shape.GetDimension(3)))
-using (var reshapedInputGradients = this.InputActivationGradients.ReShape(1, 1, -1, this.InputActivationGradients.Shape.GetDimension(3)))
+using (var reshapedInput = this.InputActivation.ReShape(1, 1, -1, this.InputActivation.Shape.Dimensions[3]))
+using (var reshapedInputGradients = this.InputActivationGradients.ReShape(1, 1, -1, this.InputActivationGradients.Shape.Dimensions[3]))
 {
 reshapedInput.ConvolveGradient(
 this.Filters, this.OutputActivationGradients,
@@ -77,7 +77,7 @@ public override void Backward(Volume<T> outputGradient)

 protected override Volume<T> Forward(Volume<T> input, bool isTraining = false)
 {
-using (var reshapedInput = input.ReShape(1, 1, -1, input.Shape.GetDimension(3)))
+using (var reshapedInput = input.ReShape(1, 1, -1, input.Shape.Dimensions[3]))
 {
 reshapedInput.DoConvolution(this.Filters, 0, 1, this.OutputActivation);
 this.OutputActivation.DoAdd(this.Bias, this.OutputActivation);

src/ConvNetSharp.Core/Layers/LayerBase.cs

Lines changed: 1 addition & 1 deletion

@@ -67,7 +67,7 @@ public virtual Volume<T> DoForward(Volume<T> input, bool isTraining = false)

 this.InputActivation = input;

-var outputShape = new Shape(this.OutputWidth, this.OutputHeight, this.OutputDepth, input.Shape.DimensionCount == 4 ? input.Shape.GetDimension(3) : 1);
+var outputShape = new Shape(this.OutputWidth, this.OutputHeight, this.OutputDepth, input.Shape.Dimensions[3]);

 if (this.OutputActivation == null ||
 !this.OutputActivation.Shape.Equals(outputShape))

src/ConvNetSharp.Core/Layers/RegressionLayer.cs

Lines changed: 2 additions & 2 deletions

@@ -40,7 +40,7 @@ public override void Backward(Volume<T> outputGradient)
 public override void Backward(Volume<T> y, out T loss)
 {
 var reshape = y.ReShape(new Shape(1, 1, -1, Shape.Keep));
-var dy = this.InputActivationGradients.ReShape(this.OutputActivation.Shape.Dimensions.ToArray());
+var dy = this.InputActivationGradients.ReShape(this.OutputActivation.Shape.Dimensions);
 reshape.DoSubtractFrom(this.OutputActivation, dy);

 if (this._result == null)
@@ -54,7 +54,7 @@ public override void Backward(Volume<T> y, out T loss)
 var half = (T)Convert.ChangeType(0.5, typeof(T));
 this._result.DoMultiply(this._result, half); // dy * dy * 0.5
 this._result.DoSum(this._sum); // sum over all batch
-var batchSize = y.Shape.GetDimension(3);
+var batchSize = y.Shape.Dimensions[3];
 loss = Ops<T>.Divide(this._sum.Get(0), Ops<T>.Cast(batchSize)); // average
 }

src/ConvNetSharp.Core/Layers/SoftMaxLayer.cs

Lines changed: 5 additions & 5 deletions

@@ -26,17 +26,17 @@ public SoftmaxLayer(int classCount)
 public override void Backward(Volume<T> y, out T loss)
 {
 // input gradient = pi - yi
-y.DoSubtractFrom(this.OutputActivation, this.InputActivationGradients.ReShape(this.OutputActivation.Shape.Dimensions.ToArray()));
+y.DoSubtractFrom(this.OutputActivation, this.InputActivationGradients.ReShape(this.OutputActivation.Shape.Dimensions));

 //loss is the class negative log likelihood
 loss = Ops<T>.Zero;
-for (var n = 0; n < y.Shape.GetDimension(3); n++)
+for (var n = 0; n < y.Shape.Dimensions[3]; n++)
 {
-for (var d = 0; d < y.Shape.GetDimension(2); d++)
+for (var d = 0; d < y.Shape.Dimensions[2]; d++)
 {
-for (var h = 0; h < y.Shape.GetDimension(1); h++)
+for (var h = 0; h < y.Shape.Dimensions[1]; h++)
 {
-for (var w = 0; w < y.Shape.GetDimension(0); w++)
+for (var w = 0; w < y.Shape.Dimensions[0]; w++)
 {
 var expected = y.Get(w, h, d, n);
 var actual = this.OutputActivation.Get(w, h, d, n);

src/ConvNetSharp.Core/Net.cs

Lines changed: 2 additions & 2 deletions

@@ -68,8 +68,8 @@ public int[] GetPrediction()
 }

 var activation = softmaxLayer.OutputActivation;
-var N = activation.Shape.GetDimension(3);
-var C = activation.Shape.GetDimension(2);
+var N = activation.Shape.Dimensions[3];
+var C = activation.Shape.Dimensions[2];
 var result = new int[N];

 for (var n = 0; n < N; n++)

src/ConvNetSharp.Core/Training/TrainerBase.cs

Lines changed: 3 additions & 3 deletions

@@ -27,15 +27,15 @@ protected virtual void Backward(Volume<T> y)
 {
 var chrono = Stopwatch.StartNew();

-var batchSize = y.Shape.GetDimension(3);
+var batchSize = y.Shape.Dimensions[3];
 this.Loss = Ops<T>.Divide(this.Net.Backward(y), Ops<T>.Cast(batchSize));
 this.BackwardTimeMs = chrono.Elapsed.TotalMilliseconds/batchSize;
 }

 private void Forward(Volume<T> x)
 {
 var chrono = Stopwatch.StartNew();
-var batchSize = x.Shape.GetDimension(3);
+var batchSize = x.Shape.Dimensions[3];
 this.Net.Forward(x, true); // also set the flag that lets the net know we're just training
 this.ForwardTimeMs = chrono.Elapsed.TotalMilliseconds/batchSize;
 }
@@ -46,7 +46,7 @@ public virtual void Train(Volume<T> x, Volume<T> y)

 Backward(y);

-var batchSize = x.Shape.GetDimension(3);
+var batchSize = x.Shape.Dimensions[3];
 var chrono = Stopwatch.StartNew();
 TrainImplem();
 this.UpdateWeightsTimeMs = chrono.Elapsed.TotalMilliseconds/batchSize;

src/ConvNetSharp.Flow.Tests/OpTests.cs

Lines changed: 1 addition & 1 deletion

@@ -334,7 +334,7 @@ public void ReshapeDerivate()
 result = session.Run(diff,
 new Dictionary<string, Volume<T>>
 {
-{"x", NewVolume(new[] {1.0, 2.0, 3.0, 4.0}, Volume.Shape.From(4))},
+{"x", NewVolume(new[] {1.0, 2.0, 3.0, 4.0}, Volume.Shape.From(4, 1, 1, 1))},
 {"grad", NewVolume(new[] {1.0, 1.0, 1.0, 1.0}, Volume.Shape.From(1, 1, 4, 1))}
 });
 Assert.AreEqual(new Shape(4, 1, 1, 1), result.Shape);

src/ConvNetSharp.Flow/Net.cs

Lines changed: 2 additions & 2 deletions

@@ -62,8 +62,8 @@ public List<ParametersAndGradients<T>> GetParametersAndGradients()
 public int[] GetPrediction()
 {
 var activation = this.Op.Evaluate(this.Session);
-var N = activation.Shape.GetDimension(3);
-var C = activation.Shape.GetDimension(2);
+var N = activation.Shape.Dimensions[3];
+var C = activation.Shape.Dimensions[2];
 var result = new int[N];

 for (var n = 0; n < N; n++)

src/ConvNetSharp.Flow/Ops/Concat.cs

Lines changed: 2 additions & 2 deletions

@@ -44,9 +44,9 @@ public override Volume<T> Evaluate(Session<T> session)
 var left = this.Parents[0].Evaluate(session);
 var right = this.Parents[1].Evaluate(session);

-var batchSize = Math.Max(left.Shape.GetDimension(3), right.Shape.GetDimension(3));
+var batchSize = Math.Max(left.Shape.Dimensions[3], right.Shape.Dimensions[3]);

-int totalLength = (int)(left.Shape.TotalLength / left.Shape.GetDimension(3) + right.Shape.TotalLength / right.Shape.GetDimension(3));
+int totalLength = (int)(left.Shape.TotalLength / left.Shape.Dimensions[3] + right.Shape.TotalLength / right.Shape.Dimensions[3]);
 if (this.Result == null || this.lastTotalLength != totalLength)
 {
 this.Result?.Dispose();

src/ConvNetSharp.Flow/Ops/Const.cs

Lines changed: 4 additions & 4 deletions

@@ -88,10 +88,10 @@ public override Dictionary<string, object> GetData()

 if (this.OutputShape != null)
 {
-data["dim0"] = this.OutputShape.GetDimension(0);
-data["dim1"] = this.OutputShape.GetDimension(1);
-data["dim2"] = this.OutputShape.GetDimension(2);
-data["dim3"] = this.OutputShape.GetDimension(3);
+data["dim0"] = this.OutputShape.Dimensions[0];
+data["dim1"] = this.OutputShape.Dimensions[1];
+data["dim2"] = this.OutputShape.Dimensions[2];
+data["dim3"] = this.OutputShape.Dimensions[3];
 }

 return data;

src/ConvNetSharp.Flow/Ops/Convolution.cs

Lines changed: 5 additions & 5 deletions

@@ -91,19 +91,19 @@ public override Volume<T> Evaluate(Session<T> session)

 if (this.Parents[1].Result == null)
 {
-var count = this.Width * this.Height * x.Shape.GetDimension(2);
+var count = this.Width * this.Height * x.Shape.Dimensions[2];
 var scale = Math.Sqrt(2.0 / count);

-var filterShape = new Shape(this.Width, this.Height, x.Shape.GetDimension(2), this.FilterCount);
+var filterShape = new Shape(this.Width, this.Height, x.Shape.Dimensions[2], this.FilterCount);
 this.Parents[1].Result = BuilderInstance<T>.Volume.Random(filterShape, 0.0, scale);
 }

 var outputDepth = this.FilterCount;
-var outputWidth = (int)Math.Floor((x.Shape.GetDimension(0) + this.Pad * 2 - this.Width) / (double)this.Stride + 1);
-var outputHeight = (int)Math.Floor((x.Shape.GetDimension(1) + this.Pad * 2 - this.Height) / (double)this.Stride + 1);
+var outputWidth = (int)Math.Floor((x.Shape.Dimensions[0] + this.Pad * 2 - this.Width) / (double)this.Stride + 1);
+var outputHeight = (int)Math.Floor((x.Shape.Dimensions[1] + this.Pad * 2 - this.Height) / (double)this.Stride + 1);

 this.Result?.Dispose();
-this.Result = BuilderInstance<T>.Volume.SameAs(new Shape(outputWidth, outputHeight, outputDepth, x.Shape.GetDimension(3)));
+this.Result = BuilderInstance<T>.Volume.SameAs(new Shape(outputWidth, outputHeight, outputDepth, x.Shape.Dimensions[3]));
 }

 x.DoConvolution(this.Parents[1].Evaluate(session), this.Pad, this.Stride, this.Result);
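The outputWidth and outputHeight values above follow the usual convolution output-size formula, floor((size + 2*pad - filter)/stride + 1). A standalone worked example of that arithmetic (a sketch, not repository code):

// Worked example of the output-size formula used in Convolution.Evaluate above.
// This is an illustrative sketch; the values are made up.
var inputWidth = 28;
var pad = 1;
var filterWidth = 5;
var stride = 1;
var outputWidth = (int)System.Math.Floor((inputWidth + pad * 2 - filterWidth) / (double)stride + 1);
System.Console.WriteLine(outputWidth); // prints 26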

src/ConvNetSharp.Flow/Ops/Extract.cs

Lines changed: 1 addition & 1 deletion

@@ -38,7 +38,7 @@ public override Volume<T> Evaluate(Session<T> session)
 var length = (int)Convert.ChangeType(this.Parents[1].Evaluate(session).Get(0), typeof(int)); // TODO: Find a way to keep this on host
 var offset = (int)Convert.ChangeType(this.Parents[2].Evaluate(session).Get(0), typeof(int)); // TODO: Find a way to keep this on host

-var batchSize = x.Shape.GetDimension(3);
+var batchSize = x.Shape.Dimensions[3];

 int totalLength = length * batchSize;
 if (this.Result == null || this.lastTotalLength != totalLength)

src/ConvNetSharp.Flow/Ops/Max.cs

Lines changed: 1 addition & 1 deletion

@@ -41,7 +41,7 @@ public override Volume<T> Evaluate(Session<T> session)
 this.IsDirty = false;

 var x = this.Parents[0].Evaluate(session);
-var reshape = x.ReShape(-1, x.Shape.GetDimension(-1));
+var reshape = x.ReShape(-1, x.Shape.Dimensions[3]);
 var targetShape = new Shape(reshape.Shape);
 targetShape.SetDimension(0, 1);

src/ConvNetSharp.Flow/Ops/Pool.cs

Lines changed: 4 additions & 4 deletions

@@ -67,10 +67,10 @@ public override Volume<T> Evaluate(Session<T> session)
 this._lastInputShape = new Shape(x.Shape);

 var outputShape = new Shape(
-(int) Math.Floor((x.Shape.GetDimension(0) + this.HorizontalPad * 2 - this.Width) / (double) this.HorizontalStride + 1),
-(int) Math.Floor((x.Shape.GetDimension(1) + this.VerticalPad * 2 - this.Height) / (double) this.VerticalStride + 1),
-x.Shape.GetDimension(2),
-x.Shape.GetDimension(3)
+(int) Math.Floor((x.Shape.Dimensions[0] + this.HorizontalPad * 2 - this.Width) / (double) this.HorizontalStride + 1),
+(int) Math.Floor((x.Shape.Dimensions[1] + this.VerticalPad * 2 - this.Height) / (double) this.VerticalStride + 1),
+x.Shape.Dimensions[2],
+x.Shape.Dimensions[3]
 );

 this.Result?.Dispose();

src/ConvNetSharp.Flow/Ops/Reshape.cs

Lines changed: 5 additions & 5 deletions

@@ -66,7 +66,7 @@ public override Volume<T> Evaluate(Session<T> session)
 var shape = this.Parents[1].Evaluate(session);
 var s = new[] { shape.Get(0), shape.Get(1), shape.Get(2), shape.Get(3) };
 var t = s.Select(o => Convert.ToInt32(o)).ToArray();
-this._tempShape = new Shape(t);
+this._tempShape = new Shape(t[0], t[1], t[2], t[3]);
 this._lastBatchSize = session.BatchSize;
 }

@@ -82,10 +82,10 @@ public override Dictionary<string, object> GetData()

 if (this.OutputShape != null)
 {
-data["dim0"] = this.OutputShape.GetDimension(0);
-data["dim1"] = this.OutputShape.GetDimension(1);
-data["dim2"] = this.OutputShape.GetDimension(2);
-data["dim3"] = this.OutputShape.GetDimension(3);
+data["dim0"] = this.OutputShape.Dimensions[0];
+data["dim1"] = this.OutputShape.Dimensions[1];
+data["dim2"] = this.OutputShape.Dimensions[2];
+data["dim3"] = this.OutputShape.Dimensions[3];
 }

 return data;

src/ConvNetSharp.Flow/Ops/Shape.cs

Lines changed: 15 additions & 5 deletions

@@ -5,6 +5,16 @@

 namespace ConvNetSharp.Flow.Ops
 {
+/// <summary>
+/// Describes the shape of the data.
+/// Shape always has 4 dimensions: [width, height, class, batch size]
+///
+/// e.g. A 1D array fits in a volume that has a shape of [1,1,n,1]
+/// A 2D array fits in a volume that has a shape of [w,h,1,1]
+/// A 2D array with 3 channels (a color image, for example) fits in a volume that has a shape of [w,h,3,1]
+/// 10 2D arrays (e.g. 10 grayscale images) fit in a volume that has a shape of [w,h,1,10]
+/// </summary>
+/// <typeparam name="T">type of data (double or float)</typeparam>
 public class Shape<T> : Op<T> where T : struct, IEquatable<T>, IFormattable
 {
 private readonly VolumeBuilder<T> _builder;
@@ -53,14 +63,14 @@ public override Volume<T> Evaluate(Session<T> session)

 if (this.Index == -1)
 {
-this.Result.Set(0, Ops<T>.Cast(y.Shape.GetDimension(0)));
-this.Result.Set(1, Ops<T>.Cast(y.Shape.GetDimension(1)));
-this.Result.Set(2, Ops<T>.Cast(y.Shape.GetDimension(2)));
-this.Result.Set(3, Ops<T>.Cast(y.Shape.GetDimension(3)));
+this.Result.Set(0, Ops<T>.Cast(y.Shape.Dimensions[0]));
+this.Result.Set(1, Ops<T>.Cast(y.Shape.Dimensions[1]));
+this.Result.Set(2, Ops<T>.Cast(y.Shape.Dimensions[2]));
+this.Result.Set(3, Ops<T>.Cast(y.Shape.Dimensions[3]));
 }
 else
 {
-this.Result.Set(0, Ops<T>.Cast(y.Shape.GetDimension(this.Index)));
+this.Result.Set(0, Ops<T>.Cast(y.Shape.Dimensions[this.Index]));
 }

 return base.Evaluate(session);
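The new doc comment added above spells out the fixed [width, height, class, batch size] layout that every Dimensions[i] read in this commit relies on. As a hedged illustration, assuming the BuilderInstance.Volume.From and Shape APIs that appear in this commit's test files, a batch of ten 4x4 grayscale images and a single length-3 vector would be wrapped like this:

// Illustration of the 4D shape convention described in the new <summary>:
// [width, height, class, batch size]. Assumes the BuilderInstance.Volume.From
// and Shape APIs shown in the test files of this commit; data values are dummies.
using ConvNetSharp.Volume;

// Ten 4x4 grayscale images -> shape [4, 4, 1, 10]
var images = BuilderInstance.Volume.From(new double[4 * 4 * 1 * 10], new Shape(4, 4, 1, 10));

// A single 1D array of length 3 fits in [1, 1, 3, 1]
var vector = BuilderInstance.Volume.From(new[] { 1.0, 2.0, 3.0 }, new Shape(1, 1, 3, 1));

System.Console.WriteLine(images.Shape.Dimensions[3]); // batch size: 10
System.Console.WriteLine(vector.Shape.Dimensions[2]); // class dimension: 3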

src/ConvNetSharp.Flow/Ops/SoftMaxCrossEntropy.cs

Lines changed: 5 additions & 5 deletions

@@ -44,13 +44,13 @@ public override Volume<T> Evaluate(Session<T> session)
 var outputActivation = this.Parents[0].Evaluate(session);

 var loss = Ops<T>.Zero;
-for (var n = 0; n < y.Shape.GetDimension(3); n++)
+for (var n = 0; n < y.Shape.Dimensions[3]; n++)
 {
-for (var d = 0; d < y.Shape.GetDimension(2); d++)
+for (var d = 0; d < y.Shape.Dimensions[2]; d++)
 {
-for (var h = 0; h < y.Shape.GetDimension(1); h++)
+for (var h = 0; h < y.Shape.Dimensions[1]; h++)
 {
-for (var w = 0; w < y.Shape.GetDimension(0); w++)
+for (var w = 0; w < y.Shape.Dimensions[0]; w++)
 {
 var expected = y.Get(w, h, d, n);
 var actual = outputActivation.Get(w, h, d, n);
@@ -66,7 +66,7 @@ public override Volume<T> Evaluate(Session<T> session)
 }
 }

-var batchSize = outputActivation.Shape.GetDimension(3);
+var batchSize = outputActivation.Shape.Dimensions[3];
 loss = Ops<T>.Divide(Ops<T>.Negate(loss), Ops<T>.Cast(batchSize));
 this.Result.Set(0, loss);
