Commit ce9b38b

Reformatting BinaryClassification samples to width 85 (dotnet#3946)
* reformatted BinaryClassification samples
* Update AveragedPerceptron.cs fixing spacing
* Update AveragedPerceptronWithOptions.cs fixing whitespace
* Update AveragedPerceptron.cs
* Update AveragedPerceptron.cs
* Update BinaryClassification.ttinclude fixing whitespace
* Update FactorizationMachine.cs fixing whitespace
* Update FastForest.cs fixing whitespace
* Update FastForestWithOptions.cs fixing whitespace
* Update FastTree.cs fixing whitespace
* Update FastTreeWithOptions.cs fixing whitespace
* Update FieldAwareFactorizationMachine.cs fixing whitespace
* Update FieldAwareFactorizationMachine.cs
* Update FieldAwareFactorizationMachine.tt fixing whitespace
* Update FieldAwareFactorizationMachineWithOptions.cs fixing whitespace
* Update FieldAwareFactorizationMachine.cs
* Update FieldAwareFactorizationMachineWithOptions.tt fixing whitespace
* Update LbfgsLogisticRegression.cs fixing whitespace
* Update LbfgsLogisticRegressionWithOptions.cs fixing whitespace
* Update LightGbm.cs fixing whitespace
* Update LightGbmWithOptions.cs fixing whitespace
* Update LinearSvm.cs fixing whitespace
* Update LinearSvmWithOptions.cs fixing whitespace
* Update MultipleFeatureColumnsBinaryClassification.ttinclude fixing whitespace
* Update PriorTrainer.cs fixing whitespace
* Update AveragedPerceptron.cs
* Update AveragedPerceptronWithOptions.cs
* Update BinaryClassification.ttinclude
* Update FactorizationMachine.cs
* Update FastForestWithOptions.cs
* Update FastTree.cs
* Update FastTreeWithOptions.cs
* Update LbfgsLogisticRegression.cs
* Update LbfgsLogisticRegressionWithOptions.cs
* Update LightGbm.cs
* Update LightGbmWithOptions.cs
* Update LinearSvm.cs
* Update LinearSvmWithOptions.cs
* Update SdcaLogisticRegression.cs
* Update SdcaLogisticRegressionWithOptions.cs
* Update SdcaNonCalibrated.cs
* Update SdcaNonCalibratedWithOptions.cs
* Update SdcaNonCalibrated.cs
* Update SdcaLogisticRegressionWithOptions.cs
* Update SdcaLogisticRegression.cs
* Update SgdCalibrated.cs
* Update SgdCalibratedWithOptions.cs
* Update SgdNonCalibrated.cs
* Update SgdNonCalibratedWithOptions.cs
* Update SymbolicSgdLogisticRegression.cs
* Update SymbolicSgdLogisticRegressionWithOptions.cs
* Update Program.cs changing back
* Update Program.cs
* Update Program.cs
* Update Program.cs
* Update Program.cs
* Update Program.cs
* fixed tab issues
* fixed indentations
* fixed commented-on parts
1 parent 4fecfb2 commit ce9b38b

43 files changed (+1513 −713 lines)
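The change is purely presentational: statements that ran past 85 columns are now wrapped, usually at a member-access dot or an argument boundary, with the continuation indented one extra level. Below is a minimal sketch of that convention, assuming the Microsoft.ML NuGet package is referenced; the class name is made up for illustration.

    // Minimal sketch of the 85-column wrapping convention applied by this commit.
    using System;
    using Microsoft.ML;

    public static class WrappingStyleDemo   // hypothetical name
    {
        public static void Main()
        {
            var mlContext = new MLContext(seed: 0);

            // Before (runs past 85 columns):
            // var pipeline = mlContext.BinaryClassification.Trainers.AveragedPerceptron();

            // After: break at the member access and indent the continuation one level.
            var pipeline = mlContext.BinaryClassification.Trainers
                .AveragedPerceptron();

            Console.WriteLine(pipeline != null);
        }
    }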

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/AveragedPerceptron.cs

Lines changed: 38 additions & 16 deletions
@@ -10,35 +10,43 @@ public static class AveragedPerceptron
     {
         public static void Example()
         {
-            // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
-            // as a catalog of available operations and as the source of randomness.
-            // Setting the seed to a fixed number in this example to make outputs deterministic.
+            // Create a new context for ML.NET operations. It can be used for
+            // exception tracking and logging, as a catalog of available operations
+            // and as the source of randomness. Setting the seed to a fixed number
+            // in this example to make outputs deterministic.
             var mlContext = new MLContext(seed: 0);

             // Create a list of training data points.
             var dataPoints = GenerateRandomDataPoints(1000);

-            // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+            // Convert the list of data points to an IDataView object, which is
+            // consumable by ML.NET API.
             var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

             // Define the trainer.
-            var pipeline = mlContext.BinaryClassification.Trainers.AveragedPerceptron();
+            var pipeline = mlContext.BinaryClassification.Trainers
+                .AveragedPerceptron();

             // Train the model.
             var model = pipeline.Fit(trainingData);

-            // Create testing data. Use different random seed to make it different from training data.
-            var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123));
+            // Create testing data. Use different random seed to make it different
+            // from training data.
+            var testData = mlContext.Data
+                .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123));

             // Run the model on test data set.
             var transformedTestData = model.Transform(testData);

             // Convert IDataView object to a list.
-            var predictions = mlContext.Data.CreateEnumerable<Prediction>(transformedTestData, reuseRowObject: false).ToList();
+            var predictions = mlContext.Data
+                .CreateEnumerable<Prediction>(transformedTestData,
+                    reuseRowObject: false).ToList();

             // Print 5 predictions.
             foreach (var p in predictions.Take(5))
-                Console.WriteLine($"Label: {p.Label}, Prediction: {p.PredictedLabel}");
+                Console.WriteLine($"Label: {p.Label}, "
+                    + $"Prediction: {p.PredictedLabel}");

             // Expected output:
             // Label: True, Prediction: True
@@ -48,7 +56,9 @@ public static void Example()
             // Label: False, Prediction: False

             // Evaluate the overall metrics.
-            var metrics = mlContext.BinaryClassification.EvaluateNonCalibrated(transformedTestData);
+            var metrics = mlContext.BinaryClassification
+                .EvaluateNonCalibrated(transformedTestData);
+
             PrintMetrics(metrics);

             // Expected output:
@@ -71,7 +81,9 @@ public static void Example()
             // Precision || 0.7402 | 0.7061 |
         }

-        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed=0)
+        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
+            int seed=0)
+
         {
             var random = new Random(seed);
             float randomFloat() => (float)random.NextDouble();
@@ -82,13 +94,18 @@ private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int se
                 {
                     Label = label,
                     // Create random features that are correlated with the label.
-                    // For data points with false label, the feature values are slightly increased by adding a constant.
-                    Features = Enumerable.Repeat(label, 50).Select(x => x ? randomFloat() : randomFloat() + 0.1f).ToArray()
+                    // For data points with false label, the feature values are
+                    // slightly increased by adding a constant.
+                    Features = Enumerable.Repeat(label, 50)
+                        .Select(x => x ? randomFloat() : randomFloat() +
+                        0.1f).ToArray()
+
                 };
             }
         }

-        // Example with label and 50 feature values. A data set is a collection of such examples.
+        // Example with label and 50 feature values. A data set is a collection of
+        // such examples.
         private class DataPoint
         {
             public bool Label { get; set; }
@@ -111,11 +128,16 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics)
             Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}");
             Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}");
             Console.WriteLine($"F1 Score: {metrics.F1Score:F2}");
-            Console.WriteLine($"Negative Precision: {metrics.NegativePrecision:F2}");
+            Console.WriteLine($"Negative Precision: " +
+                $"{metrics.NegativePrecision:F2}");
+
             Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}");
-            Console.WriteLine($"Positive Precision: {metrics.PositivePrecision:F2}");
+            Console.WriteLine($"Positive Precision: " +
+                $"{metrics.PositivePrecision:F2}");
+
             Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}\n");
             Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
         }
     }
 }
+
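For readers who want the end state rather than the diff, here is a condensed, self-contained sketch of how the reformatted sample reads. It reuses the statements visible in the hunks above; the class name, the DataPoint/Prediction class bodies, and the 0.5f labelling threshold are not shown in this diff and are filled in as assumptions.

    using System;
    using System.Collections.Generic;
    using System.Linq;
    using Microsoft.ML;
    using Microsoft.ML.Data;

    public static class AveragedPerceptronCondensed   // hypothetical name for this sketch
    {
        public static void Main()
        {
            // Fixed seed so the output is deterministic, as in the sample.
            var mlContext = new MLContext(seed: 0);

            var trainingData = mlContext.Data
                .LoadFromEnumerable(GenerateRandomDataPoints(1000));

            var pipeline = mlContext.BinaryClassification.Trainers
                .AveragedPerceptron();

            var model = pipeline.Fit(trainingData);

            var testData = mlContext.Data
                .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));

            var transformedTestData = model.Transform(testData);

            var predictions = mlContext.Data
                .CreateEnumerable<Prediction>(transformedTestData,
                    reuseRowObject: false).ToList();

            foreach (var p in predictions.Take(5))
                Console.WriteLine($"Label: {p.Label}, "
                    + $"Prediction: {p.PredictedLabel}");

            var metrics = mlContext.BinaryClassification
                .EvaluateNonCalibrated(transformedTestData);

            Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}");
        }

        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
            int seed = 0)
        {
            var random = new Random(seed);
            float randomFloat() => (float)random.NextDouble();
            for (int i = 0; i < count; i++)
            {
                // Assumed threshold; false-labeled points get slightly larger values.
                var label = randomFloat() > 0.5f;
                yield return new DataPoint
                {
                    Label = label,
                    Features = Enumerable.Repeat(label, 50)
                        .Select(x => x ? randomFloat() : randomFloat() + 0.1f)
                        .ToArray()
                };
            }
        }

        private class DataPoint
        {
            public bool Label { get; set; }
            [VectorType(50)]
            public float[] Features { get; set; }
        }

        private class Prediction
        {
            public bool Label { get; set; }
            public bool PredictedLabel { get; set; }
        }
    }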

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/AveragedPerceptronWithOptions.cs

Lines changed: 38 additions & 16 deletions
@@ -11,15 +11,17 @@ public static class AveragedPerceptronWithOptions
     {
         public static void Example()
         {
-            // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
-            // as a catalog of available operations and as the source of randomness.
-            // Setting the seed to a fixed number in this example to make outputs deterministic.
+            // Create a new context for ML.NET operations. It can be used for
+            // exception tracking and logging, as a catalog of available operations
+            // and as the source of randomness. Setting the seed to a fixed number
+            // in this example to make outputs deterministic.
             var mlContext = new MLContext(seed: 0);

             // Create a list of training data points.
             var dataPoints = GenerateRandomDataPoints(1000);

-            // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+            // Convert the list of data points to an IDataView object, which is
+            // consumable by ML.NET API.
             var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

             // Define trainer options.
@@ -33,23 +35,29 @@ public static void Example()
             };

             // Define the trainer.
-            var pipeline = mlContext.BinaryClassification.Trainers.AveragedPerceptron(options);
+            var pipeline = mlContext.BinaryClassification.Trainers
+                .AveragedPerceptron(options);

             // Train the model.
             var model = pipeline.Fit(trainingData);

-            // Create testing data. Use different random seed to make it different from training data.
-            var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123));
+            // Create testing data. Use different random seed to make it different
+            // from training data.
+            var testData = mlContext.Data
+                .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123));

             // Run the model on test data set.
             var transformedTestData = model.Transform(testData);

             // Convert IDataView object to a list.
-            var predictions = mlContext.Data.CreateEnumerable<Prediction>(transformedTestData, reuseRowObject: false).ToList();
+            var predictions = mlContext.Data
+                .CreateEnumerable<Prediction>(transformedTestData,
+                    reuseRowObject: false).ToList();

             // Print 5 predictions.
             foreach (var p in predictions.Take(5))
-                Console.WriteLine($"Label: {p.Label}, Prediction: {p.PredictedLabel}");
+                Console.WriteLine($"Label: {p.Label}, "
+                    + $"Prediction: {p.PredictedLabel}");

             // Expected output:
             // Label: True, Prediction: True
@@ -59,7 +67,9 @@ public static void Example()
             // Label: False, Prediction: False

             // Evaluate the overall metrics.
-            var metrics = mlContext.BinaryClassification.EvaluateNonCalibrated(transformedTestData);
+            var metrics = mlContext.BinaryClassification
+                .EvaluateNonCalibrated(transformedTestData);
+
             PrintMetrics(metrics);

             // Expected output:
@@ -82,7 +92,9 @@ public static void Example()
             // Precision || 0.7402 | 0.7061 |
         }

-        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed=0)
+        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
+            int seed=0)
+
         {
             var random = new Random(seed);
             float randomFloat() => (float)random.NextDouble();
@@ -93,13 +105,18 @@ private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int se
                 {
                     Label = label,
                     // Create random features that are correlated with the label.
-                    // For data points with false label, the feature values are slightly increased by adding a constant.
-                    Features = Enumerable.Repeat(label, 50).Select(x => x ? randomFloat() : randomFloat() + 0.1f).ToArray()
+                    // For data points with false label, the feature values are
+                    // slightly increased by adding a constant.
+                    Features = Enumerable.Repeat(label, 50)
+                        .Select(x => x ? randomFloat() : randomFloat() +
+                        0.1f).ToArray()
+
                 };
             }
         }

-        // Example with label and 50 feature values. A data set is a collection of such examples.
+        // Example with label and 50 feature values. A data set is a collection of
+        // such examples.
         private class DataPoint
         {
             public bool Label { get; set; }
@@ -122,11 +139,16 @@ private static void PrintMetrics(BinaryClassificationMetrics metrics)
             Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}");
             Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}");
             Console.WriteLine($"F1 Score: {metrics.F1Score:F2}");
-            Console.WriteLine($"Negative Precision: {metrics.NegativePrecision:F2}");
+            Console.WriteLine($"Negative Precision: " +
+                $"{metrics.NegativePrecision:F2}");
+
             Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}");
-            Console.WriteLine($"Positive Precision: {metrics.PositivePrecision:F2}");
+            Console.WriteLine($"Positive Precision: " +
+                $"{metrics.PositivePrecision:F2}");
+
             Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}\n");
             Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
         }
     }
 }
+
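The options object itself sits outside the hunks shown above (only its closing `};` appears as context), so the sketch below only illustrates the shape of an options-based trainer call; the property values are assumptions for illustration, not the sample's actual settings.

    using System;
    using Microsoft.ML;
    using Microsoft.ML.Trainers;

    public static class OptionsCallShape   // hypothetical name
    {
        public static void Main()
        {
            var mlContext = new MLContext(seed: 0);

            // Illustrative settings; the sample's real values are not visible in the diff.
            var options = new AveragedPerceptronTrainer.Options
            {
                LearningRate = 0.1f,
                NumberOfIterations = 10
            };

            // The options-based overload wraps the same way as the parameterless one.
            var pipeline = mlContext.BinaryClassification.Trainers
                .AveragedPerceptron(options);

            Console.WriteLine(pipeline != null);
        }
    }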

docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/BinaryClassification.ttinclude

Lines changed: 47 additions & 22 deletions
@@ -13,63 +13,79 @@ namespace Samples.Dynamic.Trainers.BinaryClassification
 {<#=Comments#>
         public static void Example()
         {
-            // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
-            // as a catalog of available operations and as the source of randomness.
-            // Setting the seed to a fixed number in this example to make outputs deterministic.
+            // Create a new context for ML.NET operations. It can be used for
+            // exception tracking and logging, as a catalog of available operations
+            // and as the source of randomness. Setting the seed to a fixed number
+            // in this example to make outputs deterministic.
             var mlContext = new MLContext(seed: 0);

             // Create a list of training data points.
             var dataPoints = GenerateRandomDataPoints(1000);

-            // Convert the list of data points to an IDataView object, which is consumable by ML.NET API.
+            // Convert the list of data points to an IDataView object, which is
+            // consumable by ML.NET API.
             var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
 <# if (CacheData) { #>

-            // ML.NET doesn't cache data set by default. Therefore, if one reads a data set from a file and accesses it many times,
-            // it can be slow due to expensive featurization and disk operations. When the considered data can fit into memory,
-            // a solution is to cache the data in memory. Caching is especially helpful when working with iterative algorithms
+            // ML.NET doesn't cache data set by default. Therefore, if one reads a
+            // data set from a file and accesses it many times, it can be slow due
+            // to expensive featurization and disk operations. When the considered
+            // data can fit into memory, a solution is to cache the data in memory.
+            // Caching is especially helpful when working with iterative algorithms
             // which needs many data passes.
             trainingData = mlContext.Data.Cache(trainingData);
 <# } #>

 <# if (TrainerOptions == null) { #>
             // Define the trainer.
-            var pipeline = mlContext.BinaryClassification.Trainers.<#=Trainer#>();
+            var pipeline = mlContext.BinaryClassification.Trainers
+                .<#=Trainer#>();
 <# } else { #>
             // Define trainer options.
             var options = new <#=TrainerOptions#>;

             // Define the trainer.
-            var pipeline = mlContext.BinaryClassification.Trainers.<#=Trainer#>(options);
+            var pipeline = mlContext.BinaryClassification.Trainers
+                .<#=Trainer#>(options);
 <# } #>

             // Train the model.
             var model = pipeline.Fit(trainingData);

-            // Create testing data. Use different random seed to make it different from training data.
-            var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123));
+            // Create testing data. Use different random seed to make it different
+            // from training data.
+            var testData = mlContext.Data
+                .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123));

             // Run the model on test data set.
             var transformedTestData = model.Transform(testData);

             // Convert IDataView object to a list.
-            var predictions = mlContext.Data.CreateEnumerable<Prediction>(transformedTestData, reuseRowObject: false).ToList();
+            var predictions = mlContext.Data
+                .CreateEnumerable<Prediction>(transformedTestData,
+                    reuseRowObject: false).ToList();

             // Print 5 predictions.
             foreach (var p in predictions.Take(5))
-                Console.WriteLine($"Label: {p.Label}, Prediction: {p.PredictedLabel}");
+                Console.WriteLine($"Label: {p.Label}, "
+                    + $"Prediction: {p.PredictedLabel}");

 <#=ExpectedOutputPerInstance#>
-<# string Evaluator = IsCalibrated ? "Evaluate" : "EvaluateNonCalibrated"; #>
+<# string Evaluator = IsCalibrated ? "Evaluate" :
+    "EvaluateNonCalibrated"; #>

             // Evaluate the overall metrics.
-            var metrics = mlContext.BinaryClassification.<#=Evaluator#>(transformedTestData);
+            var metrics = mlContext.BinaryClassification
+                .<#=Evaluator#>(transformedTestData);
+
             PrintMetrics(metrics);

 <#=ExpectedOutput#>
         }

-        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed=0)
+        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
+            int seed=0)
+
         {
             var random = new Random(seed);
             float randomFloat() => (float)random.NextDouble();
@@ -80,13 +96,18 @@ namespace Samples.Dynamic.Trainers.BinaryClassification
                 {
                     Label = label,
                     // Create random features that are correlated with the label.
-                    // For data points with false label, the feature values are slightly increased by adding a constant.
-                    Features = Enumerable.Repeat(label, 50).Select(x => x ? randomFloat() : randomFloat() + <#=DataSepValue#>).ToArray()
+                    // For data points with false label, the feature values are
+                    // slightly increased by adding a constant.
+                    Features = Enumerable.Repeat(label, 50)
+                        .Select(x => x ? randomFloat() : randomFloat() +
+                        <#=DataSepValue#>).ToArray()
+
                 };
             }
         }

-        // Example with label and 50 feature values. A data set is a collection of such examples.
+        // Example with label and 50 feature values. A data set is a collection of
+        // such examples.
         private class DataPoint
         {
             public bool Label { get; set; }
@@ -109,11 +130,15 @@ namespace Samples.Dynamic.Trainers.BinaryClassification
             Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}");
             Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}");
             Console.WriteLine($"F1 Score: {metrics.F1Score:F2}");
-            Console.WriteLine($"Negative Precision: {metrics.NegativePrecision:F2}");
+            Console.WriteLine($"Negative Precision: " +
+                $"{metrics.NegativePrecision:F2}");
+
             Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}");
-            Console.WriteLine($"Positive Precision: {metrics.PositivePrecision:F2}");
+            Console.WriteLine($"Positive Precision: " +
+                $"{metrics.PositivePrecision:F2}");
+
             Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}\n");
             Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
         }
     }
-}
+}
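BinaryClassification.ttinclude is a T4 include: each concrete sample (for example AveragedPerceptron.tt, mentioned in the commit message alongside FieldAwareFactorizationMachine.tt) sets the template variables it references, and the include expands into the corresponding generated .cs file shown earlier. The sketch below is a hedged driver-file example; the values are inferred from the generated AveragedPerceptron.cs above rather than copied from the real .tt file, and the "..." placeholders stand for the expected-output comment blocks.

    <#@ include file="BinaryClassification.ttinclude"#>
    <#+
    // Hedged sketch of the variables the include consumes; values are assumptions
    // inferred from the generated AveragedPerceptron.cs, not the actual .tt file.
    string Trainer = "AveragedPerceptron";
    string TrainerOptions = null;        // null selects the parameterless overload
    bool IsCalibrated = false;           // generated code calls EvaluateNonCalibrated
    bool CacheData = false;              // no Cache(...) call appears in the generated sample
    string DataSepValue = "0.1f";        // matches the constant in AveragedPerceptron.cs
    string Comments = "";                // XML doc/comment block, omitted here
    string ExpectedOutputPerInstance = "..."; // per-prediction expected-output comments
    string ExpectedOutput = "...";            // expected-metrics comments
    #>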
