StandardTrainersCatalog.SgdNonCalibrated Method
Definition
Important
Some information relates to prerelease product that may be substantially modified before it’s released. Microsoft makes no warranties, express or implied, with respect to the information provided here.
Overloads
SgdNonCalibrated(BinaryClassificationCatalog+BinaryClassificationTrainers, SgdNonCalibratedTrainer+Options)
Create SgdNonCalibratedTrainer with advanced options, which predicts a target using a linear classification model. Stochastic gradient descent (SGD) is an iterative algorithm that optimizes a differentiable objective function.

SgdNonCalibrated(BinaryClassificationCatalog+BinaryClassificationTrainers, String, String, String, IClassificationLoss, Int32, Double, Single)
Create SgdNonCalibratedTrainer, which predicts a target using a linear classification model. Stochastic gradient descent (SGD) is an iterative algorithm that optimizes a differentiable objective function.
SgdNonCalibrated(BinaryClassificationCatalog+BinaryClassificationTrainers, SgdNonCalibratedTrainer+Options)
Create SgdNonCalibratedTrainer with advanced options, which predicts a target using a linear classification model. Stochastic gradient descent (SGD) is an iterative algorithm that optimizes a differentiable objective function.
public static Microsoft.ML.Trainers.SgdNonCalibratedTrainer SgdNonCalibrated (this Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers catalog, Microsoft.ML.Trainers.SgdNonCalibratedTrainer.Options options);
static member SgdNonCalibrated : Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers * Microsoft.ML.Trainers.SgdNonCalibratedTrainer.Options -> Microsoft.ML.Trainers.SgdNonCalibratedTrainer
<Extension()>
Public Function SgdNonCalibrated (catalog As BinaryClassificationCatalog.BinaryClassificationTrainers, options As SgdNonCalibratedTrainer.Options) As SgdNonCalibratedTrainer
Parameters
- catalog
- BinaryClassificationCatalog.BinaryClassificationTrainers
The binary classification catalog trainer object.
- options
- SgdNonCalibratedTrainer.Options
Trainer options.
Returns
SgdNonCalibratedTrainer
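Before the full example below, the following is a minimal sketch of this overload: build a SgdNonCalibratedTrainer.Options object and pass it to the trainer through the binary classification catalog. The option values are illustrative assumptions rather than recommendations, the column names shown are the trainer defaults, and mlContext stands for an existing MLContext instance.

// Minimal sketch (illustrative values only).
var options = new Microsoft.ML.Trainers.SgdNonCalibratedTrainer.Options
{
    LabelColumnName = "Label",       // default label column name
    FeatureColumnName = "Features",  // default feature column name
    NumberOfIterations = 20,
    LearningRate = 0.01,
    L2Regularization = 1e-6f
};

// Append the trainer to a pipeline (here it is the entire pipeline).
var trainer = mlContext.BinaryClassification.Trainers.SgdNonCalibrated(options);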
Examples
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;
using Microsoft.ML.Trainers;

namespace Samples.Dynamic.Trainers.BinaryClassification
{
    public static class SgdNonCalibratedWithOptions
    {
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for
            // exception tracking and logging, as a catalog of available operations
            // and as the source of randomness. Setting the seed to a fixed number
            // in this example to make outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is
            // consumable by ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

            // Define trainer options.
            var options = new SgdNonCalibratedTrainer.Options
            {
                LearningRate = 0.01,
                NumberOfIterations = 10,
                L2Regularization = 1e-7f
            };

            // Define the trainer.
            var pipeline = mlContext.BinaryClassification.Trainers
                .SgdNonCalibrated(options);

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use different random seed to make it different
            // from training data.
            var testData = mlContext.Data
                .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));

            // Run the model on test data set.
            var transformedTestData = model.Transform(testData);

            // Convert IDataView object to a list.
            var predictions = mlContext.Data
                .CreateEnumerable<Prediction>(transformedTestData,
                reuseRowObject: false).ToList();

            // Print 5 predictions.
            foreach (var p in predictions.Take(5))
                Console.WriteLine($"Label: {p.Label}, "
                    + $"Prediction: {p.PredictedLabel}");

            // Expected output:
            //   Label: True, Prediction: False
            //   Label: False, Prediction: False
            //   Label: True, Prediction: True
            //   Label: True, Prediction: True
            //   Label: False, Prediction: False

            // Evaluate the overall metrics.
            var metrics = mlContext.BinaryClassification
                .EvaluateNonCalibrated(transformedTestData);

            PrintMetrics(metrics);

            // Expected output:
            //   Accuracy: 0.59
            //   AUC: 0.61
            //   F1 Score: 0.41
            //   Negative Precision: 0.57
            //   Negative Recall: 0.85
            //   Positive Precision: 0.64
            //   Positive Recall: 0.30
            //
            //   TEST POSITIVE RATIO:    0.4760 (238.0/(238.0+262.0))
            //   Confusion table
            //             ||======================
            //   PREDICTED || positive | negative | Recall
            //   TRUTH     ||======================
            //    positive ||      137 |      101 | 0.5756
            //    negative ||      118 |      144 | 0.5496
            //             ||======================
            //   Precision ||   0.5373 |   0.5878 |
        }

        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
            int seed = 0)
        {
            var random = new Random(seed);
            float randomFloat() => (float)random.NextDouble();
            for (int i = 0; i < count; i++)
            {
                var label = randomFloat() > 0.5f;
                yield return new DataPoint
                {
                    Label = label,
                    // Create random features that are correlated with the label.
                    // For data points with false label, the feature values are
                    // slightly increased by adding a constant.
                    Features = Enumerable.Repeat(label, 50)
                        .Select(x => x ? randomFloat() : randomFloat() +
                        0.03f).ToArray()
                };
            }
        }

        // Example with label and 50 feature values. A data set is a collection of
        // such examples.
        private class DataPoint
        {
            public bool Label { get; set; }

            [VectorType(50)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public bool Label { get; set; }

            // Predicted label from the trainer.
            public bool PredictedLabel { get; set; }
        }

        // Pretty-print BinaryClassificationMetrics objects.
        private static void PrintMetrics(BinaryClassificationMetrics metrics)
        {
            Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}");
            Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}");
            Console.WriteLine($"F1 Score: {metrics.F1Score:F2}");
            Console.WriteLine($"Negative Precision: " +
                $"{metrics.NegativePrecision:F2}");
            Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}");
            Console.WriteLine($"Positive Precision: " +
                $"{metrics.PositivePrecision:F2}");
            Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}\n");
            Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
        }
    }
}
Applies to
SgdNonCalibrated(BinaryClassificationCatalog+BinaryClassificationTrainers, String, String, String, IClassificationLoss, Int32, Double, Single)
Create SgdNonCalibratedTrainer, which predicts a target using a linear classification model. Stochastic gradient descent (SGD) is an iterative algorithm that optimizes a differentiable objective function.
public static Microsoft.ML.Trainers.SgdNonCalibratedTrainer SgdNonCalibrated (this Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers catalog, string labelColumnName = "Label", string featureColumnName = "Features", string exampleWeightColumnName = default, Microsoft.ML.Trainers.IClassificationLoss lossFunction = default, int numberOfIterations = 20, double learningRate = 0.01, float l2Regularization = 1E-06);
static member SgdNonCalibrated : Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers * string * string * string * Microsoft.ML.Trainers.IClassificationLoss * int * double * single -> Microsoft.ML.Trainers.SgdNonCalibratedTrainer
<Extension()>
Public Function SgdNonCalibrated (catalog As BinaryClassificationCatalog.BinaryClassificationTrainers, Optional labelColumnName As String = "Label", Optional featureColumnName As String = "Features", Optional exampleWeightColumnName As String = Nothing, Optional lossFunction As IClassificationLoss = Nothing, Optional numberOfIterations As Integer = 20, Optional learningRate As Double = 0.01, Optional l2Regularization As Single = 1E-06) As SgdNonCalibratedTrainer
Parameters
- catalog
- BinaryClassificationCatalog.BinaryClassificationTrainers
The binary classification catalog trainer object.
- labelColumnName
- String
The name of the label column, or dependent variable. The column data must be Boolean.
- featureColumnName
- String
The features, or independent variables. The column data must be a known-sized vector of Single.
- exampleWeightColumnName
- String
The name of the example weight column (optional).
- lossFunction
- IClassificationLoss
The loss function minimized in the training process. Using, for example, HingeLoss leads to a support vector machine trainer, as illustrated in the sketch below.
- numberOfIterations
- Int32
The maximum number of passes through the training dataset; set to 1 to simulate online learning.
- learningRate
- Double
The initial learning rate used by SGD.
- l2Regularization
- Single
The L2 weight for regularization.
Returns
SgdNonCalibratedTrainer
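As a quick illustration of the parameters above, the sketch below passes a HingeLoss instance as the loss function (which, as noted, yields a support vector machine-style trainer) and sets numberOfIterations to 1 to simulate online learning. The values are illustrative assumptions, the column names are the defaults, and mlContext stands for an existing MLContext instance; the complete runnable example follows.

// Illustrative sketch: non-default loss function and a single training pass.
var trainer = mlContext.BinaryClassification.Trainers.SgdNonCalibrated(
    labelColumnName: "Label",
    featureColumnName: "Features",
    lossFunction: new Microsoft.ML.Trainers.HingeLoss(), // SVM-style hinge loss
    numberOfIterations: 1,   // one pass over the data, i.e. online learning
    learningRate: 0.01,
    l2Regularization: 1e-6f);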
Examples
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;

namespace Samples.Dynamic.Trainers.BinaryClassification
{
    public static class SgdNonCalibrated
    {
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for
            // exception tracking and logging, as a catalog of available operations
            // and as the source of randomness. Setting the seed to a fixed number
            // in this example to make outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is
            // consumable by ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

            // Define the trainer.
            var pipeline = mlContext.BinaryClassification.Trainers
                .SgdNonCalibrated();

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use different random seed to make it different
            // from training data.
            var testData = mlContext.Data
                .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));

            // Run the model on test data set.
            var transformedTestData = model.Transform(testData);

            // Convert IDataView object to a list.
            var predictions = mlContext.Data
                .CreateEnumerable<Prediction>(transformedTestData,
                reuseRowObject: false).ToList();

            // Print 5 predictions.
            foreach (var p in predictions.Take(5))
                Console.WriteLine($"Label: {p.Label}, "
                    + $"Prediction: {p.PredictedLabel}");

            // Expected output:
            //   Label: True, Prediction: False
            //   Label: False, Prediction: False
            //   Label: True, Prediction: True
            //   Label: True, Prediction: True
            //   Label: False, Prediction: False

            // Evaluate the overall metrics.
            var metrics = mlContext.BinaryClassification
                .EvaluateNonCalibrated(transformedTestData);

            PrintMetrics(metrics);

            // Expected output:
            //   Accuracy: 0.60
            //   AUC: 0.63
            //   F1 Score: 0.43
            //   Negative Precision: 0.58
            //   Negative Recall: 0.85
            //   Positive Precision: 0.66
            //   Positive Recall: 0.32
            //
            //   TEST POSITIVE RATIO:    0.4760 (238.0/(238.0+262.0))
            //   Confusion table
            //             ||======================
            //   PREDICTED || positive | negative | Recall
            //   TRUTH     ||======================
            //    positive ||       76 |      162 | 0.3193
            //    negative ||       42 |      220 | 0.8397
            //             ||======================
            //   Precision ||   0.6441 |   0.5759 |
        }

        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
            int seed = 0)
        {
            var random = new Random(seed);
            float randomFloat() => (float)random.NextDouble();
            for (int i = 0; i < count; i++)
            {
                var label = randomFloat() > 0.5f;
                yield return new DataPoint
                {
                    Label = label,
                    // Create random features that are correlated with the label.
                    // For data points with false label, the feature values are
                    // slightly increased by adding a constant.
                    Features = Enumerable.Repeat(label, 50)
                        .Select(x => x ? randomFloat() : randomFloat() +
                        0.03f).ToArray()
                };
            }
        }

        // Example with label and 50 feature values. A data set is a collection of
        // such examples.
        private class DataPoint
        {
            public bool Label { get; set; }

            [VectorType(50)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public bool Label { get; set; }

            // Predicted label from the trainer.
            public bool PredictedLabel { get; set; }
        }

        // Pretty-print BinaryClassificationMetrics objects.
        private static void PrintMetrics(BinaryClassificationMetrics metrics)
        {
            Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}");
            Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}");
            Console.WriteLine($"F1 Score: {metrics.F1Score:F2}");
            Console.WriteLine($"Negative Precision: " +
                $"{metrics.NegativePrecision:F2}");
            Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}");
            Console.WriteLine($"Positive Precision: " +
                $"{metrics.PositivePrecision:F2}");
            Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}\n");
            Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
        }
    }
}