StandardTrainersCatalog.LbfgsMaximumEntropy Method
Definition
Overloads
LbfgsMaximumEntropy(MulticlassClassificationCatalog+MulticlassClassificationTrainers, LbfgsMaximumEntropyMulticlassTrainer+Options)
Create LbfgsMaximumEntropyMulticlassTrainer with advanced options, which predicts a target using a maximum entropy classification model trained with the L-BFGS method.
LbfgsMaximumEntropy(MulticlassClassificationCatalog+MulticlassClassificationTrainers, String, String, String, Single, Single, Single, Int32, Boolean)
Create LbfgsMaximumEntropyMulticlassTrainer, which predicts a target using a maximum entropy classification model trained with the L-BFGS method.
LbfgsMaximumEntropy(MulticlassClassificationCatalog+MulticlassClassificationTrainers, LbfgsMaximumEntropyMulticlassTrainer+Options)
Create LbfgsMaximumEntropyMulticlassTrainer with advanced options, which predicts a target using a maximum entropy classification model trained with the L-BFGS method.
C#
public static Microsoft.ML.Trainers.LbfgsMaximumEntropyMulticlassTrainer LbfgsMaximumEntropy (this Microsoft.ML.MulticlassClassificationCatalog.MulticlassClassificationTrainers catalog, Microsoft.ML.Trainers.LbfgsMaximumEntropyMulticlassTrainer.Options options);
F#
static member LbfgsMaximumEntropy : Microsoft.ML.MulticlassClassificationCatalog.MulticlassClassificationTrainers * Microsoft.ML.Trainers.LbfgsMaximumEntropyMulticlassTrainer.Options -> Microsoft.ML.Trainers.LbfgsMaximumEntropyMulticlassTrainer
VB
<Extension()>
Public Function LbfgsMaximumEntropy (catalog As MulticlassClassificationCatalog.MulticlassClassificationTrainers, options As LbfgsMaximumEntropyMulticlassTrainer.Options) As LbfgsMaximumEntropyMulticlassTrainer
Parameters
- options
- LbfgsMaximumEntropyMulticlassTrainer.Options
Advanced arguments to the algorithm.
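For orientation, a minimal sketch of passing advanced options to this overload follows; the property values are illustrative only (not recommended defaults), and mlContext is assumed to be an existing MLContext. The full example under Examples shows the complete pipeline.
var options = new Microsoft.ML.Trainers.LbfgsMaximumEntropyMulticlassTrainer.Options
{
    // Illustrative values only.
    HistorySize = 50,          // L-BFGS memory size
    L1Regularization = 0.1f,   // encourages sparser weights
    NumberOfThreads = 1        // single thread for reproducible runs
};
var trainer = mlContext.MulticlassClassification.Trainers.LbfgsMaximumEntropy(options);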
Returns
LbfgsMaximumEntropyMulticlassTrainer
Examples
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;
using Microsoft.ML.Trainers;

namespace Samples.Dynamic.Trainers.MulticlassClassification
{
    public static class LbfgsMaximumEntropyWithOptions
    {
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for
            // exception tracking and logging, as a catalog of available operations
            // and as the source of randomness. Setting the seed to a fixed number
            // in this example to make outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is
            // consumable by ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

            // Define trainer options.
            var options = new LbfgsMaximumEntropyMulticlassTrainer.Options
            {
                HistorySize = 50,
                L1Regularization = 0.1f,
                NumberOfThreads = 1
            };

            // Define the trainer.
            var pipeline =
                // Convert the string labels into key types.
                mlContext.Transforms.Conversion.MapValueToKey("Label")
                // Apply LbfgsMaximumEntropy multiclass trainer.
                .Append(mlContext.MulticlassClassification.Trainers
                    .LbfgsMaximumEntropy(options));

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use different random seed to make it different
            // from training data.
            var testData = mlContext.Data
                .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));

            // Run the model on test data set.
            var transformedTestData = model.Transform(testData);

            // Convert IDataView object to a list.
            var predictions = mlContext.Data
                .CreateEnumerable<Prediction>(transformedTestData,
                reuseRowObject: false).ToList();

            // Look at 5 predictions
            foreach (var p in predictions.Take(5))
                Console.WriteLine($"Label: {p.Label}, " +
                    $"Prediction: {p.PredictedLabel}");

            // Expected output:
            //   Label: 1, Prediction: 1
            //   Label: 2, Prediction: 2
            //   Label: 3, Prediction: 2
            //   Label: 2, Prediction: 2
            //   Label: 3, Prediction: 3

            // Evaluate the overall metrics
            var metrics = mlContext.MulticlassClassification
                .Evaluate(transformedTestData);

            PrintMetrics(metrics);

            // Expected output:
            //   Micro Accuracy: 0.91
            //   Macro Accuracy: 0.91
            //   Log Loss: 0.22
            //   Log Loss Reduction: 0.80

            //   Confusion table
            //             ||========================
            //   PREDICTED ||     0 |     1 |     2 | Recall
            //   TRUTH     ||========================
            //           0 ||   147 |     0 |    13 | 0.9188
            //           1 ||     0 |   165 |    12 | 0.9322
            //           2 ||    11 |     7 |   145 | 0.8896
            //             ||========================
            //   Precision ||0.9304 |0.9593 |0.8529 |
        }

        // Generates random uniform doubles in [-0.5, 0.5)
        // range with labels 1, 2 or 3.
        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
            int seed = 0)
        {
            var random = new Random(seed);
            float randomFloat() => (float)(random.NextDouble() - 0.5);
            for (int i = 0; i < count; i++)
            {
                // Generate Labels that are integers 1, 2 or 3
                var label = random.Next(1, 4);
                yield return new DataPoint
                {
                    Label = (uint)label,
                    // Create random features that are correlated with the label.
                    // The feature values are slightly increased by adding a
                    // constant multiple of label.
                    Features = Enumerable.Repeat(label, 20)
                        .Select(x => randomFloat() + label * 0.2f).ToArray()
                };
            }
        }

        // Example with label and 20 feature values. A data set is a collection of
        // such examples.
        private class DataPoint
        {
            public uint Label { get; set; }
            [VectorType(20)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public uint Label { get; set; }
            // Predicted label from the trainer.
            public uint PredictedLabel { get; set; }
        }

        // Pretty-print MulticlassClassificationMetrics objects.
        public static void PrintMetrics(MulticlassClassificationMetrics metrics)
        {
            Console.WriteLine($"Micro Accuracy: {metrics.MicroAccuracy:F2}");
            Console.WriteLine($"Macro Accuracy: {metrics.MacroAccuracy:F2}");
            Console.WriteLine($"Log Loss: {metrics.LogLoss:F2}");
            Console.WriteLine(
                $"Log Loss Reduction: {metrics.LogLossReduction:F2}\n");

            Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
        }
    }
}
LbfgsMaximumEntropy(MulticlassClassificationCatalog+MulticlassClassificationTrainers, String, String, String, Single, Single, Single, Int32, Boolean)
Create LbfgsMaximumEntropyMulticlassTrainer, which predicts a target using a maximum entropy classification model trained with the L-BFGS method.
C#
public static Microsoft.ML.Trainers.LbfgsMaximumEntropyMulticlassTrainer LbfgsMaximumEntropy (this Microsoft.ML.MulticlassClassificationCatalog.MulticlassClassificationTrainers catalog, string labelColumnName = "Label", string featureColumnName = "Features", string exampleWeightColumnName = default, float l1Regularization = 1, float l2Regularization = 1, float optimizationTolerance = 1E-07, int historySize = 20, bool enforceNonNegativity = false);
F#
static member LbfgsMaximumEntropy : Microsoft.ML.MulticlassClassificationCatalog.MulticlassClassificationTrainers * string * string * string * single * single * single * int * bool -> Microsoft.ML.Trainers.LbfgsMaximumEntropyMulticlassTrainer
VB
<Extension()>
Public Function LbfgsMaximumEntropy (catalog As MulticlassClassificationCatalog.MulticlassClassificationTrainers, Optional labelColumnName As String = "Label", Optional featureColumnName As String = "Features", Optional exampleWeightColumnName As String = Nothing, Optional l1Regularization As Single = 1, Optional l2Regularization As Single = 1, Optional optimizationTolerance As Single = 1E-07, Optional historySize As Integer = 20, Optional enforceNonNegativity As Boolean = false) As LbfgsMaximumEntropyMulticlassTrainer
Parameters
- labelColumnName
- String
The name of the label column. The column data must be KeyDataViewType.
- featureColumnName
- String
The name of the feature column. The column data must be a known-sized vector of Single.
- exampleWeightColumnName
- String
The name of the example weight column (optional).
- l1Regularization
- Single
The L1 regularization hyperparameter. Higher values tend to produce a sparser model.
- l2Regularization
- Single
The L2 weight for regularization.
- optimizationTolerance
- Single
Threshold for optimizer convergence.
- historySize
- Int32
Memory size for LbfgsMaximumEntropyMulticlassTrainer. Lower values are faster but less accurate.
- enforceNonNegativity
- Boolean
Enforce non-negative weights.
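As a quick orientation before the full example, the following sketch overrides a few of these optional parameters by name; the values are illustrative only, and mlContext is assumed to be an existing MLContext. Note that the label column must already be of KeyDataViewType, which is why the examples map it with MapValueToKey first.
var trainer = mlContext.MulticlassClassification.Trainers.LbfgsMaximumEntropy(
    labelColumnName: "Label",        // must be a key type (see MapValueToKey in the example)
    featureColumnName: "Features",   // known-sized vector of Single
    l1Regularization: 0.1f,          // illustrative value
    historySize: 50);                // illustrative value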
Returns
LbfgsMaximumEntropyMulticlassTrainer
Examples
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;

namespace Samples.Dynamic.Trainers.MulticlassClassification
{
    public static class LbfgsMaximumEntropy
    {
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for
            // exception tracking and logging, as a catalog of available operations
            // and as the source of randomness. Setting the seed to a fixed number
            // in this example to make outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is
            // consumable by ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

            // Define the trainer.
            var pipeline =
                // Convert the string labels into key types.
                mlContext.Transforms.Conversion
                    .MapValueToKey(nameof(DataPoint.Label))
                // Apply LbfgsMaximumEntropy multiclass trainer.
                .Append(mlContext.MulticlassClassification.Trainers
                    .LbfgsMaximumEntropy());

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use different random seed to make it different
            // from training data.
            var testData = mlContext.Data
                .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));

            // Run the model on test data set.
            var transformedTestData = model.Transform(testData);

            // Convert IDataView object to a list.
            var predictions = mlContext.Data
                .CreateEnumerable<Prediction>(transformedTestData,
                reuseRowObject: false).ToList();

            // Look at 5 predictions
            foreach (var p in predictions.Take(5))
                Console.WriteLine($"Label: {p.Label}, " +
                    $"Prediction: {p.PredictedLabel}");

            // Expected output:
            //   Label: 1, Prediction: 1
            //   Label: 2, Prediction: 2
            //   Label: 3, Prediction: 2
            //   Label: 2, Prediction: 2
            //   Label: 3, Prediction: 3

            // Evaluate the overall metrics
            var metrics = mlContext.MulticlassClassification
                .Evaluate(transformedTestData);

            PrintMetrics(metrics);

            // Expected output:
            //   Micro Accuracy: 0.91
            //   Macro Accuracy: 0.91
            //   Log Loss: 0.24
            //   Log Loss Reduction: 0.79

            //   Confusion table
            //             ||========================
            //   PREDICTED ||     0 |     1 |     2 | Recall
            //   TRUTH     ||========================
            //           0 ||   148 |     0 |    12 | 0.9250
            //           1 ||     0 |   165 |    12 | 0.9322
            //           2 ||    11 |     7 |   145 | 0.8896
            //             ||========================
            //   Precision ||0.9308 |0.9593 |0.8580 |
        }

        // Generates random uniform doubles in [-0.5, 0.5)
        // range with labels 1, 2 or 3.
        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
            int seed = 0)
        {
            var random = new Random(seed);
            float randomFloat() => (float)(random.NextDouble() - 0.5);
            for (int i = 0; i < count; i++)
            {
                // Generate Labels that are integers 1, 2 or 3
                var label = random.Next(1, 4);
                yield return new DataPoint
                {
                    Label = (uint)label,
                    // Create random features that are correlated with the label.
                    // The feature values are slightly increased by adding a
                    // constant multiple of label.
                    Features = Enumerable.Repeat(label, 20)
                        .Select(x => randomFloat() + label * 0.2f).ToArray()
                };
            }
        }

        // Example with label and 20 feature values. A data set is a collection of
        // such examples.
        private class DataPoint
        {
            public uint Label { get; set; }
            [VectorType(20)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public uint Label { get; set; }
            // Predicted label from the trainer.
            public uint PredictedLabel { get; set; }
        }

        // Pretty-print MulticlassClassificationMetrics objects.
        public static void PrintMetrics(MulticlassClassificationMetrics metrics)
        {
            Console.WriteLine($"Micro Accuracy: {metrics.MicroAccuracy:F2}");
            Console.WriteLine($"Macro Accuracy: {metrics.MacroAccuracy:F2}");
            Console.WriteLine($"Log Loss: {metrics.LogLoss:F2}");
            Console.WriteLine(
                $"Log Loss Reduction: {metrics.LogLossReduction:F2}\n");

            Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
        }
    }
}