StandardTrainersCatalog.SgdCalibrated Method
Definition
Important
Some information relates to prerelease product that may be substantially modified before it's released. Microsoft makes no warranties, express or implied, with respect to the information provided here.
Overloads
| Overload | Description |
|---|---|
| SgdCalibrated(BinaryClassificationCatalog+BinaryClassificationTrainers, SgdCalibratedTrainer+Options) | Create SgdCalibratedTrainer with advanced options, which predicts a target using a linear classification model. Stochastic gradient descent (SGD) is an iterative algorithm that optimizes a differentiable objective function. |
| SgdCalibrated(BinaryClassificationCatalog+BinaryClassificationTrainers, String, String, String, Int32, Double, Single) | Create SgdCalibratedTrainer, which predicts a target using a linear classification model. Stochastic gradient descent (SGD) is an iterative algorithm that optimizes a differentiable objective function. |
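As an illustrative sketch of what a single SGD step looks like for a differentiable loss $\ell$ (a generic form, not this trainer's specific update, which depends on its configured loss):

$$w_{t+1} = w_t - \eta_t \, \nabla_w \ell(w_t;\ x_i, y_i)$$

where $(x_i, y_i)$ is a single randomly drawn training example and $\eta_t$ is the learning rate at step $t$.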
SgdCalibrated(BinaryClassificationCatalog+BinaryClassificationTrainers, SgdCalibratedTrainer+Options)
Create SgdCalibratedTrainer with advanced options, which predicts a target using a linear classification model. Stochastic gradient descent (SGD) is an iterative algorithm that optimizes a differentiable objective function.
public static Microsoft.ML.Trainers.SgdCalibratedTrainer SgdCalibrated (this Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers catalog, Microsoft.ML.Trainers.SgdCalibratedTrainer.Options options);
static member SgdCalibrated : Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers * Microsoft.ML.Trainers.SgdCalibratedTrainer.Options -> Microsoft.ML.Trainers.SgdCalibratedTrainer
<Extension()>
Public Function SgdCalibrated (catalog As BinaryClassificationCatalog.BinaryClassificationTrainers, options As SgdCalibratedTrainer.Options) As SgdCalibratedTrainer
Parameters
- catalog
- BinaryClassificationCatalog.BinaryClassificationTrainers
The binary classification catalog trainer object.
- options
- SgdCalibratedTrainer.Options
Trainer options.
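For instance, a minimal sketch of filling an options object and passing it to this overload, assuming an existing MLContext named mlContext. ConvergenceTolerance, NumberOfIterations, and PositiveInstanceWeight appear in the full example below; LabelColumnName, FeatureColumnName, LearningRate, and L2Regularization are assumed here to mirror the parameters of the other overload, and the values are illustrative only.

var options = new SgdCalibratedTrainer.Options()
{
    // Column names; "Label" and "Features" are the usual ML.NET defaults.
    LabelColumnName = "Label",
    FeatureColumnName = "Features",
    // Optimization settings (illustrative values, not recommendations).
    NumberOfIterations = 30,
    ConvergenceTolerance = 5e-5,
    PositiveInstanceWeight = 1.2f,
    // Assumed to correspond to the learningRate and l2Regularization
    // parameters of the simpler overload.
    LearningRate = 0.01,
    L2Regularization = 1e-6f,
};

var trainer = mlContext.BinaryClassification.Trainers.SgdCalibrated(options);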
Returns
SgdCalibratedTrainer
Examples
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;
using Microsoft.ML.Trainers;
namespace Samples.Dynamic.Trainers.BinaryClassification
{
public static class SgdCalibratedWithOptions
{
public static void Example()
{
// Create a new context for ML.NET operations. It can be used for
// exception tracking and logging, as a catalog of available operations
// and as the source of randomness. Setting the seed to a fixed number
// in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
// Convert the list of data points to an IDataView object, which is
// consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define trainer options.
var options = new SgdCalibratedTrainer.Options()
{
// Make the convergence tolerance tighter.
ConvergenceTolerance = 5e-5,
// Increase the maximum number of passes over training data.
NumberOfIterations = 30,
// Give the instances of the positive class slightly more weight.
PositiveInstanceWeight = 1.2f,
};
// Define the trainer.
var pipeline = mlContext.BinaryClassification.Trainers
.SgdCalibrated(options);
// Train the model.
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
// from training data.
var testData = mlContext.Data
.LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
var predictions = mlContext.Data
.CreateEnumerable<Prediction>(transformedTestData,
reuseRowObject: false).ToList();
// Print 5 predictions.
foreach (var p in predictions.Take(5))
Console.WriteLine($"Label: {p.Label}, "
+ $"Prediction: {p.PredictedLabel}");
// Expected output:
// Label: True, Prediction: False
// Label: False, Prediction: False
// Label: True, Prediction: True
// Label: True, Prediction: True
// Label: False, Prediction: False
// Evaluate the overall metrics.
var metrics = mlContext.BinaryClassification
.Evaluate(transformedTestData);
PrintMetrics(metrics);
// Expected output:
// Accuracy: 0.60
// AUC: 0.65
// F1 Score: 0.50
// Negative Precision: 0.59
// Negative Recall: 0.74
// Positive Precision: 0.61
// Positive Recall: 0.43
//
// TEST POSITIVE RATIO: 0.4760 (238.0/(238.0+262.0))
// Confusion table
// ||======================
// PREDICTED || positive | negative | Recall
// TRUTH ||======================
// positive || 184 | 54 | 0.7731
// negative || 156 | 106 | 0.4046
// ||======================
// Precision || 0.5412 | 0.6625 |
}
private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
int seed = 0)
{
var random = new Random(seed);
float randomFloat() => (float)random.NextDouble();
for (int i = 0; i < count; i++)
{
var label = randomFloat() > 0.5f;
yield return new DataPoint
{
Label = label,
// Create random features that are correlated with the label.
// For data points with false label, the feature values are
// slightly increased by adding a constant.
Features = Enumerable.Repeat(label, 50)
.Select(x => x ? randomFloat() : randomFloat() +
0.03f).ToArray()
};
}
}
// Example with label and 50 feature values. A data set is a collection of
// such examples.
private class DataPoint
{
public bool Label { get; set; }
[VectorType(50)]
public float[] Features { get; set; }
}
// Class used to capture predictions.
private class Prediction
{
// Original label.
public bool Label { get; set; }
// Predicted label from the trainer.
public bool PredictedLabel { get; set; }
}
// Pretty-print BinaryClassificationMetrics objects.
private static void PrintMetrics(BinaryClassificationMetrics metrics)
{
Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}");
Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}");
Console.WriteLine($"F1 Score: {metrics.F1Score:F2}");
Console.WriteLine($"Negative Precision: " +
$"{metrics.NegativePrecision:F2}");
Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}");
Console.WriteLine($"Positive Precision: " +
$"{metrics.PositivePrecision:F2}");
Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}\n");
Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
}
}
}
Applies to
SgdCalibrated(BinaryClassificationCatalog+BinaryClassificationTrainers, String, String, String, Int32, Double, Single)
Create SgdCalibratedTrainer, which predicts a target using a linear classification model. Stochastic gradient descent (SGD) is an iterative algorithm that optimizes a differentiable objective function.
public static Microsoft.ML.Trainers.SgdCalibratedTrainer SgdCalibrated (this Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers catalog, string labelColumnName = "Label", string featureColumnName = "Features", string exampleWeightColumnName = default, int numberOfIterations = 20, double learningRate = 0.01, float l2Regularization = 1E-06);
static member SgdCalibrated : Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers * string * string * string * int * double * single -> Microsoft.ML.Trainers.SgdCalibratedTrainer
<Extension()>
Public Function SgdCalibrated (catalog As BinaryClassificationCatalog.BinaryClassificationTrainers, Optional labelColumnName As String = "Label", Optional featureColumnName As String = "Features", Optional exampleWeightColumnName As String = Nothing, Optional numberOfIterations As Integer = 20, Optional learningRate As Double = 0.01, Optional l2Regularization As Single = 1E-06) As SgdCalibratedTrainer
Parameters
- catalog
- BinaryClassificationCatalog.BinaryClassificationTrainers
The binary classification catalog trainer object.
- labelColumnName
- String
The name of the label column, or dependent variable. The column data must be Boolean.
- featureColumnName
- String
The features, or independent variables. The column data must be a known-sized vector of Single.
- exampleWeightColumnName
- String
The name of the example weight column (optional).
- numberOfIterations
- Int32
The maximum number of passes over the training dataset; set to 1 to simulate online learning (see the sketch after this parameter list).
- learningRate
- Double
The initial learning rate used by SGD.
- l2Regularization
- Single
The L2 weight for regularization.
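As a sketch of how these parameters map onto a call (assuming an existing MLContext named mlContext; the values are illustrative only). Setting numberOfIterations to 1 makes a single pass over the data, simulating online learning:

var onlineLikeTrainer = mlContext.BinaryClassification.Trainers.SgdCalibrated(
    labelColumnName: "Label",
    featureColumnName: "Features",
    // One pass over the training data simulates online learning.
    numberOfIterations: 1,
    learningRate: 0.01,
    l2Regularization: 1e-6f);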
Returns
SgdCalibratedTrainer
Examples
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;
namespace Samples.Dynamic.Trainers.BinaryClassification
{
public static class SgdCalibrated
{
public static void Example()
{
// Create a new context for ML.NET operations. It can be used for
// exception tracking and logging, as a catalog of available operations
// and as the source of randomness. Setting the seed to a fixed number
// in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
// Convert the list of data points to an IDataView object, which is
// consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define the trainer.
var pipeline = mlContext.BinaryClassification.Trainers
.SgdCalibrated();
// Train the model.
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
// from training data.
var testData = mlContext.Data
.LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
var predictions = mlContext.Data
.CreateEnumerable<Prediction>(transformedTestData,
reuseRowObject: false).ToList();
// Print 5 predictions.
foreach (var p in predictions.Take(5))
Console.WriteLine($"Label: {p.Label}, "
+ $"Prediction: {p.PredictedLabel}");
// Expected output:
// Label: True, Prediction: False
// Label: False, Prediction: False
// Label: True, Prediction: True
// Label: True, Prediction: True
// Label: False, Prediction: False
// Evaluate the overall metrics.
var metrics = mlContext.BinaryClassification
.Evaluate(transformedTestData);
PrintMetrics(metrics);
// Expected output:
// Accuracy: 0.60
// AUC: 0.63
// F1 Score: 0.43
// Negative Precision: 0.58
// Negative Recall: 0.85
// Positive Precision: 0.66
// Positive Recall: 0.32
//
// TEST POSITIVE RATIO: 0.4760 (238.0/(238.0+262.0))
// Confusion table
// ||======================
// PREDICTED || positive | negative | Recall
// TRUTH ||======================
// positive || 77 | 161 | 0.3235
// negative || 43 | 219 | 0.8359
// ||======================
// Precision || 0.6417 | 0.5763 |
}
private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
int seed = 0)
{
var random = new Random(seed);
float randomFloat() => (float)random.NextDouble();
for (int i = 0; i < count; i++)
{
var label = randomFloat() > 0.5f;
yield return new DataPoint
{
Label = label,
// Create random features that are correlated with the label.
// For data points with false label, the feature values are
// slightly increased by adding a constant.
Features = Enumerable.Repeat(label, 50)
.Select(x => x ? randomFloat() : randomFloat() +
0.03f).ToArray()
};
}
}
// Example with label and 50 feature values. A data set is a collection of
// such examples.
private class DataPoint
{
public bool Label { get; set; }
[VectorType(50)]
public float[] Features { get; set; }
}
// Class used to capture predictions.
private class Prediction
{
// Original label.
public bool Label { get; set; }
// Predicted label from the trainer.
public bool PredictedLabel { get; set; }
}
// Pretty-print BinaryClassificationMetrics objects.
private static void PrintMetrics(BinaryClassificationMetrics metrics)
{
Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}");
Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}");
Console.WriteLine($"F1 Score: {metrics.F1Score:F2}");
Console.WriteLine($"Negative Precision: " +
$"{metrics.NegativePrecision:F2}");
Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}");
Console.WriteLine($"Positive Precision: " +
$"{metrics.PositivePrecision:F2}");
Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}\n");
Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
}
}
}