MklComponentsCatalog.SymbolicSgdLogisticRegression Method
Definition
Important
Some information relates to prerelease product that may be substantially modified before it's released. Microsoft makes no warranties, express or implied, with respect to the information provided here.
Overloads
SymbolicSgdLogisticRegression(BinaryClassificationCatalog+BinaryClassificationTrainers, SymbolicSgdLogisticRegressionBinaryTrainer+Options)
Create SymbolicSgdLogisticRegressionBinaryTrainer with advanced options, which predicts a target using a linear binary classification model trained over Boolean label data. Stochastic gradient descent (SGD) is an iterative algorithm that optimizes a differentiable objective function. SymbolicSgdLogisticRegressionBinaryTrainer parallelizes SGD using symbolic execution.
SymbolicSgdLogisticRegression(BinaryClassificationCatalog+BinaryClassificationTrainers, String, String, Int32)
Create SymbolicSgdLogisticRegressionBinaryTrainer, which predicts a target using a linear binary classification model trained over Boolean label data. Stochastic gradient descent (SGD) is an iterative algorithm that optimizes a differentiable objective function. SymbolicSgdLogisticRegressionBinaryTrainer parallelizes SGD using symbolic execution.
SymbolicSgdLogisticRegression(BinaryClassificationCatalog+BinaryClassificationTrainers, SymbolicSgdLogisticRegressionBinaryTrainer+Options)
Create SymbolicSgdLogisticRegressionBinaryTrainer with advanced options, which predicts a target using a linear binary classification model trained over Boolean label data. Stochastic gradient descent (SGD) is an iterative algorithm that optimizes a differentiable objective function. SymbolicSgdLogisticRegressionBinaryTrainer parallelizes SGD using symbolic execution.
public static Microsoft.ML.Trainers.SymbolicSgdLogisticRegressionBinaryTrainer SymbolicSgdLogisticRegression (this Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers catalog, Microsoft.ML.Trainers.SymbolicSgdLogisticRegressionBinaryTrainer.Options options);
static member SymbolicSgdLogisticRegression : Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers * Microsoft.ML.Trainers.SymbolicSgdLogisticRegressionBinaryTrainer.Options -> Microsoft.ML.Trainers.SymbolicSgdLogisticRegressionBinaryTrainer
<Extension()>
Public Function SymbolicSgdLogisticRegression (catalog As BinaryClassificationCatalog.BinaryClassificationTrainers, options As SymbolicSgdLogisticRegressionBinaryTrainer.Options) As SymbolicSgdLogisticRegressionBinaryTrainer
Parameters
- options
Advanced options for the algorithm. See SymbolicSgdLogisticRegressionBinaryTrainer.Options.
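For reference, a minimal sketch of populating this options object (the specific values are illustrative assumptions; the full list of settings is documented on SymbolicSgdLogisticRegressionBinaryTrainer.Options):
var options = new SymbolicSgdLogisticRegressionBinaryTrainer.Options
{
    // Step size used for each SGD update (illustrative value).
    LearningRate = 0.2f,
    // Number of passes over the training data (illustrative value).
    NumberOfIterations = 10,
    // Degree of parallelism used by the symbolic SGD implementation (illustrative value).
    NumberOfThreads = 1
};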
Returns
SymbolicSgdLogisticRegressionBinaryTrainer
Example
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;
using Microsoft.ML.Trainers;
namespace Samples.Dynamic.Trainers.BinaryClassification
{
public static class SymbolicSgdLogisticRegressionWithOptions
{
// This example requires installation of the additional NuGet package
// Microsoft.ML.Mkl.Components at
// https://www.nuget.org/packages/Microsoft.ML.Mkl.Components/
public static void Example()
{
// Create a new context for ML.NET operations. It can be used for
// exception tracking and logging, as a catalog of available operations
// and as the source of randomness. Setting the seed to a fixed number
// in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
// Convert the list of data points to an IDataView object, which is
// consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define trainer options.
var options = new SymbolicSgdLogisticRegressionBinaryTrainer.Options()
{
LearningRate = 0.2f,
NumberOfIterations = 10,
NumberOfThreads = 1,
};
// Define the trainer.
var pipeline = mlContext.BinaryClassification.Trainers
.SymbolicSgdLogisticRegression(options);
// Train the model.
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
// from training data.
var testData = mlContext.Data
.LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
var predictions = mlContext.Data
.CreateEnumerable<Prediction>(transformedTestData,
reuseRowObject: false).ToList();
// Print 5 predictions.
foreach (var p in predictions.Take(5))
Console.WriteLine($"Label: {p.Label}, "
+ $"Prediction: {p.PredictedLabel}");
// Expected output:
// Label: True, Prediction: False
// Label: False, Prediction: False
// Label: True, Prediction: True
// Label: True, Prediction: True
// Label: False, Prediction: False
// Evaluate the overall metrics.
var metrics = mlContext.BinaryClassification
.Evaluate(transformedTestData);
PrintMetrics(metrics);
// Expected output:
// Accuracy: 0.72
// AUC: 0.81
// F1 Score: 0.66
// Negative Precision: 0.68
// Negative Recall: 0.87
// Positive Precision: 0.80
// Positive Recall: 0.56
//
// TEST POSITIVE RATIO: 0.4760 (238.0/(238.0+262.0))
// Confusion table
// ||======================
// PREDICTED || positive | negative | Recall
// TRUTH ||======================
// positive || 133 | 105 | 0.5588
// negative || 34 | 228 | 0.8702
// ||======================
// Precision || 0.7964 | 0.6847 |
}
private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
int seed = 0)
{
var random = new Random(seed);
float randomFloat() => (float)random.NextDouble();
for (int i = 0; i < count; i++)
{
var label = randomFloat() > 0.5f;
yield return new DataPoint
{
Label = label,
// Create random features that are correlated with the label.
// For data points with false label, the feature values are
// slightly increased by adding a constant.
Features = Enumerable.Repeat(label, 50)
.Select(x => x ? randomFloat() : randomFloat() +
0.1f).ToArray()
};
}
}
// Example with label and 50 feature values. A data set is a collection of
// such examples.
private class DataPoint
{
public bool Label { get; set; }
[VectorType(50)]
public float[] Features { get; set; }
}
// Class used to capture predictions.
private class Prediction
{
// Original label.
public bool Label { get; set; }
// Predicted label from the trainer.
public bool PredictedLabel { get; set; }
}
// Pretty-print BinaryClassificationMetrics objects.
private static void PrintMetrics(BinaryClassificationMetrics metrics)
{
Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}");
Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}");
Console.WriteLine($"F1 Score: {metrics.F1Score:F2}");
Console.WriteLine($"Negative Precision: " +
$"{metrics.NegativePrecision:F2}");
Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}");
Console.WriteLine($"Positive Precision: " +
$"{metrics.PositivePrecision:F2}");
Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}\n");
Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
}
}
}
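As a follow-up to the sample above, a minimal sketch of persisting the trained model and scoring a single row with a prediction engine; the file name is an assumption, and the DataPoint and Prediction classes are reused from the sample:
// Save the trained model together with its input schema (file name is illustrative).
mlContext.Model.Save(model, trainingData.Schema, "symbolic-sgd-model.zip");
// Reload the model and score one hand-built example.
var loadedModel = mlContext.Model.Load("symbolic-sgd-model.zip", out var inputSchema);
var engine = mlContext.Model
    .CreatePredictionEngine<DataPoint, Prediction>(loadedModel);
var single = engine.Predict(new DataPoint
{
    Features = Enumerable.Repeat(0.5f, 50).ToArray()
});
Console.WriteLine($"Predicted label: {single.PredictedLabel}");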
Applies to
SymbolicSgdLogisticRegression(BinaryClassificationCatalog+BinaryClassificationTrainers, String, String, Int32)
Create SymbolicSgdLogisticRegressionBinaryTrainer, which predicts a target using a linear binary classification model trained over Boolean label data. Stochastic gradient descent (SGD) is an iterative algorithm that optimizes a differentiable objective function. SymbolicSgdLogisticRegressionBinaryTrainer parallelizes SGD using symbolic execution.
public static Microsoft.ML.Trainers.SymbolicSgdLogisticRegressionBinaryTrainer SymbolicSgdLogisticRegression (this Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers catalog, string labelColumnName = "Label", string featureColumnName = "Features", int numberOfIterations = 50);
static member SymbolicSgdLogisticRegression : Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers * string * string * int -> Microsoft.ML.Trainers.SymbolicSgdLogisticRegressionBinaryTrainer
<Extension()>
Public Function SymbolicSgdLogisticRegression (catalog As BinaryClassificationCatalog.BinaryClassificationTrainers, Optional labelColumnName As String = "Label", Optional featureColumnName As String = "Features", Optional numberOfIterations As Integer = 50) As SymbolicSgdLogisticRegressionBinaryTrainer
Parameters
- labelColumnName
- String
The name of the label column. The column data must be Boolean.
- featureColumnName
- String
The name of the feature column. The column data must be a known-sized vector of Single.
- numberOfIterations
- Int32
The number of training iterations.
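Where the defaults need to be overridden, a minimal sketch of calling this overload with explicit arguments (the column names and iteration count here are illustrative assumptions):
var trainer = mlContext.BinaryClassification.Trainers
    .SymbolicSgdLogisticRegression(
        labelColumnName: "Label",
        featureColumnName: "Features",
        numberOfIterations: 30);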
Returns
SymbolicSgdLogisticRegressionBinaryTrainer
Example
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;
namespace Samples.Dynamic.Trainers.BinaryClassification
{
public static class SymbolicSgdLogisticRegression
{
// This example requires installation of the additional NuGet package
// Microsoft.ML.Mkl.Components at
// https://www.nuget.org/packages/Microsoft.ML.Mkl.Components/
public static void Example()
{
// Create a new context for ML.NET operations. It can be used for
// exception tracking and logging, as a catalog of available operations
// and as the source of randomness. Setting the seed to a fixed number
// in this example to make outputs deterministic.
var mlContext = new MLContext(seed: 0);
// Create a list of training data points.
var dataPoints = GenerateRandomDataPoints(1000);
// Convert the list of data points to an IDataView object, which is
// consumable by ML.NET API.
var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);
// Define the trainer.
var pipeline = mlContext.BinaryClassification.Trainers
.SymbolicSgdLogisticRegression();
// Train the model.
var model = pipeline.Fit(trainingData);
// Create testing data. Use different random seed to make it different
// from training data.
var testData = mlContext.Data
.LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));
// Run the model on test data set.
var transformedTestData = model.Transform(testData);
// Convert IDataView object to a list.
var predictions = mlContext.Data
.CreateEnumerable<Prediction>(transformedTestData,
reuseRowObject: false).ToList();
// Print 5 predictions.
foreach (var p in predictions.Take(5))
Console.WriteLine($"Label: {p.Label}, "
+ $"Prediction: {p.PredictedLabel}");
// Expected output:
// Label: True, Prediction: False
// Label: False, Prediction: False
// Label: True, Prediction: True
// Label: True, Prediction: True
// Label: False, Prediction: True
// Evaluate the overall metrics.
var metrics = mlContext.BinaryClassification
.Evaluate(transformedTestData);
PrintMetrics(metrics);
// Expected output:
// Accuracy: 0.69
// AUC: 0.76
// F1 Score: 0.68
// Negative Precision: 0.72
// Negative Recall: 0.66
// Positive Precision: 0.66
// Positive Recall: 0.71
//
// TEST POSITIVE RATIO: 0.4760 (238.0/(238.0+262.0))
// Confusion table
// ||======================
// PREDICTED || positive | negative | Recall
// TRUTH ||======================
// positive || 196 | 42 | 0.8235
// negative || 42 | 220 | 0.8397
// ||======================
// Precision || 0.8235 | 0.8397 |
}
private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
int seed = 0)
{
var random = new Random(seed);
float randomFloat() => (float)random.NextDouble();
for (int i = 0; i < count; i++)
{
var label = randomFloat() > 0.5f;
yield return new DataPoint
{
Label = label,
// Create random features that are correlated with the label.
// For data points with false label, the feature values are
// slightly increased by adding a constant.
Features = Enumerable.Repeat(label, 50)
.Select(x => x ? randomFloat() : randomFloat() +
0.1f).ToArray()
};
}
}
// Example with label and 50 feature values. A data set is a collection of
// such examples.
private class DataPoint
{
public bool Label { get; set; }
[VectorType(50)]
public float[] Features { get; set; }
}
// Class used to capture predictions.
private class Prediction
{
// Original label.
public bool Label { get; set; }
// Predicted label from the trainer.
public bool PredictedLabel { get; set; }
}
// Pretty-print BinaryClassificationMetrics objects.
private static void PrintMetrics(BinaryClassificationMetrics metrics)
{
Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}");
Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}");
Console.WriteLine($"F1 Score: {metrics.F1Score:F2}");
Console.WriteLine($"Negative Precision: " +
$"{metrics.NegativePrecision:F2}");
Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}");
Console.WriteLine($"Positive Precision: " +
$"{metrics.PositivePrecision:F2}");
Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}\n");
Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
}
}
}