

StandardTrainersCatalog.AveragedPerceptron Method

Definition

Overloads

AveragedPerceptron(BinaryClassificationCatalog+BinaryClassificationTrainers, AveragedPerceptronTrainer+Options)

Create AveragedPerceptronTrainer with advanced options, which predicts a target using a linear binary classification model trained over boolean label data.

AveragedPerceptron(BinaryClassificationCatalog+BinaryClassificationTrainers, String, String, IClassificationLoss, Single, Boolean, Single, Int32)

Create AveragedPerceptronTrainer, which predicts a target using a linear binary classification model trained over boolean label data.

AveragedPerceptron(BinaryClassificationCatalog+BinaryClassificationTrainers, AveragedPerceptronTrainer+Options)

Create AveragedPerceptronTrainer with advanced options, which predicts a target using a linear binary classification model trained over boolean label data.

public static Microsoft.ML.Trainers.AveragedPerceptronTrainer AveragedPerceptron (this Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers catalog, Microsoft.ML.Trainers.AveragedPerceptronTrainer.Options options);
static member AveragedPerceptron : Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers * Microsoft.ML.Trainers.AveragedPerceptronTrainer.Options -> Microsoft.ML.Trainers.AveragedPerceptronTrainer
<Extension()>
Public Function AveragedPerceptron (catalog As BinaryClassificationCatalog.BinaryClassificationTrainers, options As AveragedPerceptronTrainer.Options) As AveragedPerceptronTrainer

Parameters

catalog
BinaryClassificationCatalog.BinaryClassificationTrainers

The binary classification catalog trainer object.

options
AveragedPerceptronTrainer.Options

Trainer options.
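
A minimal sketch of how the catalog and options arguments fit together, assuming an existing MLContext named mlContext and data that uses the default "Label"/"Features" columns; the option values below are illustrative, not defaults (the full runnable sample appears under Examples):

// Assumes mlContext is an existing MLContext and the training data uses the
// default "Label"/"Features" column names. Option values are illustrative only.
var options = new Microsoft.ML.Trainers.AveragedPerceptronTrainer.Options
{
    LossFunction = new Microsoft.ML.Trainers.HingeLoss(),
    LearningRate = 0.1f,
    NumberOfIterations = 10
};

// Pass the options object to the catalog extension method.
var trainer = mlContext.BinaryClassification.Trainers.AveragedPerceptron(options);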

Returns

AveragedPerceptronTrainer

Examples

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;
using Microsoft.ML.Trainers;

namespace Samples.Dynamic.Trainers.BinaryClassification
{
    public static class AveragedPerceptronWithOptions
    {
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for
            // exception tracking and logging, as a catalog of available operations
            // and as the source of randomness. Setting the seed to a fixed number
            // in this example to make outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is
            // consumable by ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

            // Define trainer options.
            var options = new AveragedPerceptronTrainer.Options
            {
                LossFunction = new SmoothedHingeLoss(),
                LearningRate = 0.1f,
                LazyUpdate = false,
                RecencyGain = 0.1f,
                NumberOfIterations = 10
            };

            // Define the trainer.
            var pipeline = mlContext.BinaryClassification.Trainers
                .AveragedPerceptron(options);

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use different random seed to make it different
            // from training data.
            var testData = mlContext.Data
                .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));

            // Run the model on test data set.
            var transformedTestData = model.Transform(testData);

            // Convert IDataView object to a list.
            var predictions = mlContext.Data
                .CreateEnumerable<Prediction>(transformedTestData,
                reuseRowObject: false).ToList();

            // Print 5 predictions.
            foreach (var p in predictions.Take(5))
                Console.WriteLine($"Label: {p.Label}, "
                    + $"Prediction: {p.PredictedLabel}");

            // Expected output:
            //   Label: True, Prediction: True
            //   Label: False, Prediction: False
            //   Label: True, Prediction: True
            //   Label: True, Prediction: True
            //   Label: False, Prediction: False

            // Evaluate the overall metrics.
            var metrics = mlContext.BinaryClassification
                .EvaluateNonCalibrated(transformedTestData);

            PrintMetrics(metrics);

            // Expected output:
            //   Accuracy: 0.89
            //   AUC: 0.96
            //   F1 Score: 0.88
            //   Negative Precision: 0.87
            //   Negative Recall: 0.92
            //   Positive Precision: 0.91
            //   Positive Recall: 0.85
            //
            //   TEST POSITIVE RATIO:    0.4760 (238.0/(238.0+262.0))
            //   Confusion table
            //             ||======================
            //   PREDICTED || positive | negative | Recall
            //   TRUTH     ||======================
            //    positive ||      151 |       87 | 0.6345
            //    negative ||       53 |      209 | 0.7977
            //             ||======================
            //   Precision ||   0.7402 |   0.7061 |
        }

        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
            int seed = 0)

        {
            var random = new Random(seed);
            float randomFloat() => (float)random.NextDouble();
            for (int i = 0; i < count; i++)
            {
                var label = randomFloat() > 0.5f;
                yield return new DataPoint
                {
                    Label = label,
                    // Create random features that are correlated with the label.
                    // For data points with false label, the feature values are
                    // slightly increased by adding a constant.
                    Features = Enumerable.Repeat(label, 50)
                        .Select(x => x ? randomFloat() : randomFloat() +
                        0.1f).ToArray()

                };
            }
        }

        // Example with label and 50 feature values. A data set is a collection of
        // such examples.
        private class DataPoint
        {
            public bool Label { get; set; }
            [VectorType(50)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public bool Label { get; set; }
            // Predicted label from the trainer.
            public bool PredictedLabel { get; set; }
        }

        // Pretty-print BinaryClassificationMetrics objects.
        private static void PrintMetrics(BinaryClassificationMetrics metrics)
        {
            Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}");
            Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}");
            Console.WriteLine($"F1 Score: {metrics.F1Score:F2}");
            Console.WriteLine($"Negative Precision: " +
                $"{metrics.NegativePrecision:F2}");

            Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}");
            Console.WriteLine($"Positive Precision: " +
                $"{metrics.PositivePrecision:F2}");

            Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}\n");
            Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
        }
    }
}

Applies to

AveragedPerceptron(BinaryClassificationCatalog+BinaryClassificationTrainers, String, String, IClassificationLoss, Single, Boolean, Single, Int32)

Create AveragedPerceptronTrainer, which predicts a target using a linear binary classification model trained over boolean label data.

public static Microsoft.ML.Trainers.AveragedPerceptronTrainer AveragedPerceptron (this Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers catalog, string labelColumnName = "Label", string featureColumnName = "Features", Microsoft.ML.Trainers.IClassificationLoss lossFunction = default, float learningRate = 1, bool decreaseLearningRate = false, float l2Regularization = 0, int numberOfIterations = 10);
public static Microsoft.ML.Trainers.AveragedPerceptronTrainer AveragedPerceptron (this Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers catalog, string labelColumnName = "Label", string featureColumnName = "Features", Microsoft.ML.Trainers.IClassificationLoss lossFunction = default, float learningRate = 1, bool decreaseLearningRate = false, float l2Regularization = 0, int numberOfIterations = 1);
static member AveragedPerceptron : Microsoft.ML.BinaryClassificationCatalog.BinaryClassificationTrainers * string * string * Microsoft.ML.Trainers.IClassificationLoss * single * bool * single * int -> Microsoft.ML.Trainers.AveragedPerceptronTrainer
<Extension()>
Public Function AveragedPerceptron (catalog As BinaryClassificationCatalog.BinaryClassificationTrainers, Optional labelColumnName As String = "Label", Optional featureColumnName As String = "Features", Optional lossFunction As IClassificationLoss = Nothing, Optional learningRate As Single = 1, Optional decreaseLearningRate As Boolean = false, Optional l2Regularization As Single = 0, Optional numberOfIterations As Integer = 10) As AveragedPerceptronTrainer
<Extension()>
Public Function AveragedPerceptron (catalog As BinaryClassificationCatalog.BinaryClassificationTrainers, Optional labelColumnName As String = "Label", Optional featureColumnName As String = "Features", Optional lossFunction As IClassificationLoss = Nothing, Optional learningRate As Single = 1, Optional decreaseLearningRate As Boolean = false, Optional l2Regularization As Single = 0, Optional numberOfIterations As Integer = 1) As AveragedPerceptronTrainer

Parameters

catalog
BinaryClassificationCatalog.BinaryClassificationTrainers

The binary classification catalog trainer object.

labelColumnName
String

The name of the label column. The column data must be Boolean.

featureColumnName
String

The name of the feature column. The column data must be a known-sized vector of Single.

lossFunction
IClassificationLoss

The loss function minimized in the training process. If null, HingeLoss is used, which leads to a max-margin averaged perceptron. (A sketch showing how to pass a custom loss follows this parameter list.)

learningRate
Single

The initial learning rate used by SGD.

decreaseLearningRate
Boolean

true to decrease the learningRate as iterations progress; otherwise, false. The default is false.

l2Regularization
Single

The L2 weight for regularization.

numberOfIterations
Int32

Number of passes through the training dataset.
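
A minimal sketch of calling this overload with the parameters above set explicitly, assuming an existing MLContext named mlContext; the argument values are illustrative (the runnable sample under Examples relies on the defaults instead):

// Assumes mlContext is an existing MLContext. All argument values below are
// illustrative; omit any argument to fall back to its documented default.
var trainer = mlContext.BinaryClassification.Trainers.AveragedPerceptron(
    labelColumnName: "Label",
    featureColumnName: "Features",
    lossFunction: new Microsoft.ML.Trainers.HingeLoss(),
    learningRate: 0.1f,
    decreaseLearningRate: false,
    l2Regularization: 0f,
    numberOfIterations: 10);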

Returns

AveragedPerceptronTrainer

Examples

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;

namespace Samples.Dynamic.Trainers.BinaryClassification
{
    public static class AveragedPerceptron
    {
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for
            // exception tracking and logging, as a catalog of available operations
            // and as the source of randomness. Setting the seed to a fixed number
            // in this example to make outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is
            // consumable by ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

            // Define the trainer.
            var pipeline = mlContext.BinaryClassification.Trainers
                .AveragedPerceptron();

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use different random seed to make it different
            // from training data.
            var testData = mlContext.Data
                .LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));

            // Run the model on test data set.
            var transformedTestData = model.Transform(testData);

            // Convert IDataView object to a list.
            var predictions = mlContext.Data
                .CreateEnumerable<Prediction>(transformedTestData,
                reuseRowObject: false).ToList();

            // Print 5 predictions.
            foreach (var p in predictions.Take(5))
                Console.WriteLine($"Label: {p.Label}, "
                    + $"Prediction: {p.PredictedLabel}");

            // Expected output:
            //   Label: True, Prediction: True
            //   Label: False, Prediction: False
            //   Label: True, Prediction: True
            //   Label: True, Prediction: False
            //   Label: False, Prediction: False

            // Evaluate the overall metrics.
            var metrics = mlContext.BinaryClassification
                .EvaluateNonCalibrated(transformedTestData);

            PrintMetrics(metrics);

            // Expected output:
            //   Accuracy: 0.72
            //   AUC: 0.79
            //   F1 Score: 0.68
            //   Negative Precision: 0.71
            //   Negative Recall: 0.80
            //   Positive Precision: 0.74
            //   Positive Recall: 0.63
            //
            //   TEST POSITIVE RATIO:    0.4760 (238.0/(238.0+262.0))
            //   Confusion table
            //             ||======================
            //   PREDICTED || positive | negative | Recall
            //   TRUTH     ||======================
            //    positive ||      151 |       87 | 0.6345
            //    negative ||       53 |      209 | 0.7977
            //             ||======================
            //   Precision ||   0.7402 |   0.7061 |
        }

        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
            int seed = 0)

        {
            var random = new Random(seed);
            float randomFloat() => (float)random.NextDouble();
            for (int i = 0; i < count; i++)
            {
                var label = randomFloat() > 0.5f;
                yield return new DataPoint
                {
                    Label = label,
                    // Create random features that are correlated with the label.
                    // For data points with false label, the feature values are
                    // slightly increased by adding a constant.
                    Features = Enumerable.Repeat(label, 50)
                        .Select(x => x ? randomFloat() : randomFloat() +
                        0.1f).ToArray()

                };
            }
        }

        // Example with label and 50 feature values. A data set is a collection of
        // such examples.
        private class DataPoint
        {
            public bool Label { get; set; }
            [VectorType(50)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public bool Label { get; set; }
            // Predicted label from the trainer.
            public bool PredictedLabel { get; set; }
        }

        // Pretty-print BinaryClassificationMetrics objects.
        private static void PrintMetrics(BinaryClassificationMetrics metrics)
        {
            Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}");
            Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}");
            Console.WriteLine($"F1 Score: {metrics.F1Score:F2}");
            Console.WriteLine($"Negative Precision: " +
                $"{metrics.NegativePrecision:F2}");

            Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}");
            Console.WriteLine($"Positive Precision: " +
                $"{metrics.PositivePrecision:F2}");

            Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}\n");
            Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
        }
    }
}

Applies to