StandardTrainersCatalog.OnlineGradientDescent Method

Definition

Overloads

OnlineGradientDescent(RegressionCatalog+RegressionTrainers, String, String, IRegressionLoss, Single, Boolean, Single, Int32)

Create OnlineGradientDescentTrainer, which predicts a target using a linear regression model.

OnlineGradientDescent(RegressionCatalog+RegressionTrainers, OnlineGradientDescentTrainer+Options)

Create OnlineGradientDescentTrainer with advanced options, which predicts a target using a linear regression model.

OnlineGradientDescent(RegressionCatalog+RegressionTrainers, String, String, IRegressionLoss, Single, Boolean, Single, Int32)

Create OnlineGradientDescentTrainer, which predicts a target using a linear regression model.

public static Microsoft.ML.Trainers.OnlineGradientDescentTrainer OnlineGradientDescent (this Microsoft.ML.RegressionCatalog.RegressionTrainers catalog, string labelColumnName = "Label", string featureColumnName = "Features", Microsoft.ML.Trainers.IRegressionLoss lossFunction = default, float learningRate = 0.1, bool decreaseLearningRate = true, float l2Regularization = 0, int numberOfIterations = 1);
static member OnlineGradientDescent : Microsoft.ML.RegressionCatalog.RegressionTrainers * string * string * Microsoft.ML.Trainers.IRegressionLoss * single * bool * single * int -> Microsoft.ML.Trainers.OnlineGradientDescentTrainer
<Extension()>
Public Function OnlineGradientDescent (catalog As RegressionCatalog.RegressionTrainers, Optional labelColumnName As String = "Label", Optional featureColumnName As String = "Features", Optional lossFunction As IRegressionLoss = Nothing, Optional learningRate As Single = 0.1, Optional decreaseLearningRate As Boolean = true, Optional l2Regularization As Single = 0, Optional numberOfIterations As Integer = 1) As OnlineGradientDescentTrainer

Parameters

catalog
RegressionCatalog.RegressionTrainers

The regression catalog trainer object.

labelColumnName
String

The name of the label column. The column data must be Single.

featureColumnName
String

The name of the feature column. The column data must be a known-sized vector of Single.

lossFunction
IRegressionLoss

The loss function minimized in the training process. Using, for example, SquaredLoss leads to a least squares trainer.

learningRate
Single

The initial learning rate used by SGD.

decreaseLearningRate
Boolean

Decrease the learning rate as iterations progress.

l2Regularization
Single

The L2 weight for regularization.

numberOfIterations
Int32

The number of passes through the training dataset.
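
For reference, a call that sets these parameters explicitly might look like the sketch below. The values and the mlContext variable are assumed for illustration only; SquaredLoss is used here as one possible IRegressionLoss implementation.

// A minimal sketch of the parameterized overload; the settings shown are
// illustrative, not tuned values.
var trainer = mlContext.Regression.Trainers.OnlineGradientDescent(
    labelColumnName: "Label",
    featureColumnName: "Features",
    lossFunction: new Microsoft.ML.Trainers.SquaredLoss(), // least-squares objective
    learningRate: 0.1f,
    decreaseLearningRate: true,
    l2Regularization: 0f,
    numberOfIterations: 10);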

Returns

OnlineGradientDescentTrainer

Example

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;

namespace Samples.Dynamic.Trainers.Regression
{
    public static class OnlineGradientDescent
    {
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for
            // exception tracking and logging, as a catalog of available operations
            // and as the source of randomness. Setting the seed to a fixed number
            // in this example to make outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is
            // consumable by ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

            // Define the trainer.
            var pipeline = mlContext.Regression.Trainers.OnlineGradientDescent(
                labelColumnName: nameof(DataPoint.Label),
                featureColumnName: nameof(DataPoint.Features));

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use different random seed to make it different
            // from training data.
            var testData = mlContext.Data.LoadFromEnumerable(
                GenerateRandomDataPoints(5, seed: 123));

            // Run the model on test data set.
            var transformedTestData = model.Transform(testData);

            // Convert IDataView object to a list.
            var predictions = mlContext.Data.CreateEnumerable<Prediction>(
                transformedTestData, reuseRowObject: false).ToList();

            // Look at 5 predictions for the Label, side by side with the actual
            // Label for comparison.
            foreach (var p in predictions)
                Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");

            // This trainer is not numerically stable.
            // Please see issue #2425.

            // Evaluate the overall metrics
            var metrics = mlContext.Regression.Evaluate(transformedTestData);
            PrintMetrics(metrics);
        }

        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
            int seed = 0)
        {
            var random = new Random(seed);
            for (int i = 0; i < count; i++)
            {
                float label = (float)random.NextDouble();
                yield return new DataPoint
                {
                    Label = label,
                    // Create random features that are correlated with the label.
                    Features = Enumerable.Repeat(label, 50).Select(
                        x => x + (float)random.NextDouble()).ToArray()
                };
            }
        }

        // Example with label and 50 feature values. A data set is a collection of
        // such examples.
        private class DataPoint
        {
            public float Label { get; set; }
            [VectorType(50)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public float Label { get; set; }
            // Predicted score from the trainer.
            public float Score { get; set; }
        }

        // Print some evaluation metrics for regression problems.
        private static void PrintMetrics(RegressionMetrics metrics)
        {
            Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
            Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
            Console.WriteLine(
                "Root Mean Squared Error: " + metrics.RootMeanSquaredError);

            Console.WriteLine("RSquared: " + metrics.RSquared);
        }
    }
}

Applies to

OnlineGradientDescent(RegressionCatalog+RegressionTrainers, OnlineGradientDescentTrainer+Options)

Create OnlineGradientDescentTrainer with advanced options, which predicts a target using a linear regression model.

public static Microsoft.ML.Trainers.OnlineGradientDescentTrainer OnlineGradientDescent (this Microsoft.ML.RegressionCatalog.RegressionTrainers catalog, Microsoft.ML.Trainers.OnlineGradientDescentTrainer.Options options);
static member OnlineGradientDescent : Microsoft.ML.RegressionCatalog.RegressionTrainers * Microsoft.ML.Trainers.OnlineGradientDescentTrainer.Options -> Microsoft.ML.Trainers.OnlineGradientDescentTrainer
<Extension()>
Public Function OnlineGradientDescent (catalog As RegressionCatalog.RegressionTrainers, options As OnlineGradientDescentTrainer.Options) As OnlineGradientDescentTrainer

Parameters

catalog
RegressionCatalog.RegressionTrainers

The regression catalog trainer object.

options
OnlineGradientDescentTrainer.Options

Trainer options.
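
Besides the fields shown in the example below, the Options type also exposes the knobs of the other overload through its base option classes. The sketch below assumes the usual inherited property names (NumberOfIterations, LearningRate, DecreaseLearningRate, L2Regularization) and uses illustrative values only; mlContext is assumed as in the example.

// A minimal sketch; the values are illustrative, not recommended settings.
var options = new Microsoft.ML.Trainers.OnlineGradientDescentTrainer.Options
{
    LabelColumnName = "Label",
    FeatureColumnName = "Features",
    LossFunction = new Microsoft.ML.Trainers.SquaredLoss(),
    NumberOfIterations = 10,      // passes over the training data
    LearningRate = 0.1f,          // initial SGD learning rate
    DecreaseLearningRate = true,  // decay the rate as iterations progress
    L2Regularization = 0f
};
var trainer = mlContext.Regression.Trainers.OnlineGradientDescent(options);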

Returns

OnlineGradientDescentTrainer

Example

using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;
using Microsoft.ML.Trainers;

namespace Samples.Dynamic.Trainers.Regression
{
    public static class OnlineGradientDescentWithOptions
    {
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for
            // exception tracking and logging, as a catalog of available operations
            // and as the source of randomness. Setting the seed to a fixed number
            // in this example to make outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is
            // consumable by ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

            // Define trainer options.
            var options = new OnlineGradientDescentTrainer.Options
            {
                LabelColumnName = nameof(DataPoint.Label),
                FeatureColumnName = nameof(DataPoint.Features),
                // Change the loss function.
                LossFunction = new TweedieLoss(),
                // Give an extra gain to more recent updates.
                RecencyGain = 0.1f,
                // Turn off lazy updates.
                LazyUpdate = false,
                // Specify scale for initial weights.
                InitialWeightsDiameter = 0.2f
            };

            // Define the trainer.
            var pipeline =
                mlContext.Regression.Trainers.OnlineGradientDescent(options);

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use different random seed to make it different
            // from training data.
            var testData = mlContext.Data.LoadFromEnumerable(
                GenerateRandomDataPoints(5, seed: 123));

            // Run the model on test data set.
            var transformedTestData = model.Transform(testData);

            // Convert IDataView object to a list.
            var predictions = mlContext.Data.CreateEnumerable<Prediction>(
                transformedTestData, reuseRowObject: false).ToList();

            // Look at 5 predictions for the Label, side by side with the actual
            // Label for comparison.
            foreach (var p in predictions)
                Console.WriteLine($"Label: {p.Label:F3}, Prediction: {p.Score:F3}");

            // This trainer is not numerically stable.
            // Please see issue #2425.

            // Evaluate the overall metrics
            var metrics = mlContext.Regression.Evaluate(transformedTestData);
            PrintMetrics(metrics);
        }

        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count,
            int seed = 0)
        {
            var random = new Random(seed);
            for (int i = 0; i < count; i++)
            {
                float label = (float)random.NextDouble();
                yield return new DataPoint
                {
                    Label = label,
                    // Create random features that are correlated with the label.
                    Features = Enumerable.Repeat(label, 50).Select(
                        x => x + (float)random.NextDouble()).ToArray()
                };
            }
        }

        // Example with label and 50 feature values. A data set is a collection of
        // such examples.
        private class DataPoint
        {
            public float Label { get; set; }
            [VectorType(50)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public float Label { get; set; }
            // Predicted score from the trainer.
            public float Score { get; set; }
        }

        // Print some evaluation metrics for regression problems.
        private static void PrintMetrics(RegressionMetrics metrics)
        {
            Console.WriteLine("Mean Absolute Error: " + metrics.MeanAbsoluteError);
            Console.WriteLine("Mean Squared Error: " + metrics.MeanSquaredError);
            Console.WriteLine(
                "Root Mean Squared Error: " + metrics.RootMeanSquaredError);

            Console.WriteLine("RSquared: " + metrics.RSquared);
        }
    }
}

Applies to