diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/PermutationFeatureImportance/PFIHelper.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/PermutationFeatureImportance/PFIHelper.cs index 75b2633f37..95c64e629c 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/PermutationFeatureImportance/PFIHelper.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/PermutationFeatureImportance/PFIHelper.cs @@ -47,7 +47,7 @@ private class BinaryOutputRow private readonly static Action GreaterThanAverage = (input, output) => output.AboveAverage = input.MedianHomeValue > 22.6; - public static float[] GetLinearModelWeights(OrdinaryLeastSquaresRegressionModelParameters linearModel) + public static float[] GetLinearModelWeights(OlsModelParameters linearModel) { return linearModel.Weights.ToArray(); } diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/StochasticDualCoordinateAscent.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/StochasticDualCoordinateAscent.cs index 5c0756062b..aea4a1e8dc 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/StochasticDualCoordinateAscent.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/StochasticDualCoordinateAscent.cs @@ -61,7 +61,7 @@ public static void Example() // we could do so by tweaking the 'advancedSetting'. var advancedPipeline = mlContext.Transforms.Text.FeaturizeText("SentimentText", "Features") .Append(mlContext.BinaryClassification.Trainers.SdcaCalibrated( - new SdcaCalibratedBinaryClassificationTrainer.Options { + new SdcaCalibratedBinaryTrainer.Options { LabelColumnName = "Sentiment", FeatureColumnName = "Features", ConvergenceTolerance = 0.01f, // The learning rate for adjusting bias from being regularized diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/StochasticDualCoordinateAscentWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/StochasticDualCoordinateAscentWithOptions.cs index c45393c658..d8c89a47bd 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/StochasticDualCoordinateAscentWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/StochasticDualCoordinateAscentWithOptions.cs @@ -22,7 +22,7 @@ public static void Example() var trainTestData = mlContext.Data.TrainTestSplit(data, testFraction: 0.1); // Define the trainer options. - var options = new SdcaCalibratedBinaryClassificationTrainer.Options() + var options = new SdcaCalibratedBinaryTrainer.Options() { // Make the convergence tolerance tighter. 
ConvergenceTolerance = 0.05f, diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/StochasticDualCoordinateAscentWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/StochasticDualCoordinateAscentWithOptions.cs index 11363006ba..2d6440dc22 100644 --- a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/StochasticDualCoordinateAscentWithOptions.cs +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/StochasticDualCoordinateAscentWithOptions.cs @@ -26,7 +26,7 @@ public static void Example() // CC 1.216908,1.248052,1.391902,0.4326252,1.099942,0.9262842,1.334019,1.08762,0.9468155,0.4811099 // DD 0.7871246,1.053327,0.8971719,1.588544,1.242697,1.362964,0.6303943,0.9810045,0.9431419,1.557455 - var options = new SdcaMulticlassClassificationTrainer.Options + var options = new SdcaMulticlassTrainer.Options { // Add custom loss LossFunction = new HingeLoss(), diff --git a/src/Microsoft.ML.FastTree/FastTreeArguments.cs b/src/Microsoft.ML.FastTree/FastTreeArguments.cs index cef28a8311..1b31bcd631 100644 --- a/src/Microsoft.ML.FastTree/FastTreeArguments.cs +++ b/src/Microsoft.ML.FastTree/FastTreeArguments.cs @@ -9,7 +9,7 @@ using Microsoft.ML.Runtime; using Microsoft.ML.Trainers.FastTree; -[assembly: EntryPointModule(typeof(FastTreeBinaryClassificationTrainer.Options))] +[assembly: EntryPointModule(typeof(FastTreeBinaryTrainer.Options))] [assembly: EntryPointModule(typeof(FastTreeRegressionTrainer.Options))] [assembly: EntryPointModule(typeof(FastTreeTweedieTrainer.Options))] [assembly: EntryPointModule(typeof(FastTreeRankingTrainer.Options))] @@ -52,10 +52,10 @@ public enum EarlyStoppingRankingMetric } // XML docs are provided in the other part of this partial class. No need to duplicate the content here. - public sealed partial class FastTreeBinaryClassificationTrainer + public sealed partial class FastTreeBinaryTrainer { /// - /// Options for the . + /// Options for the . 
/// [TlcModule.Component(Name = LoadNameValue, FriendlyName = UserNameValue, Desc = Summary)] public sealed class Options : BoostedTreeOptions, IFastTreeTrainerFactory @@ -102,7 +102,7 @@ public Options() EarlyStoppingMetric = EarlyStoppingMetric.L1Norm; } - ITrainer IComponentFactory.CreateComponent(IHostEnvironment env) => new FastTreeBinaryClassificationTrainer(env, this); + ITrainer IComponentFactory.CreateComponent(IHostEnvironment env) => new FastTreeBinaryTrainer(env, this); } } diff --git a/src/Microsoft.ML.FastTree/FastTreeClassification.cs b/src/Microsoft.ML.FastTree/FastTreeClassification.cs index 55a6878aa8..78e0911957 100644 --- a/src/Microsoft.ML.FastTree/FastTreeClassification.cs +++ b/src/Microsoft.ML.FastTree/FastTreeClassification.cs @@ -13,14 +13,14 @@ using Microsoft.ML.Runtime; using Microsoft.ML.Trainers.FastTree; -[assembly: LoadableClass(FastTreeBinaryClassificationTrainer.Summary, typeof(FastTreeBinaryClassificationTrainer), typeof(FastTreeBinaryClassificationTrainer.Options), +[assembly: LoadableClass(FastTreeBinaryTrainer.Summary, typeof(FastTreeBinaryTrainer), typeof(FastTreeBinaryTrainer.Options), new[] { typeof(SignatureBinaryClassifierTrainer), typeof(SignatureTrainer), typeof(SignatureTreeEnsembleTrainer), typeof(SignatureFeatureScorerTrainer) }, - FastTreeBinaryClassificationTrainer.UserNameValue, - FastTreeBinaryClassificationTrainer.LoadNameValue, + FastTreeBinaryTrainer.UserNameValue, + FastTreeBinaryTrainer.LoadNameValue, "FastTreeClassification", "FastTree", "ft", - FastTreeBinaryClassificationTrainer.ShortName, + FastTreeBinaryTrainer.ShortName, // FastRank names "FastRankBinaryClassification", @@ -101,8 +101,8 @@ private static IPredictorProducing Create(IHostEnvironment env, ModelLoad /// The for training a decision tree binary classification model using FastTree. /// /// - public sealed partial class FastTreeBinaryClassificationTrainer : - BoostingFastTreeTrainerBase>, CalibratedModelParametersBase> { @@ -118,7 +118,7 @@ public sealed partial class FastTreeBinaryClassificationTrainer : private double _sigmoidParameter; /// - /// Initializes a new instance of + /// Initializes a new instance of /// /// The private instance of . /// The name of the label column. @@ -128,7 +128,7 @@ public sealed partial class FastTreeBinaryClassificationTrainer : /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data. /// The max number of leaves in each regression tree. /// Total number of decision trees to create in the ensemble. - internal FastTreeBinaryClassificationTrainer(IHostEnvironment env, + internal FastTreeBinaryTrainer(IHostEnvironment env, string labelColumnName = DefaultColumnNames.Label, string featureColumnName = DefaultColumnNames.Features, string exampleWeightColumnName = null, @@ -143,11 +143,11 @@ internal FastTreeBinaryClassificationTrainer(IHostEnvironment env, } /// - /// Initializes a new instance of by using the class. + /// Initializes a new instance of by using the class. /// /// The instance of . /// Algorithm advanced settings. 
- internal FastTreeBinaryClassificationTrainer(IHostEnvironment env, Options options) + internal FastTreeBinaryTrainer(IHostEnvironment env, Options options) : base(env, options, TrainerUtils.MakeBoolScalarLabel(options.LabelColumnName)) { // Set the sigmoid parameter to the 2 * learning rate, for traditional FastTreeClassification loss @@ -278,7 +278,7 @@ private protected override BinaryPredictionTransformer new BinaryPredictionTransformer>(Host, model, trainSchema, FeatureColumn.Name); /// - /// Trains a using both training and validation data, returns + /// Trains a using both training and validation data, returns /// a . /// public BinaryPredictionTransformer> Fit(IDataView trainData, IDataView validationData) @@ -403,18 +403,18 @@ public void AdjustTreeOutputs(IChannel ch, InternalRegressionTree tree, internal static partial class FastTree { [TlcModule.EntryPoint(Name = "Trainers.FastTreeBinaryClassifier", - Desc = FastTreeBinaryClassificationTrainer.Summary, - UserName = FastTreeBinaryClassificationTrainer.UserNameValue, - ShortName = FastTreeBinaryClassificationTrainer.ShortName)] - public static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnvironment env, FastTreeBinaryClassificationTrainer.Options input) + Desc = FastTreeBinaryTrainer.Summary, + UserName = FastTreeBinaryTrainer.UserNameValue, + ShortName = FastTreeBinaryTrainer.ShortName)] + public static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnvironment env, FastTreeBinaryTrainer.Options input) { Contracts.CheckValue(env, nameof(env)); var host = env.Register("TrainFastTree"); host.CheckValue(input, nameof(input)); EntryPointUtils.CheckInputArgs(host, input); - return TrainerEntryPointsUtils.Train(host, input, - () => new FastTreeBinaryClassificationTrainer(host, input), + return TrainerEntryPointsUtils.Train(host, input, + () => new FastTreeBinaryTrainer(host, input), () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.LabelColumnName), () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.ExampleWeightColumnName), () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.RowGroupColumnName)); diff --git a/src/Microsoft.ML.FastTree/GamClassification.cs b/src/Microsoft.ML.FastTree/GamClassification.cs index 0123fe1a05..3d21561370 100644 --- a/src/Microsoft.ML.FastTree/GamClassification.cs +++ b/src/Microsoft.ML.FastTree/GamClassification.cs @@ -13,16 +13,16 @@ using Microsoft.ML.Runtime; using Microsoft.ML.Trainers.FastTree; -[assembly: LoadableClass(GamBinaryClassificationTrainer.Summary, - typeof(GamBinaryClassificationTrainer), typeof(GamBinaryClassificationTrainer.Options), +[assembly: LoadableClass(GamBinaryTrainer.Summary, + typeof(GamBinaryTrainer), typeof(GamBinaryTrainer.Options), new[] { typeof(SignatureBinaryClassifierTrainer), typeof(SignatureTrainer), typeof(SignatureFeatureScorerTrainer) }, - GamBinaryClassificationTrainer.UserNameValue, - GamBinaryClassificationTrainer.LoadNameValue, - GamBinaryClassificationTrainer.ShortName, DocName = "trainer/GAM.md")] + GamBinaryTrainer.UserNameValue, + GamBinaryTrainer.LoadNameValue, + GamBinaryTrainer.ShortName, DocName = "trainer/GAM.md")] -[assembly: LoadableClass(typeof(IPredictorProducing), typeof(BinaryClassificationGamModelParameters), null, typeof(SignatureLoadModel), +[assembly: LoadableClass(typeof(IPredictorProducing), typeof(GamBinaryModelParameters), null, typeof(SignatureLoadModel), "GAM Binary Class Predictor", - 
BinaryClassificationGamModelParameters.LoaderSignature)] + GamBinaryModelParameters.LoaderSignature)] namespace Microsoft.ML.Trainers.FastTree { @@ -30,13 +30,13 @@ namespace Microsoft.ML.Trainers.FastTree /// The for training a binary classification model with generalized additive models (GAM). /// /// - public sealed class GamBinaryClassificationTrainer : - GamTrainerBase>, - CalibratedModelParametersBase> + public sealed class GamBinaryTrainer : + GamTrainerBase>, + CalibratedModelParametersBase> { /// - /// Options for the . + /// Options for the . /// public sealed class Options : OptionsBase { @@ -57,16 +57,16 @@ public sealed class Options : OptionsBase private protected override bool NeedCalibration => true; /// - /// Initializes a new instance of + /// Initializes a new instance of /// - internal GamBinaryClassificationTrainer(IHostEnvironment env, Options options) + internal GamBinaryTrainer(IHostEnvironment env, Options options) : base(env, options, LoadNameValue, TrainerUtils.MakeBoolScalarLabel(options.LabelColumnName)) { _sigmoidParameter = 1; } /// - /// Initializes a new instance of + /// Initializes a new instance of /// /// The private instance of . /// The name of the label column. @@ -75,7 +75,7 @@ internal GamBinaryClassificationTrainer(IHostEnvironment env, Options options) /// The number of iterations to use in learning the features. /// The learning rate. GAMs work best with a small learning rate. /// The maximum number of bins to use to approximate features - internal GamBinaryClassificationTrainer(IHostEnvironment env, + internal GamBinaryTrainer(IHostEnvironment env, string labelColumnName = DefaultColumnNames.Label, string featureColumnName = DefaultColumnNames.Features, string rowGroupColumnName = null, @@ -111,18 +111,18 @@ private static bool[] ConvertTargetsToBool(double[] targets) Parallel.Invoke(new ParallelOptions { MaxDegreeOfParallelism = BlockingThreadPool.NumThreads }, actions); return boolArray; } - private protected override CalibratedModelParametersBase TrainModelCore(TrainContext context) + private protected override CalibratedModelParametersBase TrainModelCore(TrainContext context) { TrainBase(context); - var predictor = new BinaryClassificationGamModelParameters(Host, + var predictor = new GamBinaryModelParameters(Host, BinUpperBounds, BinEffects, MeanEffect, InputLength, FeatureMap); var calibrator = new PlattCalibrator(Host, -1.0 * _sigmoidParameter, 0); - return new ValueMapperCalibratedModelParameters(Host, predictor, calibrator); + return new ValueMapperCalibratedModelParameters(Host, predictor, calibrator); } private protected override ObjectiveFunctionBase CreateObjectiveFunction() { - return new FastTreeBinaryClassificationTrainer.ObjectiveImpl( + return new FastTreeBinaryTrainer.ObjectiveImpl( TrainSet, ConvertTargetsToBool(TrainSet.Targets), GamTrainerOptions.LearningRate, @@ -146,15 +146,15 @@ private protected override void DefinePruningTest() PruningTest = new TestHistory(validTest, PruningLossIndex); } - private protected override BinaryPredictionTransformer> - MakeTransformer(CalibratedModelParametersBase model, DataViewSchema trainSchema) - => new BinaryPredictionTransformer>(Host, model, trainSchema, FeatureColumn.Name); + private protected override BinaryPredictionTransformer> + MakeTransformer(CalibratedModelParametersBase model, DataViewSchema trainSchema) + => new BinaryPredictionTransformer>(Host, model, trainSchema, FeatureColumn.Name); /// - /// Trains a using both training and validation data, returns + /// Trains a 
using both training and validation data, returns /// a . /// - public BinaryPredictionTransformer> Fit(IDataView trainData, IDataView validationData) + public BinaryPredictionTransformer> Fit(IDataView trainData, IDataView validationData) => TrainTransformer(trainData, validationData); private protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSchema) @@ -171,7 +171,7 @@ private protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape /// /// The model parameters class for Binary Classification GAMs /// - public sealed class BinaryClassificationGamModelParameters : GamModelParametersBase, IPredictorProducing + public sealed class GamBinaryModelParameters : GamModelParametersBase, IPredictorProducing { internal const string LoaderSignature = "BinaryClassGamPredictor"; private protected override PredictionKind PredictionKind => PredictionKind.BinaryClassification; @@ -188,11 +188,11 @@ public sealed class BinaryClassificationGamModelParameters : GamModelParametersB /// A map from the feature shape functions, as described by and . /// to the input feature. Used when the number of input features is different than the number of shape functions. Use default if all features have /// a shape function. - internal BinaryClassificationGamModelParameters(IHostEnvironment env, + internal GamBinaryModelParameters(IHostEnvironment env, double[][] binUpperBounds, double[][] binEffects, double intercept, int inputLength, int[] featureToInputMap) : base(env, LoaderSignature, binUpperBounds, binEffects, intercept, inputLength, featureToInputMap) { } - private BinaryClassificationGamModelParameters(IHostEnvironment env, ModelLoadContext ctx) + private GamBinaryModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, LoaderSignature, ctx) { } private static VersionInfo GetVersionInfo() @@ -205,7 +205,7 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00010002, verWeCanReadBack: 0x00010001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(BinaryClassificationGamModelParameters).Assembly.FullName); + loaderAssemblyName: typeof(GamBinaryModelParameters).Assembly.FullName); } private static IPredictorProducing Create(IHostEnvironment env, ModelLoadContext ctx) @@ -214,12 +214,12 @@ private static IPredictorProducing Create(IHostEnvironment env, ModelLoad env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - var predictor = new BinaryClassificationGamModelParameters(env, ctx); + var predictor = new GamBinaryModelParameters(env, ctx); ICalibrator calibrator; ctx.LoadModelOrNull(env, out calibrator, @"Calibrator"); if (calibrator == null) return predictor; - return new SchemaBindableCalibratedModelParameters(env, predictor, calibrator); + return new SchemaBindableCalibratedModelParameters(env, predictor, calibrator); } private protected override void SaveCore(ModelSaveContext ctx) diff --git a/src/Microsoft.ML.FastTree/GamModelParameters.cs b/src/Microsoft.ML.FastTree/GamModelParameters.cs index 7aedfcc78f..0104c352b4 100644 --- a/src/Microsoft.ML.FastTree/GamModelParameters.cs +++ b/src/Microsoft.ML.FastTree/GamModelParameters.cs @@ -879,12 +879,12 @@ private Context Init(IChannel ch) // 2. RegressionGamModelParameters // For (1), the trained model, GamModelParametersBase, is a field we need to extract. For (2), // we don't need to do anything because RegressionGamModelParameters is derived from GamModelParametersBase. 
- var calibrated = rawPred as CalibratedModelParametersBase; + var calibrated = rawPred as CalibratedModelParametersBase; while (calibrated != null) { hadCalibrator = true; rawPred = calibrated.SubModel; - calibrated = rawPred as CalibratedModelParametersBase; + calibrated = rawPred as CalibratedModelParametersBase; } var pred = rawPred as GamModelParametersBase; ch.CheckUserArg(pred != null, nameof(ImplOptions.InputModelFile), "Predictor was not a " + nameof(GamModelParametersBase)); diff --git a/src/Microsoft.ML.FastTree/GamRegression.cs b/src/Microsoft.ML.FastTree/GamRegression.cs index 96cf24d4b8..7193b12b9e 100644 --- a/src/Microsoft.ML.FastTree/GamRegression.cs +++ b/src/Microsoft.ML.FastTree/GamRegression.cs @@ -17,9 +17,9 @@ GamRegressionTrainer.LoadNameValue, GamRegressionTrainer.ShortName, DocName = "trainer/GAM.md")] -[assembly: LoadableClass(typeof(RegressionGamModelParameters), null, typeof(SignatureLoadModel), +[assembly: LoadableClass(typeof(GamRegressionModelParameters), null, typeof(SignatureLoadModel), "GAM Regression Predictor", - RegressionGamModelParameters.LoaderSignature)] + GamRegressionModelParameters.LoaderSignature)] namespace Microsoft.ML.Trainers.FastTree { @@ -27,7 +27,7 @@ namespace Microsoft.ML.Trainers.FastTree /// The for training a regression model with generalized additive models (GAM). /// /// - public sealed class GamRegressionTrainer : GamTrainerBase, RegressionGamModelParameters> + public sealed class GamRegressionTrainer : GamTrainerBase, GamRegressionModelParameters> { /// /// Options for the . @@ -55,7 +55,7 @@ internal GamRegressionTrainer(IHostEnvironment env, Options options) : base(env, options, LoadNameValue, TrainerUtils.MakeR4ScalarColumn(options.LabelColumnName)) { } /// - /// Initializes a new instance of + /// Initializes a new instance of /// /// The private instance of . /// The name of the label column. @@ -80,10 +80,10 @@ private protected override void CheckLabel(RoleMappedData data) data.CheckRegressionLabel(); } - private protected override RegressionGamModelParameters TrainModelCore(TrainContext context) + private protected override GamRegressionModelParameters TrainModelCore(TrainContext context) { TrainBase(context); - return new RegressionGamModelParameters(Host, BinUpperBounds, BinEffects, MeanEffect, InputLength, FeatureMap); + return new GamRegressionModelParameters(Host, BinUpperBounds, BinEffects, MeanEffect, InputLength, FeatureMap); } private protected override ObjectiveFunctionBase CreateObjectiveFunction() @@ -99,14 +99,14 @@ private protected override void DefinePruningTest() PruningTest = new TestHistory(validTest, PruningLossIndex); } - private protected override RegressionPredictionTransformer MakeTransformer(RegressionGamModelParameters model, DataViewSchema trainSchema) - => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); + private protected override RegressionPredictionTransformer MakeTransformer(GamRegressionModelParameters model, DataViewSchema trainSchema) + => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); /// /// Trains a using both training and validation data, returns /// a . 
/// - public RegressionPredictionTransformer Fit(IDataView trainData, IDataView validationData) + public RegressionPredictionTransformer Fit(IDataView trainData, IDataView validationData) => TrainTransformer(trainData, validationData); private protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSchema) @@ -121,7 +121,7 @@ private protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape /// /// The model parameters class for Binary Classification GAMs /// - public sealed class RegressionGamModelParameters : GamModelParametersBase + public sealed class GamRegressionModelParameters : GamModelParametersBase { internal const string LoaderSignature = "RegressionGamPredictor"; private protected override PredictionKind PredictionKind => PredictionKind.Regression; @@ -138,11 +138,11 @@ public sealed class RegressionGamModelParameters : GamModelParametersBase /// A map from the feature shape functions (as described by the binUpperBounds and BinEffects) /// to the input feature. Used when the number of input features is different than the number of shape functions. Use default if all features have /// a shape function. - internal RegressionGamModelParameters(IHostEnvironment env, + internal GamRegressionModelParameters(IHostEnvironment env, double[][] binUpperBounds, double[][] binEffects, double intercept, int inputLength = -1, int[] featureToInputMap = null) : base(env, LoaderSignature, binUpperBounds, binEffects, intercept, inputLength, featureToInputMap) { } - private RegressionGamModelParameters(IHostEnvironment env, ModelLoadContext ctx) + private GamRegressionModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, LoaderSignature, ctx) { } private static VersionInfo GetVersionInfo() @@ -155,16 +155,16 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00010002, verWeCanReadBack: 0x00010001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(RegressionGamModelParameters).Assembly.FullName); + loaderAssemblyName: typeof(GamRegressionModelParameters).Assembly.FullName); } - private static RegressionGamModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) + private static GamRegressionModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - return new RegressionGamModelParameters(env, ctx); + return new GamRegressionModelParameters(env, ctx); } private protected override void SaveCore(ModelSaveContext ctx) diff --git a/src/Microsoft.ML.FastTree/GamTrainer.cs b/src/Microsoft.ML.FastTree/GamTrainer.cs index 6650909731..68bb88c594 100644 --- a/src/Microsoft.ML.FastTree/GamTrainer.cs +++ b/src/Microsoft.ML.FastTree/GamTrainer.cs @@ -696,16 +696,16 @@ public static CommonOutputs.RegressionOutput TrainRegression(IHostEnvironment en () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.ExampleWeightColumnName)); } - [TlcModule.EntryPoint(Name = "Trainers.GeneralizedAdditiveModelBinaryClassifier", Desc = GamBinaryClassificationTrainer.Summary, UserName = GamBinaryClassificationTrainer.UserNameValue, ShortName = GamBinaryClassificationTrainer.ShortName)] - public static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnvironment env, GamBinaryClassificationTrainer.Options input) + [TlcModule.EntryPoint(Name = "Trainers.GeneralizedAdditiveModelBinaryClassifier", Desc = GamBinaryTrainer.Summary, UserName = GamBinaryTrainer.UserNameValue, ShortName = 
GamBinaryTrainer.ShortName)] + public static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnvironment env, GamBinaryTrainer.Options input) { Contracts.CheckValue(env, nameof(env)); var host = env.Register("TrainGAM"); host.CheckValue(input, nameof(input)); EntryPointUtils.CheckInputArgs(host, input); - return TrainerEntryPointsUtils.Train(host, input, - () => new GamBinaryClassificationTrainer(host, input), + return TrainerEntryPointsUtils.Train(host, input, + () => new GamBinaryTrainer(host, input), () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.LabelColumnName), () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.ExampleWeightColumnName)); } diff --git a/src/Microsoft.ML.FastTree/RandomForestClassification.cs b/src/Microsoft.ML.FastTree/RandomForestClassification.cs index d379859ffc..f226d0024f 100644 --- a/src/Microsoft.ML.FastTree/RandomForestClassification.cs +++ b/src/Microsoft.ML.FastTree/RandomForestClassification.cs @@ -13,17 +13,17 @@ using Microsoft.ML.Runtime; using Microsoft.ML.Trainers.FastTree; -[assembly: LoadableClass(FastForestBinaryClassificationTrainer.Summary, typeof(FastForestBinaryClassificationTrainer), typeof(FastForestBinaryClassificationTrainer.Options), +[assembly: LoadableClass(FastForestBinaryTrainer.Summary, typeof(FastForestBinaryTrainer), typeof(FastForestBinaryTrainer.Options), new[] { typeof(SignatureBinaryClassifierTrainer), typeof(SignatureTrainer), typeof(SignatureTreeEnsembleTrainer), typeof(SignatureFeatureScorerTrainer) }, - FastForestBinaryClassificationTrainer.UserNameValue, - FastForestBinaryClassificationTrainer.LoadNameValue, + FastForestBinaryTrainer.UserNameValue, + FastForestBinaryTrainer.LoadNameValue, "FastForest", - FastForestBinaryClassificationTrainer.ShortName, + FastForestBinaryTrainer.ShortName, "ffc")] -[assembly: LoadableClass(typeof(IPredictorProducing), typeof(FastForestClassificationModelParameters), null, typeof(SignatureLoadModel), +[assembly: LoadableClass(typeof(IPredictorProducing), typeof(FastForestBinaryModelParameters), null, typeof(SignatureLoadModel), "FastForest Binary Executor", - FastForestClassificationModelParameters.LoaderSignature)] + FastForestBinaryModelParameters.LoaderSignature)] [assembly: LoadableClass(typeof(void), typeof(FastForest), null, typeof(SignatureEntryPointModule), "FastForest")] @@ -48,7 +48,7 @@ internal FastForestOptionsBase() } } - public sealed class FastForestClassificationModelParameters : + public sealed class FastForestBinaryModelParameters : TreeEnsembleModelParametersBasedOnQuantileRegressionTree { internal const string LoaderSignature = "FastForestBinaryExec"; @@ -67,7 +67,7 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00010005, verWeCanReadBack: 0x00010001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(FastForestClassificationModelParameters).Assembly.FullName); + loaderAssemblyName: typeof(FastForestBinaryModelParameters).Assembly.FullName); } private protected override uint VerNumFeaturesSerialized => 0x00010003; @@ -81,11 +81,11 @@ private static VersionInfo GetVersionInfo() /// private protected override PredictionKind PredictionKind => PredictionKind.BinaryClassification; - internal FastForestClassificationModelParameters(IHostEnvironment env, InternalTreeEnsemble trainedEnsemble, int featureCount, string innerArgs) + internal FastForestBinaryModelParameters(IHostEnvironment env, InternalTreeEnsemble trainedEnsemble, int featureCount, string innerArgs) : base(env, 
RegistrationName, trainedEnsemble, featureCount, innerArgs) { } - private FastForestClassificationModelParameters(IHostEnvironment env, ModelLoadContext ctx) + private FastForestBinaryModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, ctx, GetVersionInfo()) { } @@ -101,12 +101,12 @@ private static IPredictorProducing Create(IHostEnvironment env, ModelLoad Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - var predictor = new FastForestClassificationModelParameters(env, ctx); + var predictor = new FastForestBinaryModelParameters(env, ctx); ICalibrator calibrator; ctx.LoadModelOrNull(env, out calibrator, @"Calibrator"); if (calibrator == null) return predictor; - return new SchemaBindableCalibratedModelParameters(env, predictor, calibrator); + return new SchemaBindableCalibratedModelParameters(env, predictor, calibrator); } } @@ -114,11 +114,11 @@ private static IPredictorProducing Create(IHostEnvironment env, ModelLoad /// The for training a decision tree binary classification model using Fast Forest. /// /// - public sealed partial class FastForestBinaryClassificationTrainer : - RandomForestTrainerBase, FastForestClassificationModelParameters> + public sealed partial class FastForestBinaryTrainer : + RandomForestTrainerBase, FastForestBinaryModelParameters> { /// - /// Options for the . + /// Options for the . /// public sealed class Options : FastForestOptionsBase { @@ -146,7 +146,7 @@ public sealed class Options : FastForestOptionsBase private protected override bool NeedCalibration => true; /// - /// Initializes a new instance of + /// Initializes a new instance of /// /// The private instance of . /// The name of the label column. @@ -155,7 +155,7 @@ public sealed class Options : FastForestOptionsBase /// The max number of leaves in each regression tree. /// Total number of decision trees to create in the ensemble. /// The minimal number of documents allowed in a leaf of a regression tree, out of the subsampled data. - internal FastForestBinaryClassificationTrainer(IHostEnvironment env, + internal FastForestBinaryTrainer(IHostEnvironment env, string labelColumnName = DefaultColumnNames.Label, string featureColumnName = DefaultColumnNames.Features, string exampleWeightColumnName = null, @@ -169,16 +169,16 @@ internal FastForestBinaryClassificationTrainer(IHostEnvironment env, } /// - /// Initializes a new instance of by using the class. + /// Initializes a new instance of by using the class. /// /// The instance of . /// Algorithm advanced settings. - internal FastForestBinaryClassificationTrainer(IHostEnvironment env, Options options) + internal FastForestBinaryTrainer(IHostEnvironment env, Options options) : base(env, options, TrainerUtils.MakeBoolScalarLabel(options.LabelColumnName)) { } - private protected override FastForestClassificationModelParameters TrainModelCore(TrainContext context) + private protected override FastForestBinaryModelParameters TrainModelCore(TrainContext context) { Host.CheckValue(context, nameof(context)); var trainData = context.TrainingSet; @@ -201,7 +201,7 @@ private protected override FastForestClassificationModelParameters TrainModelCor // calibrator, transform the scores using that. // REVIEW: Need a way to signal the outside world that we prefer simple sigmoid? 
- return new FastForestClassificationModelParameters(Host, TrainedEnsemble, FeatureCount, InnerOptions); + return new FastForestBinaryModelParameters(Host, TrainedEnsemble, FeatureCount, InnerOptions); } private protected override ObjectiveFunctionBase ConstructObjFunc(IChannel ch) @@ -221,14 +221,14 @@ private protected override Test ConstructTestForTrainingData() return new BinaryClassificationTest(ConstructScoreTracker(TrainSet), _trainSetLabels, 1); } - private protected override BinaryPredictionTransformer MakeTransformer(FastForestClassificationModelParameters model, DataViewSchema trainSchema) - => new BinaryPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); + private protected override BinaryPredictionTransformer MakeTransformer(FastForestBinaryModelParameters model, DataViewSchema trainSchema) + => new BinaryPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); /// - /// Trains a using both training and validation data, returns + /// Trains a using both training and validation data, returns /// a . /// - public BinaryPredictionTransformer Fit(IDataView trainData, IDataView validationData) + public BinaryPredictionTransformer Fit(IDataView trainData, IDataView validationData) => TrainTransformer(trainData, validationData); private protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSchema) @@ -263,18 +263,18 @@ protected override void GetGradientInOneQuery(int query, int threadIndex) internal static partial class FastForest { [TlcModule.EntryPoint(Name = "Trainers.FastForestBinaryClassifier", - Desc = FastForestBinaryClassificationTrainer.Summary, - UserName = FastForestBinaryClassificationTrainer.UserNameValue, - ShortName = FastForestBinaryClassificationTrainer.ShortName)] - public static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnvironment env, FastForestBinaryClassificationTrainer.Options input) + Desc = FastForestBinaryTrainer.Summary, + UserName = FastForestBinaryTrainer.UserNameValue, + ShortName = FastForestBinaryTrainer.ShortName)] + public static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnvironment env, FastForestBinaryTrainer.Options input) { Contracts.CheckValue(env, nameof(env)); var host = env.Register("TrainFastForest"); host.CheckValue(input, nameof(input)); EntryPointUtils.CheckInputArgs(host, input); - return TrainerEntryPointsUtils.Train(host, input, - () => new FastForestBinaryClassificationTrainer(host, input), + return TrainerEntryPointsUtils.Train(host, input, + () => new FastForestBinaryTrainer(host, input), () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.LabelColumnName), () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.ExampleWeightColumnName), () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.RowGroupColumnName), diff --git a/src/Microsoft.ML.FastTree/TreeTrainersCatalog.cs b/src/Microsoft.ML.FastTree/TreeTrainersCatalog.cs index 78a885c549..18a2737d02 100644 --- a/src/Microsoft.ML.FastTree/TreeTrainersCatalog.cs +++ b/src/Microsoft.ML.FastTree/TreeTrainersCatalog.cs @@ -61,7 +61,7 @@ public static FastTreeRegressionTrainer FastTree(this RegressionCatalog.Regressi } /// - /// Predict a target using a decision tree binary classification model trained with the . + /// Predict a target using a decision tree binary classification model trained with the . /// /// The . /// The name of the label column. 
@@ -71,7 +71,7 @@ public static FastTreeRegressionTrainer FastTree(this RegressionCatalog.Regressi /// The maximum number of leaves per decision tree. /// The minimal number of data points required to form a new tree leaf. /// The learning rate. - public static FastTreeBinaryClassificationTrainer FastTree(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, + public static FastTreeBinaryTrainer FastTree(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, string labelColumnName = DefaultColumnNames.Label, string featureColumnName = DefaultColumnNames.Features, string exampleWeightColumnName = null, @@ -82,22 +82,22 @@ public static FastTreeBinaryClassificationTrainer FastTree(this BinaryClassifica { Contracts.CheckValue(catalog, nameof(catalog)); var env = CatalogUtils.GetEnvironment(catalog); - return new FastTreeBinaryClassificationTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, numberOfLeaves, numberOfTrees, minimumExampleCountPerLeaf, learningRate); + return new FastTreeBinaryTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, numberOfLeaves, numberOfTrees, minimumExampleCountPerLeaf, learningRate); } /// - /// Predict a target using a decision tree binary classification model trained with the and advanced options. + /// Predict a target using a decision tree binary classification model trained with the and advanced options. /// /// The . /// Trainer options. - public static FastTreeBinaryClassificationTrainer FastTree(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, - FastTreeBinaryClassificationTrainer.Options options) + public static FastTreeBinaryTrainer FastTree(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, + FastTreeBinaryTrainer.Options options) { Contracts.CheckValue(catalog, nameof(catalog)); Contracts.CheckValue(options, nameof(options)); var env = CatalogUtils.GetEnvironment(catalog); - return new FastTreeBinaryClassificationTrainer(env, options); + return new FastTreeBinaryTrainer(env, options); } /// @@ -143,7 +143,7 @@ public static FastTreeRankingTrainer FastTree(this RankingCatalog.RankingTrainer } /// - /// Predict a target using generalized additive models (GAM) trained with the . + /// Predict a target using generalized additive models (GAM) trained with the . /// /// The . /// The name of the label column. @@ -152,7 +152,7 @@ public static FastTreeRankingTrainer FastTree(this RankingCatalog.RankingTrainer /// The number of iterations to use in learning the features. /// The maximum number of bins to use to approximate features. /// The learning rate. GAMs work best with a small learning rate. 
- public static GamBinaryClassificationTrainer Gam(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, + public static GamBinaryTrainer Gam(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, string labelColumnName = DefaultColumnNames.Label, string featureColumnName = DefaultColumnNames.Features, string exampleWeightColumnName = null, @@ -162,20 +162,20 @@ public static GamBinaryClassificationTrainer Gam(this BinaryClassificationCatalo { Contracts.CheckValue(catalog, nameof(catalog)); var env = CatalogUtils.GetEnvironment(catalog); - return new GamBinaryClassificationTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, numberOfIterations, learningRate, maximumBinCountPerFeature); + return new GamBinaryTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, numberOfIterations, learningRate, maximumBinCountPerFeature); } /// - /// Predict a target using generalized additive models (GAM) trained with the . + /// Predict a target using generalized additive models (GAM) trained with the . /// /// The . /// Trainer options. - public static GamBinaryClassificationTrainer Gam(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, - GamBinaryClassificationTrainer.Options options) + public static GamBinaryTrainer Gam(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, + GamBinaryTrainer.Options options) { Contracts.CheckValue(catalog, nameof(catalog)); var env = CatalogUtils.GetEnvironment(catalog); - return new GamBinaryClassificationTrainer(env, options); + return new GamBinaryTrainer(env, options); } /// @@ -293,7 +293,7 @@ public static FastForestRegressionTrainer FastForest(this RegressionCatalog.Regr } /// - /// Predict a target using a decision tree regression model trained with the . + /// Predict a target using a decision tree binary classification model trained with the . /// /// The . /// The name of the label column. @@ -302,7 +302,7 @@ public static FastForestRegressionTrainer FastForest(this RegressionCatalog.Regr /// Total number of decision trees to create in the ensemble. /// The maximum number of leaves per decision tree. /// The minimal number of data points required to form a new tree leaf. - public static FastForestBinaryClassificationTrainer FastForest(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, + public static FastForestBinaryTrainer FastForest(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, string labelColumnName = DefaultColumnNames.Label, string featureColumnName = DefaultColumnNames.Features, string exampleWeightColumnName = null, @@ -312,22 +312,22 @@ public static FastForestBinaryClassificationTrainer FastForest(this BinaryClassi { Contracts.CheckValue(catalog, nameof(catalog)); var env = CatalogUtils.GetEnvironment(catalog); - return new FastForestBinaryClassificationTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, numberOfLeaves, numberOfTrees, minDatapointsInLeaves); + return new FastForestBinaryTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, numberOfLeaves, numberOfTrees, minDatapointsInLeaves); } /// - /// Predict a target using a decision tree regression model trained with the and advanced options. + /// Predict a target using a decision tree binary classification model trained with the and advanced options. /// /// The . /// Trainer options.
- public static FastForestBinaryClassificationTrainer FastForest(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, - FastForestBinaryClassificationTrainer.Options options) + public static FastForestBinaryTrainer FastForest(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, + FastForestBinaryTrainer.Options options) { Contracts.CheckValue(catalog, nameof(catalog)); Contracts.CheckValue(options, nameof(options)); var env = CatalogUtils.GetEnvironment(catalog); - return new FastForestBinaryClassificationTrainer(env, options); + return new FastForestBinaryTrainer(env, options); } } } diff --git a/src/Microsoft.ML.LightGbm.StaticPipe/LightGbmStaticExtensions.cs b/src/Microsoft.ML.LightGbm.StaticPipe/LightGbmStaticExtensions.cs index 439cb28a40..129ab2f593 100644 --- a/src/Microsoft.ML.LightGbm.StaticPipe/LightGbmStaticExtensions.cs +++ b/src/Microsoft.ML.LightGbm.StaticPipe/LightGbmStaticExtensions.cs @@ -98,7 +98,7 @@ public static Scalar LightGbm(this RegressionCatalog.RegressionTrainers c } /// - /// Predict a target using a tree binary classification model trained with the . + /// Predict a target using a tree binary classification model trained with the . /// /// The . /// The label column. @@ -136,7 +136,7 @@ public static (Scalar score, Scalar probability, Scalar pred var rec = new TrainerEstimatorReconciler.BinaryClassifier( (env, labelName, featuresName, weightsName) => { - var trainer = new LightGbmBinaryClassificationTrainer(env, labelName, featuresName, weightsName, numberOfLeaves, + var trainer = new LightGbmBinaryTrainer(env, labelName, featuresName, weightsName, numberOfLeaves, minimumExampleCountPerLeaf, learningRate, numberOfIterations); if (onFit != null) @@ -149,7 +149,7 @@ public static (Scalar score, Scalar probability, Scalar pred } /// - /// Predict a target using a tree binary classification model trained with the . + /// Predict a target using a tree binary classification model trained with the . /// /// The . /// The label column. @@ -177,7 +177,7 @@ public static (Scalar score, Scalar probability, Scalar pred options.FeatureColumnName = featuresName; options.ExampleWeightColumnName = weightsName; - var trainer = new LightGbmBinaryClassificationTrainer(env, options); + var trainer = new LightGbmBinaryTrainer(env, options); if (onFit != null) return trainer.WithOnFitDelegate(trans => onFit(trans.Model)); @@ -278,7 +278,7 @@ public static Scalar LightGbm(this RankingCatalog.RankingTrainers c } /// - /// Predict a target using a tree multiclass classification model trained with the . + /// Predict a target using a tree multiclass classification model trained with the . /// /// The multiclass classification catalog trainer object. /// The label, or dependent variable. @@ -317,7 +317,7 @@ public static (Vector score, Key predictedLabel) var rec = new TrainerEstimatorReconciler.MulticlassClassificationReconciler( (env, labelName, featuresName, weightsName) => { - var trainer = new LightGbmMulticlassClassificationTrainer(env, labelName, featuresName, weightsName, numberOfLeaves, + var trainer = new LightGbmMulticlassTrainer(env, labelName, featuresName, weightsName, numberOfLeaves, minimumExampleCountPerLeaf, learningRate, numberOfIterations); if (onFit != null) @@ -329,7 +329,7 @@ public static (Vector score, Key predictedLabel) } /// - /// Predict a target using a tree multiclass classification model trained with the . + /// Predict a target using a tree multiclass classification model trained with the . 
/// /// The multiclass classification catalog trainer object. /// The label, or dependent variable. @@ -359,7 +359,7 @@ public static (Vector score, Key predictedLabel) options.FeatureColumnName = featuresName; options.ExampleWeightColumnName = weightsName; - var trainer = new LightGbmMulticlassClassificationTrainer(env, options); + var trainer = new LightGbmMulticlassTrainer(env, options); if (onFit != null) return trainer.WithOnFitDelegate(trans => onFit(trans.Model)); diff --git a/src/Microsoft.ML.LightGbm/LightGbmArguments.cs b/src/Microsoft.ML.LightGbm/LightGbmArguments.cs index 05476859a1..cb8a18b384 100644 --- a/src/Microsoft.ML.LightGbm/LightGbmArguments.cs +++ b/src/Microsoft.ML.LightGbm/LightGbmArguments.cs @@ -150,7 +150,7 @@ public sealed class TreeBooster : BoosterParameter public class Options : ISupportBoosterParameterFactory { /// - /// Whether training data is unbalanced. Used by . + /// Whether training data is unbalanced. Used by . /// [Argument(ArgumentType.AtMostOnce, HelpText = "Use for binary classification when training data is not balanced.", ShortName = "us")] public bool UnbalancedSets = false; @@ -263,7 +263,7 @@ public class Options : ISupportBoosterParameterFactory public double L1Regularization = 0; /// - /// Controls the balance of positive and negative weights in . + /// Controls the balance of positive and negative weights in . /// /// /// This is useful for training on unbalanced data. A typical value to consider is sum(negative cases) / sum(positive cases). @@ -518,7 +518,7 @@ public enum EvalMetricType public EvalMetricType EvaluationMetric = EvalMetricType.DefaultMetric; /// - /// Whether to use softmax loss. Used only by . + /// Whether to use softmax loss. Used only by . /// [Argument(ArgumentType.AtMostOnce, HelpText = "Use softmax loss for the multi classification.")] [TlcModule.SweepableDiscreteParam("UseSoftmax", new object[] { true, false })] @@ -542,9 +542,9 @@ public enum EvalMetricType public string CustomGains = "0,3,7,15,31,63,127,255,511,1023,2047,4095"; /// - /// Parameter for the sigmoid function. Used only by , , and . + /// Parameter for the sigmoid function. Used only by , , and . /// - [Argument(ArgumentType.AtMostOnce, HelpText = "Parameter for the sigmoid function. Used only in " + nameof(LightGbmBinaryClassificationTrainer) + ", " + nameof(LightGbmMulticlassClassificationTrainer) + + [Argument(ArgumentType.AtMostOnce, HelpText = "Parameter for the sigmoid function. 
Used only in " + nameof(LightGbmBinaryTrainer) + ", " + nameof(LightGbmMulticlassTrainer) + " and in " + nameof(LightGbmRankingTrainer) + ".", ShortName = "sigmoid")] [TGUI(Label = "Sigmoid", SuggestedSweeps = "0.5,1")] public double Sigmoid = 0.5; diff --git a/src/Microsoft.ML.LightGbm/LightGbmBinaryTrainer.cs b/src/Microsoft.ML.LightGbm/LightGbmBinaryTrainer.cs index 1a10dc68f5..73f47c097d 100644 --- a/src/Microsoft.ML.LightGbm/LightGbmBinaryTrainer.cs +++ b/src/Microsoft.ML.LightGbm/LightGbmBinaryTrainer.cs @@ -10,9 +10,9 @@ using Microsoft.ML.Trainers.FastTree; using Microsoft.ML.Trainers.LightGbm; -[assembly: LoadableClass(LightGbmBinaryClassificationTrainer.Summary, typeof(LightGbmBinaryClassificationTrainer), typeof(Options), +[assembly: LoadableClass(LightGbmBinaryTrainer.Summary, typeof(LightGbmBinaryTrainer), typeof(Options), new[] { typeof(SignatureBinaryClassifierTrainer), typeof(SignatureTrainer), typeof(SignatureTreeEnsembleTrainer) }, - LightGbmBinaryClassificationTrainer.UserName, LightGbmBinaryClassificationTrainer.LoadNameValue, LightGbmBinaryClassificationTrainer.ShortName, DocName = "trainer/LightGBM.md")] + LightGbmBinaryTrainer.UserName, LightGbmBinaryTrainer.LoadNameValue, LightGbmBinaryTrainer.ShortName, DocName = "trainer/LightGBM.md")] [assembly: LoadableClass(typeof(IPredictorProducing), typeof(LightGbmBinaryModelParameters), null, typeof(SignatureLoadModel), "LightGBM Binary Executor", @@ -82,7 +82,7 @@ private static IPredictorProducing Create(IHostEnvironment env, ModelLoad /// The for training a boosted decision tree binary classification model using LightGBM. /// /// - public sealed class LightGbmBinaryClassificationTrainer : LightGbmTrainerBase>, CalibratedModelParametersBase> { @@ -93,13 +93,13 @@ public sealed class LightGbmBinaryClassificationTrainer : LightGbmTrainerBase PredictionKind.BinaryClassification; - internal LightGbmBinaryClassificationTrainer(IHostEnvironment env, Options options) + internal LightGbmBinaryTrainer(IHostEnvironment env, Options options) : base(env, LoadNameValue, options, TrainerUtils.MakeBoolScalarLabel(options.LabelColumnName)) { } /// - /// Initializes a new instance of + /// Initializes a new instance of /// /// The private instance of . /// The name of The label column. @@ -109,7 +109,7 @@ internal LightGbmBinaryClassificationTrainer(IHostEnvironment env, Options optio /// The minimal number of data points allowed in a leaf of the tree, out of the subsampled data. /// The learning rate. /// Number of iterations. - internal LightGbmBinaryClassificationTrainer(IHostEnvironment env, + internal LightGbmBinaryTrainer(IHostEnvironment env, string labelColumnName = DefaultColumnNames.Label, string featureColumnName = DefaultColumnNames.Features, string exampleWeightColumnName = null, @@ -165,7 +165,7 @@ private protected override BinaryPredictionTransformer new BinaryPredictionTransformer>(Host, model, trainSchema, FeatureColumn.Name); /// - /// Trains a using both training and validation data, returns + /// Trains a using both training and validation data, returns /// a . 
/// public BinaryPredictionTransformer> Fit(IDataView trainData, IDataView validationData) @@ -179,9 +179,9 @@ internal static partial class LightGbm { [TlcModule.EntryPoint( Name = "Trainers.LightGbmBinaryClassifier", - Desc = LightGbmBinaryClassificationTrainer.Summary, - UserName = LightGbmBinaryClassificationTrainer.UserName, - ShortName = LightGbmBinaryClassificationTrainer.ShortName)] + Desc = LightGbmBinaryTrainer.Summary, + UserName = LightGbmBinaryTrainer.UserName, + ShortName = LightGbmBinaryTrainer.ShortName)] public static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnvironment env, Options input) { Contracts.CheckValue(env, nameof(env)); @@ -190,7 +190,7 @@ public static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnvironm EntryPointUtils.CheckInputArgs(host, input); return TrainerEntryPointsUtils.Train(host, input, - () => new LightGbmBinaryClassificationTrainer(host, input), + () => new LightGbmBinaryTrainer(host, input), getLabel: () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.LabelColumnName), getWeight: () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.ExampleWeightColumnName)); } diff --git a/src/Microsoft.ML.LightGbm/LightGbmCatalog.cs b/src/Microsoft.ML.LightGbm/LightGbmCatalog.cs index d9d106830b..e740ee3836 100644 --- a/src/Microsoft.ML.LightGbm/LightGbmCatalog.cs +++ b/src/Microsoft.ML.LightGbm/LightGbmCatalog.cs @@ -66,7 +66,7 @@ public static LightGbmRegressionTrainer LightGbm(this RegressionCatalog.Regressi } /// - /// Predict a target using a gradient boosting decision tree binary classification model trained with the . + /// Predict a target using a gradient boosting decision tree binary classification model trained with the . /// /// The . /// The name of the label column. @@ -83,7 +83,7 @@ public static LightGbmRegressionTrainer LightGbm(this RegressionCatalog.Regressi /// ]]> /// /// - public static LightGbmBinaryClassificationTrainer LightGbm(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, + public static LightGbmBinaryTrainer LightGbm(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, string labelColumnName = DefaultColumnNames.Label, string featureColumnName = DefaultColumnNames.Features, string exampleWeightColumnName = null, @@ -94,11 +94,11 @@ public static LightGbmBinaryClassificationTrainer LightGbm(this BinaryClassifica { Contracts.CheckValue(catalog, nameof(catalog)); var env = CatalogUtils.GetEnvironment(catalog); - return new LightGbmBinaryClassificationTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, numberOfLeaves, minimumExampleCountPerLeaf, learningRate, numberOfIterations); + return new LightGbmBinaryTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, numberOfLeaves, minimumExampleCountPerLeaf, learningRate, numberOfIterations); } /// - /// Predict a target using a gradient boosting decision tree binary classification model trained with the and advanced options. + /// Predict a target using a gradient boosting decision tree binary classification model trained with the and advanced options. /// /// The . /// Trainer options. 
@@ -109,12 +109,12 @@ public static LightGbmBinaryClassificationTrainer LightGbm(this BinaryClassifica /// ]]> /// /// - public static LightGbmBinaryClassificationTrainer LightGbm(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, + public static LightGbmBinaryTrainer LightGbm(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, Options options) { Contracts.CheckValue(catalog, nameof(catalog)); var env = CatalogUtils.GetEnvironment(catalog); - return new LightGbmBinaryClassificationTrainer(env, options); + return new LightGbmBinaryTrainer(env, options); } /// @@ -172,7 +172,7 @@ public static LightGbmRankingTrainer LightGbm(this RankingCatalog.RankingTrainer } /// - /// Predict a target using a gradient boosting decision tree multiclass classification model trained with the . + /// Predict a target using a gradient boosting decision tree multiclass classification model trained with the . /// /// The . /// The name of the label column. @@ -189,7 +189,7 @@ public static LightGbmRankingTrainer LightGbm(this RankingCatalog.RankingTrainer /// ]]> /// /// - public static LightGbmMulticlassClassificationTrainer LightGbm(this MulticlassClassificationCatalog.MulticlassClassificationTrainers catalog, + public static LightGbmMulticlassTrainer LightGbm(this MulticlassClassificationCatalog.MulticlassClassificationTrainers catalog, string labelColumnName = DefaultColumnNames.Label, string featureColumnName = DefaultColumnNames.Features, string exampleWeightColumnName = null, @@ -200,11 +200,11 @@ public static LightGbmMulticlassClassificationTrainer LightGbm(this MulticlassCl { Contracts.CheckValue(catalog, nameof(catalog)); var env = CatalogUtils.GetEnvironment(catalog); - return new LightGbmMulticlassClassificationTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, numberOfLeaves, minimumExampleCountPerLeaf, learningRate, numberOfIterations); + return new LightGbmMulticlassTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, numberOfLeaves, minimumExampleCountPerLeaf, learningRate, numberOfIterations); } /// - /// Predict a target using a gradient boosting decision tree multiclass classification model trained with the and advanced options. + /// Predict a target using a gradient boosting decision tree multiclass classification model trained with the and advanced options. /// /// The . /// Trainer options. 
@@ -215,12 +215,12 @@ public static LightGbmMulticlassClassificationTrainer LightGbm(this MulticlassCl /// ]]> /// /// - public static LightGbmMulticlassClassificationTrainer LightGbm(this MulticlassClassificationCatalog.MulticlassClassificationTrainers catalog, + public static LightGbmMulticlassTrainer LightGbm(this MulticlassClassificationCatalog.MulticlassClassificationTrainers catalog, Options options) { Contracts.CheckValue(catalog, nameof(catalog)); var env = CatalogUtils.GetEnvironment(catalog); - return new LightGbmMulticlassClassificationTrainer(env, options); + return new LightGbmMulticlassTrainer(env, options); } } } diff --git a/src/Microsoft.ML.LightGbm/LightGbmMulticlassTrainer.cs b/src/Microsoft.ML.LightGbm/LightGbmMulticlassTrainer.cs index bb8da22838..d4126d65d9 100644 --- a/src/Microsoft.ML.LightGbm/LightGbmMulticlassTrainer.cs +++ b/src/Microsoft.ML.LightGbm/LightGbmMulticlassTrainer.cs @@ -12,9 +12,9 @@ using Microsoft.ML.Trainers.FastTree; using Microsoft.ML.Trainers.LightGbm; -[assembly: LoadableClass(LightGbmMulticlassClassificationTrainer.Summary, typeof(LightGbmMulticlassClassificationTrainer), typeof(Options), +[assembly: LoadableClass(LightGbmMulticlassTrainer.Summary, typeof(LightGbmMulticlassTrainer), typeof(Options), new[] { typeof(SignatureMulticlassClassifierTrainer), typeof(SignatureTrainer) }, - "LightGBM Multi-class Classifier", LightGbmMulticlassClassificationTrainer.LoadNameValue, LightGbmMulticlassClassificationTrainer.ShortName, DocName = "trainer/LightGBM.md")] + "LightGBM Multi-class Classifier", LightGbmMulticlassTrainer.LoadNameValue, LightGbmMulticlassTrainer.ShortName, DocName = "trainer/LightGBM.md")] namespace Microsoft.ML.Trainers.LightGbm { @@ -22,7 +22,7 @@ namespace Microsoft.ML.Trainers.LightGbm /// The for training a boosted decision tree multi-class classification model using LightGBM. /// /// - public sealed class LightGbmMulticlassClassificationTrainer : LightGbmTrainerBase, MulticlassPredictionTransformer, OneVersusAllModelParameters> + public sealed class LightGbmMulticlassTrainer : LightGbmTrainerBase, MulticlassPredictionTransformer, OneVersusAllModelParameters> { internal const string Summary = "LightGBM Multi Class Classifier"; internal const string LoadNameValue = "LightGBMMulticlass"; @@ -34,14 +34,14 @@ public sealed class LightGbmMulticlassClassificationTrainer : LightGbmTrainerBas private int _tlcNumClass; private protected override PredictionKind PredictionKind => PredictionKind.MulticlassClassification; - internal LightGbmMulticlassClassificationTrainer(IHostEnvironment env, Options options) + internal LightGbmMulticlassTrainer(IHostEnvironment env, Options options) : base(env, LoadNameValue, options, TrainerUtils.MakeU4ScalarColumn(options.LabelColumnName)) { _numClass = -1; } /// - /// Initializes a new instance of + /// Initializes a new instance of /// /// The private instance of . /// The name of The label column. @@ -51,7 +51,7 @@ internal LightGbmMulticlassClassificationTrainer(IHostEnvironment env, Options o /// The minimal number of data points allowed in a leaf of the tree, out of the subsampled data. /// The learning rate. /// The number of iterations to use. 
- internal LightGbmMulticlassClassificationTrainer(IHostEnvironment env, + internal LightGbmMulticlassTrainer(IHostEnvironment env, string labelColumnName = DefaultColumnNames.Label, string featureColumnName = DefaultColumnNames.Features, string exampleWeightColumnName = null, @@ -223,7 +223,7 @@ private protected override MulticlassPredictionTransformer new MulticlassPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name, LabelColumn.Name); /// - /// Trains a using both training and validation data, returns + /// Trains a using both training and validation data, returns /// a . /// public MulticlassPredictionTransformer Fit(IDataView trainData, IDataView validationData) @@ -238,8 +238,8 @@ internal static partial class LightGbm [TlcModule.EntryPoint( Name = "Trainers.LightGbmClassifier", Desc = "Train a LightGBM multi class model.", - UserName = LightGbmMulticlassClassificationTrainer.Summary, - ShortName = LightGbmMulticlassClassificationTrainer.ShortName)] + UserName = LightGbmMulticlassTrainer.Summary, + ShortName = LightGbmMulticlassTrainer.ShortName)] public static CommonOutputs.MulticlassClassificationOutput TrainMulticlass(IHostEnvironment env, Options input) { Contracts.CheckValue(env, nameof(env)); @@ -248,7 +248,7 @@ public static CommonOutputs.MulticlassClassificationOutput TrainMulticlass(IHost EntryPointUtils.CheckInputArgs(host, input); return TrainerEntryPointsUtils.Train(host, input, - () => new LightGbmMulticlassClassificationTrainer(host, input), + () => new LightGbmMulticlassTrainer(host, input), getLabel: () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.LabelColumnName), getWeight: () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.ExampleWeightColumnName)); } diff --git a/src/Microsoft.ML.Mkl.Components/OlsLinearRegression.cs b/src/Microsoft.ML.Mkl.Components/OlsLinearRegression.cs index c47a634126..1dadcec991 100644 --- a/src/Microsoft.ML.Mkl.Components/OlsLinearRegression.cs +++ b/src/Microsoft.ML.Mkl.Components/OlsLinearRegression.cs @@ -23,16 +23,16 @@ OlsTrainer.LoadNameValue, OlsTrainer.ShortName)] -[assembly: LoadableClass(typeof(OrdinaryLeastSquaresRegressionModelParameters), null, typeof(SignatureLoadModel), +[assembly: LoadableClass(typeof(OlsModelParameters), null, typeof(SignatureLoadModel), "OLS Linear Regression Executor", - OrdinaryLeastSquaresRegressionModelParameters.LoaderSignature)] + OlsModelParameters.LoaderSignature)] [assembly: LoadableClass(typeof(void), typeof(OlsTrainer), null, typeof(SignatureEntryPointModule), OlsTrainer.LoadNameValue)] namespace Microsoft.ML.Trainers { /// - public sealed class OlsTrainer : TrainerEstimatorBase, OrdinaryLeastSquaresRegressionModelParameters> + public sealed class OlsTrainer : TrainerEstimatorBase, OlsModelParameters> { /// Advanced options for trainer. 
public sealed class Options : TrainerInputBaseWithWeight @@ -85,8 +85,8 @@ internal OlsTrainer(IHostEnvironment env, Options options) _perParameterSignificance = options.CalculateStatistics; } - private protected override RegressionPredictionTransformer MakeTransformer(OrdinaryLeastSquaresRegressionModelParameters model, DataViewSchema trainSchema) - => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); + private protected override RegressionPredictionTransformer MakeTransformer(OlsModelParameters model, DataViewSchema trainSchema) + => new RegressionPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); private protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSchema) { @@ -105,7 +105,7 @@ private protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape /// Either p, or 0 or 1 if it was outside the range 0 to 1 private static Double ProbClamp(Double p) => Math.Max(0, Math.Min(p, 1)); - private protected override OrdinaryLeastSquaresRegressionModelParameters TrainModelCore(TrainContext context) + private protected override OlsModelParameters TrainModelCore(TrainContext context) { using (var ch = Host.Start("Training")) { @@ -136,7 +136,7 @@ private protected override OrdinaryLeastSquaresRegressionModelParameters TrainMo } } - private OrdinaryLeastSquaresRegressionModelParameters TrainCore(IChannel ch, FloatLabelCursor.Factory cursorFactory, int featureCount) + private OlsModelParameters TrainCore(IChannel ch, FloatLabelCursor.Factory cursorFactory, int featureCount) { Host.AssertValue(ch); ch.AssertValue(cursorFactory); @@ -267,7 +267,7 @@ private OrdinaryLeastSquaresRegressionModelParameters TrainCore(IChannel ch, Flo { // We would expect the solution to the problem to be exact in this case. ch.Info("Number of examples equals number of parameters, solution is exact but no statistics can be derived"); - return new OrdinaryLeastSquaresRegressionModelParameters(Host, in weights, bias); + return new OlsModelParameters(Host, in weights, bias); } Double rss = 0; // residual sum of squares @@ -303,7 +303,7 @@ private OrdinaryLeastSquaresRegressionModelParameters TrainCore(IChannel ch, Flo // Also we can't estimate it, unless we can estimate the variance, which requires more examples than // parameters. if (!_perParameterSignificance || m >= n) - return new OrdinaryLeastSquaresRegressionModelParameters(Host, in weights, bias, rSquared: rSquared, rSquaredAdjusted: rSquaredAdjusted); + return new OlsModelParameters(Host, in weights, bias, rSquared: rSquared, rSquaredAdjusted: rSquaredAdjusted); ch.Assert(!Double.IsNaN(rSquaredAdjusted)); var standardErrors = new Double[m]; @@ -350,7 +350,7 @@ private OrdinaryLeastSquaresRegressionModelParameters TrainCore(IChannel ch, Flo ch.Check(0 <= pValues[i] && pValues[i] <= 1, "p-Value calculated outside expected [0,1] range"); } - return new OrdinaryLeastSquaresRegressionModelParameters(Host, in weights, bias, standardErrors, tValues, pValues, rSquared, rSquaredAdjusted); + return new OlsModelParameters(Host, in weights, bias, standardErrors, tValues, pValues, rSquared, rSquaredAdjusted); } internal static class Mkl @@ -509,7 +509,7 @@ internal static CommonOutputs.RegressionOutput TrainRegression(IHostEnvironment /// /// A linear predictor for which per parameter significance statistics are available. 
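// --- Illustration (not part of the diff above): a sketch of training with OlsTrainer and
// --- inspecting the renamed OlsModelParameters. The Ols(...) catalog extension name and the
// --- Weights/RSquared properties are assumptions inferred from the types shown in this file.
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Trainers;

public static class OlsRenameSketch
{
    public static void Example(MLContext mlContext, IDataView trainingData)
    {
        var trainer = mlContext.Regression.Trainers.Ols(
            labelColumnName: "Label", featureColumnName: "Features");

        // RegressionPredictionTransformer<OlsModelParameters>, per MakeTransformer above.
        var model = trainer.Fit(trainingData);
        OlsModelParameters ols = model.Model;

        var weights = ols.Weights.ToArray(); // linear coefficients (assumed property)
        var rSquared = ols.RSquared;         // coefficient of determination (assumed property)
    }
}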
/// - public sealed class OrdinaryLeastSquaresRegressionModelParameters : RegressionModelParameters + public sealed class OlsModelParameters : RegressionModelParameters { internal const string LoaderSignature = "OlsLinearRegressionExec"; internal const string RegistrationName = "OlsLinearRegressionPredictor"; @@ -525,7 +525,7 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00010001, verWeCanReadBack: 0x00010001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(OrdinaryLeastSquaresRegressionModelParameters).Assembly.FullName); + loaderAssemblyName: typeof(OlsModelParameters).Assembly.FullName); } /// @@ -587,7 +587,7 @@ private static VersionInfo GetVersionInfo() /// Optional: The p-values of the weights and bias. /// The coefficient of determination. /// The adjusted coefficient of determination. - internal OrdinaryLeastSquaresRegressionModelParameters(IHostEnvironment env, in VBuffer weights, float bias, + internal OlsModelParameters(IHostEnvironment env, in VBuffer weights, float bias, Double[] standardErrors = null, Double[] tValues = null, Double[] pValues = null, Double rSquared = 1, Double rSquaredAdjusted = float.NaN) : base(env, RegistrationName, in weights, bias) { @@ -624,7 +624,7 @@ internal OrdinaryLeastSquaresRegressionModelParameters(IHostEnvironment env, in RSquaredAdjusted = rSquaredAdjusted; } - private OrdinaryLeastSquaresRegressionModelParameters(IHostEnvironment env, ModelLoadContext ctx) + private OlsModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, ctx) { // *** Binary format *** @@ -708,12 +708,12 @@ private static void ProbCheckDecode(Double p) Contracts.CheckDecode(0 <= p && p <= 1); } - private static OrdinaryLeastSquaresRegressionModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) + private static OlsModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - return new OrdinaryLeastSquaresRegressionModelParameters(env, ctx); + return new OlsModelParameters(env, ctx); } private protected override void SaveSummary(TextWriter writer, RoleMappedSchema schema) diff --git a/src/Microsoft.ML.PCA/PcaTrainer.cs b/src/Microsoft.ML.PCA/PcaTrainer.cs index 231d69a031..3c3907d9fc 100644 --- a/src/Microsoft.ML.PCA/PcaTrainer.cs +++ b/src/Microsoft.ML.PCA/PcaTrainer.cs @@ -23,8 +23,8 @@ RandomizedPcaTrainer.LoadNameValue, RandomizedPcaTrainer.ShortName)] -[assembly: LoadableClass(typeof(PrincipleComponentModelParameters), null, typeof(SignatureLoadModel), - "PCA Anomaly Executor", PrincipleComponentModelParameters.LoaderSignature)] +[assembly: LoadableClass(typeof(PcaModelParameters), null, typeof(SignatureLoadModel), + "PCA Anomaly Executor", PcaModelParameters.LoaderSignature)] [assembly: LoadableClass(typeof(void), typeof(RandomizedPcaTrainer), null, typeof(SignatureEntryPointModule), RandomizedPcaTrainer.LoadNameValue)] @@ -39,7 +39,7 @@ namespace Microsoft.ML.Trainers /// /// This PCA can be made into Kernel PCA by using Random Fourier Features transform /// - public sealed class RandomizedPcaTrainer : TrainerEstimatorBase, PrincipleComponentModelParameters> + public sealed class RandomizedPcaTrainer : TrainerEstimatorBase, PcaModelParameters> { internal const string LoadNameValue = "pcaAnomaly"; internal const string UserNameValue = "PCA Anomaly Detector"; @@ -139,7 +139,7 @@ private RandomizedPcaTrainer(IHostEnvironment env, Options options, string featu } - private 
protected override PrincipleComponentModelParameters TrainModelCore(TrainContext context) + private protected override PcaModelParameters TrainModelCore(TrainContext context) { Host.CheckValue(context, nameof(context)); @@ -164,7 +164,7 @@ private static SchemaShape.Column MakeFeatureColumn(string featureColumn) } //Note: the notations used here are the same as in https://web.stanford.edu/group/mmds/slides2010/Martinsson.pdf (pg. 9) - private PrincipleComponentModelParameters TrainCore(IChannel ch, RoleMappedData data, int dimension) + private PcaModelParameters TrainCore(IChannel ch, RoleMappedData data, int dimension) { Host.AssertValue(ch); ch.AssertValue(data); @@ -222,7 +222,7 @@ private PrincipleComponentModelParameters TrainCore(IChannel ch, RoleMappedData EigenUtils.EigenDecomposition(b2, out smallEigenvalues, out smallEigenvectors); PostProcess(b, smallEigenvalues, smallEigenvectors, dimension, oversampledRank); - return new PrincipleComponentModelParameters(Host, _rank, b, in mean); + return new PcaModelParameters(Host, _rank, b, in mean); } private static float[][] Zeros(int k, int d) @@ -343,8 +343,8 @@ private protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape }; } - private protected override AnomalyPredictionTransformer MakeTransformer(PrincipleComponentModelParameters model, DataViewSchema trainSchema) - => new AnomalyPredictionTransformer(Host, model, trainSchema, _featureColumn); + private protected override AnomalyPredictionTransformer MakeTransformer(PcaModelParameters model, DataViewSchema trainSchema) + => new AnomalyPredictionTransformer(Host, model, trainSchema, _featureColumn); [TlcModule.EntryPoint(Name = "Trainers.PcaAnomalyDetector", Desc = "Train an PCA Anomaly model.", @@ -370,7 +370,7 @@ internal static CommonOutputs.AnomalyDetectionOutput TrainPcaAnomaly(IHostEnviro // REVIEW: move the predictor to a different file and fold EigenUtils.cs to this file. // REVIEW: Include the above detail in the XML documentation file. /// - public sealed class PrincipleComponentModelParameters : ModelParametersBase, + public sealed class PcaModelParameters : ModelParametersBase, IValueMapper, ICanGetSummaryAsIDataView, ICanSaveInTextFormat, @@ -387,7 +387,7 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00010001, verWeCanReadBack: 0x00010001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(PrincipleComponentModelParameters).Assembly.FullName); + loaderAssemblyName: typeof(PcaModelParameters).Assembly.FullName); } private readonly int _dimension; @@ -408,7 +408,7 @@ private static VersionInfo GetVersionInfo() /// The rank of the PCA approximation of the covariance matrix. This is the number of eigenvectors in the model. /// Array of eigenvectors. /// The mean vector of the training data. 
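// --- Illustration (not part of the diff above): a sketch of the randomized PCA anomaly
// --- detector whose model type is renamed to PcaModelParameters. The RandomizedPca catalog
// --- extension and the output columns are assumptions; data loading is omitted.
using Microsoft.ML;

public static class RandomizedPcaSketch
{
    public static void Example(MLContext mlContext, IDataView trainingData, IDataView testData)
    {
        // Train an anomaly detector over the "Features" column with a rank-3 approximation.
        var trainer = mlContext.AnomalyDetection.Trainers.RandomizedPca(
            featureColumnName: "Features", rank: 3);

        // AnomalyPredictionTransformer<PcaModelParameters>, per MakeTransformer above.
        var model = trainer.Fit(trainingData);

        // Score new data; higher scores indicate more anomalous examples.
        var scored = model.Transform(testData);
    }
}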
- internal PrincipleComponentModelParameters(IHostEnvironment env, int rank, float[][] eigenVectors, in VBuffer mean) + internal PcaModelParameters(IHostEnvironment env, int rank, float[][] eigenVectors, in VBuffer mean) : base(env, RegistrationName) { _dimension = eigenVectors[0].Length; @@ -428,7 +428,7 @@ internal PrincipleComponentModelParameters(IHostEnvironment env, int rank, float _inputType = new VectorType(NumberDataViewType.Single, _dimension); } - private PrincipleComponentModelParameters(IHostEnvironment env, ModelLoadContext ctx) + private PcaModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, RegistrationName, ctx) { // *** Binary format *** @@ -500,12 +500,12 @@ private protected override void SaveCore(ModelSaveContext ctx) writer.WriteSinglesNoCount(_eigenVectors[i].GetValues().Slice(0, _dimension)); } - private static PrincipleComponentModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) + private static PcaModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - return new PrincipleComponentModelParameters(env, ctx); + return new PcaModelParameters(env, ctx); } void ICanSaveSummary.SaveSummary(TextWriter writer, RoleMappedSchema schema) diff --git a/src/Microsoft.ML.StandardTrainers/Standard/LogisticRegression/LogisticRegression.cs b/src/Microsoft.ML.StandardTrainers/Standard/LogisticRegression/LogisticRegression.cs index 2f3fbdf6e6..2d5b6634c4 100644 --- a/src/Microsoft.ML.StandardTrainers/Standard/LogisticRegression/LogisticRegression.cs +++ b/src/Microsoft.ML.StandardTrainers/Standard/LogisticRegression/LogisticRegression.cs @@ -15,21 +15,21 @@ using Microsoft.ML.Runtime; using Microsoft.ML.Trainers; -[assembly: LoadableClass(LogisticRegressionBinaryClassificationTrainer.Summary, typeof(LogisticRegressionBinaryClassificationTrainer), typeof(LogisticRegressionBinaryClassificationTrainer.Options), +[assembly: LoadableClass(LogisticRegressionBinaryTrainer.Summary, typeof(LogisticRegressionBinaryTrainer), typeof(LogisticRegressionBinaryTrainer.Options), new[] { typeof(SignatureBinaryClassifierTrainer), typeof(SignatureTrainer), typeof(SignatureFeatureScorerTrainer) }, - LogisticRegressionBinaryClassificationTrainer.UserNameValue, - LogisticRegressionBinaryClassificationTrainer.LoadNameValue, - LogisticRegressionBinaryClassificationTrainer.ShortName, + LogisticRegressionBinaryTrainer.UserNameValue, + LogisticRegressionBinaryTrainer.LoadNameValue, + LogisticRegressionBinaryTrainer.ShortName, "logisticregressionwrapper")] -[assembly: LoadableClass(typeof(void), typeof(LogisticRegressionBinaryClassificationTrainer), null, typeof(SignatureEntryPointModule), LogisticRegressionBinaryClassificationTrainer.LoadNameValue)] +[assembly: LoadableClass(typeof(void), typeof(LogisticRegressionBinaryTrainer), null, typeof(SignatureEntryPointModule), LogisticRegressionBinaryTrainer.LoadNameValue)] namespace Microsoft.ML.Trainers { /// /// - public sealed partial class LogisticRegressionBinaryClassificationTrainer : LbfgsTrainerBase>, CalibratedModelParametersBase> { @@ -54,7 +54,7 @@ public sealed class Options : OptionsBase /// /// The instance of that computes the std of the training statistics, at the end of training. /// The calculations are not part of Microsoft.ML package, due to the size of MKL. - /// If you need these calculations, add the Microsoft.ML.Mkl.Components package, and initialize . 
+ /// If you need these calculations, add the Microsoft.ML.Mkl.Components package, and initialize . /// to the implementation in the Microsoft.ML.Mkl.Components package. /// public ComputeLogisticRegressionStandardDeviation ComputeStandardDeviation; @@ -64,7 +64,7 @@ public sealed class Options : OptionsBase private LinearModelStatistics _stats; /// - /// Initializes a new instance of + /// Initializes a new instance of /// /// The environment to use. /// The name of the label column. @@ -73,9 +73,9 @@ public sealed class Options : OptionsBase /// Enforce non-negative weights. /// Weight of L1 regularizer term. /// Weight of L2 regularizer term. - /// Memory size for . Low=faster, less accurate. + /// Memory size for . Low=faster, less accurate. /// Threshold for optimizer convergence. - internal LogisticRegressionBinaryClassificationTrainer(IHostEnvironment env, + internal LogisticRegressionBinaryTrainer(IHostEnvironment env, string labelColumn = DefaultColumnNames.Label, string featureColumn = DefaultColumnNames.Features, string weights = null, @@ -95,9 +95,9 @@ internal LogisticRegressionBinaryClassificationTrainer(IHostEnvironment env, } /// - /// Initializes a new instance of + /// Initializes a new instance of /// - internal LogisticRegressionBinaryClassificationTrainer(IHostEnvironment env, Options options) + internal LogisticRegressionBinaryTrainer(IHostEnvironment env, Options options) : base(env, options, TrainerUtils.MakeBoolScalarLabel(options.LabelColumnName)) { _posWeight = 0; @@ -127,7 +127,7 @@ private protected override BinaryPredictionTransformer new BinaryPredictionTransformer>(Host, model, trainSchema, FeatureColumn.Name); /// - /// Continues the training of a using an already trained and returns + /// Continues the training of a using an already trained and returns /// a . /// public BinaryPredictionTransformer> Fit(IDataView trainData, LinearModelParameters modelParameters) @@ -420,7 +420,7 @@ internal static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnviro EntryPointUtils.CheckInputArgs(host, input); return TrainerEntryPointsUtils.Train(host, input, - () => new LogisticRegressionBinaryClassificationTrainer(host, input), + () => new LogisticRegressionBinaryTrainer(host, input), () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.LabelColumnName), () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.ExampleWeightColumnName)); } @@ -439,7 +439,7 @@ public abstract class ComputeLogisticRegressionStandardDeviation /// Computes the standard deviation matrix of each of the non-zero training weights, needed to calculate further the standard deviation, /// p-value and z-Score. /// The calculations are not part of Microsoft.ML package, due to the size of MKL. - /// If you need these calculations, add the Microsoft.ML.Mkl.Components package, and initialize + /// If you need these calculations, add the Microsoft.ML.Mkl.Components package, and initialize /// to the implementation in the Microsoft.ML.Mkl.Components package. /// Due to the existence of regularization, an approximation is used to compute the variances of the trained linear coefficients. 
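// --- Illustration (not part of the diff above): a sketch of the renamed
// --- LogisticRegressionBinaryTrainer, including the Fit overload documented above that
// --- continues training from an existing linear model. Using SubModel to pull the linear
// --- weights out of the calibrated model is an assumption.
using Microsoft.ML;

public static class LogisticRegressionRenameSketch
{
    public static void Example(MLContext mlContext, IDataView firstBatch, IDataView secondBatch)
    {
        var trainer = mlContext.BinaryClassification.Trainers.LogisticRegression(
            labelColumnName: "Label", featureColumnName: "Features", l2Regularization: 0.1f);

        // Initial training pass.
        var model = trainer.Fit(firstBatch);

        // Continue training from the already learned weights (warm start).
        var updated = trainer.Fit(secondBatch, model.Model.SubModel);
    }
}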
/// diff --git a/src/Microsoft.ML.StandardTrainers/Standard/LogisticRegression/MulticlassLogisticRegression.cs b/src/Microsoft.ML.StandardTrainers/Standard/LogisticRegression/MulticlassLogisticRegression.cs index 12a058eac8..8487ab6b79 100644 --- a/src/Microsoft.ML.StandardTrainers/Standard/LogisticRegression/MulticlassLogisticRegression.cs +++ b/src/Microsoft.ML.StandardTrainers/Standard/LogisticRegression/MulticlassLogisticRegression.cs @@ -80,7 +80,7 @@ public sealed class Options : OptionsBase /// Enforce non-negative weights. /// Weight of L1 regularizer term. /// Weight of L2 regularizer term. - /// Memory size for . Low=faster, less accurate. + /// Memory size for . Low=faster, less accurate. /// Threshold for optimizer convergence. internal LogisticRegressionMulticlassClassificationTrainer(IHostEnvironment env, string labelColumn = DefaultColumnNames.Label, @@ -429,7 +429,7 @@ internal MulticlassLogisticRegressionModelParameters(IHostEnvironment env, in VB /// /// Initializes a new instance of the class. - /// This constructor is called by to create the predictor. + /// This constructor is called by to create the predictor. /// /// The host environment. /// The array of weights vectors. It should contain weights. @@ -1004,7 +1004,7 @@ DataViewRow ICanGetSummaryAsIRow.GetStatsIRowOrNull(RoleMappedSchema schema) /// /// A component to train a logistic regression model. /// - public partial class LogisticRegressionBinaryClassificationTrainer + public partial class LogisticRegressionBinaryTrainer { [TlcModule.EntryPoint(Name = "Trainers.LogisticRegressionClassifier", Desc = Summary, diff --git a/src/Microsoft.ML.StandardTrainers/Standard/MulticlassClassification/MulticlassNaiveBayesTrainer.cs b/src/Microsoft.ML.StandardTrainers/Standard/MulticlassClassification/MulticlassNaiveBayesTrainer.cs index 74e85a22d2..85ecf0973d 100644 --- a/src/Microsoft.ML.StandardTrainers/Standard/MulticlassClassification/MulticlassNaiveBayesTrainer.cs +++ b/src/Microsoft.ML.StandardTrainers/Standard/MulticlassClassification/MulticlassNaiveBayesTrainer.cs @@ -13,20 +13,20 @@ using Microsoft.ML.Runtime; using Microsoft.ML.Trainers; -[assembly: LoadableClass(NaiveBayesTrainer.Summary, typeof(NaiveBayesTrainer), typeof(NaiveBayesTrainer.Options), +[assembly: LoadableClass(NaiveBayesMulticlassTrainer.Summary, typeof(NaiveBayesMulticlassTrainer), typeof(NaiveBayesMulticlassTrainer.Options), new[] { typeof(SignatureMulticlassClassifierTrainer), typeof(SignatureTrainer) }, - NaiveBayesTrainer.UserName, - NaiveBayesTrainer.LoadName, - NaiveBayesTrainer.ShortName, DocName = "trainer/NaiveBayes.md")] + NaiveBayesMulticlassTrainer.UserName, + NaiveBayesMulticlassTrainer.LoadName, + NaiveBayesMulticlassTrainer.ShortName, DocName = "trainer/NaiveBayes.md")] -[assembly: LoadableClass(typeof(MulticlassNaiveBayesModelParameters), null, typeof(SignatureLoadModel), - "Multi Class Naive Bayes predictor", MulticlassNaiveBayesModelParameters.LoaderSignature)] +[assembly: LoadableClass(typeof(NaiveBayesMulticlassModelParameters), null, typeof(SignatureLoadModel), + "Multi Class Naive Bayes predictor", NaiveBayesMulticlassModelParameters.LoaderSignature)] -[assembly: LoadableClass(typeof(void), typeof(NaiveBayesTrainer), null, typeof(SignatureEntryPointModule), NaiveBayesTrainer.LoadName)] +[assembly: LoadableClass(typeof(void), typeof(NaiveBayesMulticlassTrainer), null, typeof(SignatureEntryPointModule), NaiveBayesMulticlassTrainer.LoadName)] namespace Microsoft.ML.Trainers { - public sealed class NaiveBayesTrainer : 
TrainerEstimatorBase, MulticlassNaiveBayesModelParameters> + public sealed class NaiveBayesMulticlassTrainer : TrainerEstimatorBase, NaiveBayesMulticlassModelParameters> { internal const string LoadName = "MultiClassNaiveBayes"; internal const string UserName = "Multiclass Naive Bayes"; @@ -49,12 +49,12 @@ internal sealed class Options : TrainerInputBaseWithLabel public override TrainerInfo Info => _info; /// - /// Initializes a new instance of + /// Initializes a new instance of /// /// The environment to use. /// The name of the label column. /// The name of the feature column. - internal NaiveBayesTrainer(IHostEnvironment env, + internal NaiveBayesMulticlassTrainer(IHostEnvironment env, string labelColumn = DefaultColumnNames.Label, string featureColumn = DefaultColumnNames.Features) : base(Contracts.CheckRef(env, nameof(env)).Register(LoadName), TrainerUtils.MakeR4VecFeature(featureColumn), @@ -65,9 +65,9 @@ internal NaiveBayesTrainer(IHostEnvironment env, } /// - /// Initializes a new instance of + /// Initializes a new instance of /// - internal NaiveBayesTrainer(IHostEnvironment env, Options options) + internal NaiveBayesMulticlassTrainer(IHostEnvironment env, Options options) : base(Contracts.CheckRef(env, nameof(env)).Register(LoadName), TrainerUtils.MakeR4VecFeature(options.FeatureColumnName), TrainerUtils.MakeU4ScalarColumn(options.LabelColumnName)) { @@ -89,10 +89,10 @@ private protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape }; } - private protected override MulticlassPredictionTransformer MakeTransformer(MulticlassNaiveBayesModelParameters model, DataViewSchema trainSchema) - => new MulticlassPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name, LabelColumn.Name); + private protected override MulticlassPredictionTransformer MakeTransformer(NaiveBayesMulticlassModelParameters model, DataViewSchema trainSchema) + => new MulticlassPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name, LabelColumn.Name); - private protected override MulticlassNaiveBayesModelParameters TrainModelCore(TrainContext context) + private protected override NaiveBayesMulticlassModelParameters TrainModelCore(TrainContext context) { Host.CheckValue(context, nameof(context)); var data = context.TrainingSet; @@ -160,7 +160,7 @@ private protected override MulticlassNaiveBayesModelParameters TrainModelCore(Tr Array.Resize(ref labelHistogram, labelCount); Array.Resize(ref featureHistogram, labelCount); - return new MulticlassNaiveBayesModelParameters(Host, labelHistogram, featureHistogram, featureCount); + return new NaiveBayesMulticlassModelParameters(Host, labelHistogram, featureHistogram, featureCount); } [TlcModule.EntryPoint(Name = "Trainers.NaiveBayesClassifier", @@ -175,12 +175,12 @@ internal static CommonOutputs.MulticlassClassificationOutput TrainMulticlassNaiv EntryPointUtils.CheckInputArgs(host, input); return TrainerEntryPointsUtils.Train(host, input, - () => new NaiveBayesTrainer(host, input), + () => new NaiveBayesMulticlassTrainer(host, input), () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.LabelColumnName)); } } - public sealed class MulticlassNaiveBayesModelParameters : + public sealed class NaiveBayesMulticlassModelParameters : ModelParametersBase>, IValueMapper { @@ -193,7 +193,7 @@ private static VersionInfo GetVersionInfo() verReadableCur: 0x00010001, verWeCanReadBack: 0x00010001, loaderSignature: LoaderSignature, - loaderAssemblyName: typeof(MulticlassNaiveBayesModelParameters).Assembly.FullName); + 
loaderAssemblyName: typeof(NaiveBayesMulticlassModelParameters).Assembly.FullName); } private readonly int[] _labelHistogram; @@ -229,7 +229,7 @@ private static VersionInfo GetVersionInfo() /// The histogram of labels. /// The feature histogram. /// The number of features. - internal MulticlassNaiveBayesModelParameters(IHostEnvironment env, int[] labelHistogram, int[][] featureHistogram, int featureCount) + internal NaiveBayesMulticlassModelParameters(IHostEnvironment env, int[] labelHistogram, int[][] featureHistogram, int featureCount) : base(env, LoaderSignature) { Host.AssertValue(labelHistogram); @@ -246,7 +246,7 @@ internal MulticlassNaiveBayesModelParameters(IHostEnvironment env, int[] labelHi _outputType = new VectorType(NumberDataViewType.Single, _labelCount); } - private MulticlassNaiveBayesModelParameters(IHostEnvironment env, ModelLoadContext ctx) + private NaiveBayesMulticlassModelParameters(IHostEnvironment env, ModelLoadContext ctx) : base(env, LoaderSignature, ctx) { // *** Binary format *** @@ -280,12 +280,12 @@ private MulticlassNaiveBayesModelParameters(IHostEnvironment env, ModelLoadConte _outputType = new VectorType(NumberDataViewType.Single, _labelCount); } - private static MulticlassNaiveBayesModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) + private static NaiveBayesMulticlassModelParameters Create(IHostEnvironment env, ModelLoadContext ctx) { Contracts.CheckValue(env, nameof(env)); env.CheckValue(ctx, nameof(ctx)); ctx.CheckAtModel(GetVersionInfo()); - return new MulticlassNaiveBayesModelParameters(env, ctx); + return new NaiveBayesMulticlassModelParameters(env, ctx); } private protected override void SaveCore(ModelSaveContext ctx) diff --git a/src/Microsoft.ML.StandardTrainers/Standard/PoissonRegression/PoissonRegression.cs b/src/Microsoft.ML.StandardTrainers/Standard/PoissonRegression/PoissonRegression.cs index cc69c9dec7..1fd56c3e18 100644 --- a/src/Microsoft.ML.StandardTrainers/Standard/PoissonRegression/PoissonRegression.cs +++ b/src/Microsoft.ML.StandardTrainers/Standard/PoissonRegression/PoissonRegression.cs @@ -48,7 +48,7 @@ public sealed class Options : OptionsBase /// Weight of L1 regularizer term. /// Weight of L2 regularizer term. /// Threshold for optimizer convergence. - /// Memory size for . Low=faster, less accurate. + /// Memory size for . Low=faster, less accurate. /// Enforce non-negative weights. internal PoissonRegressionTrainer(IHostEnvironment env, string labelColumn = DefaultColumnNames.Label, diff --git a/src/Microsoft.ML.StandardTrainers/Standard/SdcaBinary.cs b/src/Microsoft.ML.StandardTrainers/Standard/SdcaBinary.cs index dc4653365a..f38722b42c 100644 --- a/src/Microsoft.ML.StandardTrainers/Standard/SdcaBinary.cs +++ b/src/Microsoft.ML.StandardTrainers/Standard/SdcaBinary.cs @@ -1425,8 +1425,8 @@ public void Add(Double summand) /// /// SDCA is a general training algorithm for (generalized) linear models such as support vector machine, linear regression, logistic regression, /// and so on. SDCA binary classification trainer family includes several sealed members: - /// (1) supports general loss functions and returns . - /// (2) essentially trains a regularized logistic regression model. Because logistic regression + /// (1) supports general loss functions and returns . + /// (2) essentially trains a regularized logistic regression model. Because logistic regression /// naturally provide probability output, this generated model's type is . /// where is and is . 
/// @@ -1546,17 +1546,17 @@ private protected override BinaryPredictionTransformer MakeTra /// linear function to a . /// /// - public sealed class SdcaCalibratedBinaryClassificationTrainer : + public sealed class SdcaCalibratedBinaryTrainer : SdcaBinaryTrainerBase> { /// - /// Options for the . + /// Options for the . /// public sealed class Options : BinaryOptionsBase { } - internal SdcaCalibratedBinaryClassificationTrainer(IHostEnvironment env, + internal SdcaCalibratedBinaryTrainer(IHostEnvironment env, string labelColumnName = DefaultColumnNames.Label, string featureColumnName = DefaultColumnNames.Features, string weightColumnName = null, @@ -1567,7 +1567,7 @@ internal SdcaCalibratedBinaryClassificationTrainer(IHostEnvironment env, { } - internal SdcaCalibratedBinaryClassificationTrainer(IHostEnvironment env, Options options) + internal SdcaCalibratedBinaryTrainer(IHostEnvironment env, Options options) : base(env, options, new LogLoss()) { } @@ -1610,10 +1610,10 @@ private protected override SchemaShape.Column[] ComputeSdcaBinaryClassifierSchem /// The for training a binary logistic regression classification model using the stochastic dual coordinate ascent method. /// /// - public sealed class SdcaNonCalibratedBinaryClassificationTrainer : SdcaBinaryTrainerBase + public sealed class SdcaNonCalibratedBinaryTrainer : SdcaBinaryTrainerBase { /// - /// Options for the . + /// Options for the . /// public sealed class Options : BinaryOptionsBase { @@ -1635,7 +1635,7 @@ public sealed class Options : BinaryOptionsBase public ISupportSdcaClassificationLoss LossFunction { get; set; } } - internal SdcaNonCalibratedBinaryClassificationTrainer(IHostEnvironment env, + internal SdcaNonCalibratedBinaryTrainer(IHostEnvironment env, string labelColumnName = DefaultColumnNames.Label, string featureColumnName = DefaultColumnNames.Features, string weightColumnName = null, @@ -1647,7 +1647,7 @@ internal SdcaNonCalibratedBinaryClassificationTrainer(IHostEnvironment env, { } - internal SdcaNonCalibratedBinaryClassificationTrainer(IHostEnvironment env, Options options) + internal SdcaNonCalibratedBinaryTrainer(IHostEnvironment env, Options options) : base(env, options, options.LossFunction ?? options.LossFunctionFactory.CreateComponent(env)) { } @@ -1673,7 +1673,7 @@ private protected override SchemaShape.Column[] ComputeSdcaBinaryClassifierSchem } /// - /// Comparing with , + /// Comparing with , /// directly outputs a built from /// the learned weights and bias without calibration. /// @@ -1940,7 +1940,7 @@ private protected override BinaryPredictionTransformer MakeTransformer(T => new BinaryPredictionTransformer(Host, model, trainSchema, FeatureColumn.Name); /// - /// Continues the training of a using an already trained and returns a . + /// Continues the training of a using an already trained and returns a . 
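// --- Illustration (not part of the diff above): a sketch contrasting the two renamed SDCA
// --- binary trainers. Hyperparameter values are placeholders; HingeLoss is used only as an
// --- example of an ISupportSdcaClassificationLoss implementation.
using Microsoft.ML;
using Microsoft.ML.Trainers;

public static class SdcaBinaryRenameSketch
{
    public static void Example(MLContext mlContext, IDataView trainingData)
    {
        // Calibrated: log-loss only, produces a probability through a calibrated model.
        var calibrated = mlContext.BinaryClassification.Trainers.SdcaCalibrated(
            labelColumnName: "Label", featureColumnName: "Features", l2Regularization: 0.01f);

        // Non-calibrated: accepts a general loss and yields raw LinearBinaryModelParameters.
        var nonCalibrated = mlContext.BinaryClassification.Trainers.SdcaNonCalibrated(
            labelColumnName: "Label", featureColumnName: "Features", loss: new HingeLoss());

        var calibratedModel = calibrated.Fit(trainingData);
        var nonCalibratedModel = nonCalibrated.Fit(trainingData);
    }
}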
/// public BinaryPredictionTransformer Fit(IDataView trainData, LinearModelParameters modelParameters) => TrainTransformer(trainData, initPredictor: modelParameters); diff --git a/src/Microsoft.ML.StandardTrainers/Standard/SdcaMulticlass.cs b/src/Microsoft.ML.StandardTrainers/Standard/SdcaMulticlass.cs index c12d5a52e5..452f35d0fc 100644 --- a/src/Microsoft.ML.StandardTrainers/Standard/SdcaMulticlass.cs +++ b/src/Microsoft.ML.StandardTrainers/Standard/SdcaMulticlass.cs @@ -16,11 +16,11 @@ using Microsoft.ML.Runtime; using Microsoft.ML.Trainers; -[assembly: LoadableClass(SdcaMulticlassClassificationTrainer.Summary, typeof(SdcaMulticlassClassificationTrainer), typeof(SdcaMulticlassClassificationTrainer.Options), +[assembly: LoadableClass(SdcaMulticlassTrainer.Summary, typeof(SdcaMulticlassTrainer), typeof(SdcaMulticlassTrainer.Options), new[] { typeof(SignatureMulticlassClassifierTrainer), typeof(SignatureTrainer), typeof(SignatureFeatureScorerTrainer) }, - SdcaMulticlassClassificationTrainer.UserNameValue, - SdcaMulticlassClassificationTrainer.LoadNameValue, - SdcaMulticlassClassificationTrainer.ShortName)] + SdcaMulticlassTrainer.UserNameValue, + SdcaMulticlassTrainer.LoadNameValue, + SdcaMulticlassTrainer.ShortName)] namespace Microsoft.ML.Trainers { @@ -28,7 +28,7 @@ namespace Microsoft.ML.Trainers /// The for training a multiclass logistic regression classification model using the stochastic dual coordinate ascent method. /// /// - public sealed class SdcaMulticlassClassificationTrainer : SdcaTrainerBase, MulticlassLogisticRegressionModelParameters> + public sealed class SdcaMulticlassTrainer : SdcaTrainerBase, MulticlassLogisticRegressionModelParameters> { internal const string LoadNameValue = "SDCAMC"; internal const string UserNameValue = "Fast Linear Multi-class Classification (SA-SDCA)"; @@ -36,7 +36,7 @@ public sealed class SdcaMulticlassClassificationTrainer : SdcaTrainerBase - /// Options for the . + /// Options for the . /// public sealed class Options : OptionsBase { @@ -63,7 +63,7 @@ public sealed class Options : OptionsBase private protected override PredictionKind PredictionKind => PredictionKind.MulticlassClassification; /// - /// Initializes a new instance of + /// Initializes a new instance of /// /// The environment to use. /// The label, or dependent variable. @@ -73,7 +73,7 @@ public sealed class Options : OptionsBase /// The L2 regularization hyperparameter. /// The L1 regularization hyperparameter. Higher values will tend to lead to more sparse model. /// The maximum number of passes to perform over the data. 
- internal SdcaMulticlassClassificationTrainer(IHostEnvironment env, + internal SdcaMulticlassTrainer(IHostEnvironment env, string labelColumn = DefaultColumnNames.Label, string featureColumn = DefaultColumnNames.Features, string weights = null, @@ -90,7 +90,7 @@ internal SdcaMulticlassClassificationTrainer(IHostEnvironment env, Loss = _loss; } - internal SdcaMulticlassClassificationTrainer(IHostEnvironment env, Options options, + internal SdcaMulticlassTrainer(IHostEnvironment env, Options options, string featureColumn, string labelColumn, string weightColumn = null) : base(env, options, TrainerUtils.MakeU4ScalarColumn(labelColumn), TrainerUtils.MakeR4ScalarWeightColumn(weightColumn)) { @@ -101,7 +101,7 @@ internal SdcaMulticlassClassificationTrainer(IHostEnvironment env, Options optio Loss = _loss; } - internal SdcaMulticlassClassificationTrainer(IHostEnvironment env, Options options) + internal SdcaMulticlassTrainer(IHostEnvironment env, Options options) : this(env, options, options.FeatureColumnName, options.LabelColumnName) { } @@ -448,18 +448,18 @@ private protected override MulticlassPredictionTransformer(host, input, - () => new SdcaMulticlassClassificationTrainer(host, input), + return TrainerEntryPointsUtils.Train(host, input, + () => new SdcaMulticlassTrainer(host, input), () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.LabelColumnName)); } } diff --git a/src/Microsoft.ML.StandardTrainers/StandardTrainersCatalog.cs b/src/Microsoft.ML.StandardTrainers/StandardTrainersCatalog.cs index c2041dd248..4a87d8e23c 100644 --- a/src/Microsoft.ML.StandardTrainers/StandardTrainersCatalog.cs +++ b/src/Microsoft.ML.StandardTrainers/StandardTrainersCatalog.cs @@ -9,7 +9,7 @@ namespace Microsoft.ML { - using LROptions = LogisticRegressionBinaryClassificationTrainer.Options; + using LROptions = LogisticRegressionBinaryTrainer.Options; /// /// TrainerEstimator extension methods. @@ -181,7 +181,7 @@ public static SdcaRegressionTrainer Sdca(this RegressionCatalog.RegressionTraine } /// - /// Predict a target using a linear classification model trained with . + /// Predict a target using a linear classification model trained with . /// /// The binary classification catalog trainer object. /// The name of the label column. @@ -196,7 +196,7 @@ public static SdcaRegressionTrainer Sdca(this RegressionCatalog.RegressionTraine /// [!code-csharp[SDCA](~/../docs/samples/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/StochasticDualCoordinateAscent.cs)] /// ]]> /// - public static SdcaCalibratedBinaryClassificationTrainer SdcaCalibrated( + public static SdcaCalibratedBinaryTrainer SdcaCalibrated( this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, string labelColumnName = DefaultColumnNames.Label, string featureColumnName = DefaultColumnNames.Features, @@ -207,11 +207,11 @@ public static SdcaCalibratedBinaryClassificationTrainer SdcaCalibrated( { Contracts.CheckValue(catalog, nameof(catalog)); var env = CatalogUtils.GetEnvironment(catalog); - return new SdcaCalibratedBinaryClassificationTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, l2Regularization, l1Threshold, maximumNumberOfIterations); + return new SdcaCalibratedBinaryTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, l2Regularization, l1Threshold, maximumNumberOfIterations); } /// - /// Predict a target using a linear classification model trained with and advanced options. 
+ /// Predict a target using a linear classification model trained with and advanced options. /// /// The binary classification catalog trainer object. /// Trainer options. @@ -221,19 +221,19 @@ public static SdcaCalibratedBinaryClassificationTrainer SdcaCalibrated( /// [!code-csharp[SDCA](~/../docs/samples/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/StochasticDualCoordinateAscentWithOptions.cs)] /// ]]> /// - public static SdcaCalibratedBinaryClassificationTrainer SdcaCalibrated( + public static SdcaCalibratedBinaryTrainer SdcaCalibrated( this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, - SdcaCalibratedBinaryClassificationTrainer.Options options) + SdcaCalibratedBinaryTrainer.Options options) { Contracts.CheckValue(catalog, nameof(catalog)); Contracts.CheckValue(options, nameof(options)); var env = CatalogUtils.GetEnvironment(catalog); - return new SdcaCalibratedBinaryClassificationTrainer(env, options); + return new SdcaCalibratedBinaryTrainer(env, options); } /// - /// Predict a target using a linear classification model trained with . + /// Predict a target using a linear classification model trained with . /// /// The binary classification catalog trainer object. /// The name of the label column. @@ -249,7 +249,7 @@ public static SdcaCalibratedBinaryClassificationTrainer SdcaCalibrated( /// [!code-csharp[SDCA](~/../docs/samples/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/StochasticDualCoordinateAscentNonCalibrated.cs)] /// ]]> /// - public static SdcaNonCalibratedBinaryClassificationTrainer SdcaNonCalibrated( + public static SdcaNonCalibratedBinaryTrainer SdcaNonCalibrated( this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, string labelColumnName = DefaultColumnNames.Label, string featureColumnName = DefaultColumnNames.Features, @@ -261,27 +261,27 @@ public static SdcaNonCalibratedBinaryClassificationTrainer SdcaNonCalibrated( { Contracts.CheckValue(catalog, nameof(catalog)); var env = CatalogUtils.GetEnvironment(catalog); - return new SdcaNonCalibratedBinaryClassificationTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, loss, l2Regularization, l1Threshold, maximumNumberOfIterations); + return new SdcaNonCalibratedBinaryTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, loss, l2Regularization, l1Threshold, maximumNumberOfIterations); } /// - /// Predict a target using a linear classification model trained with and advanced options. + /// Predict a target using a linear classification model trained with and advanced options. /// /// The binary classification catalog trainer object. /// Trainer options. - public static SdcaNonCalibratedBinaryClassificationTrainer SdcaNonCalibrated( + public static SdcaNonCalibratedBinaryTrainer SdcaNonCalibrated( this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, - SdcaNonCalibratedBinaryClassificationTrainer.Options options) + SdcaNonCalibratedBinaryTrainer.Options options) { Contracts.CheckValue(catalog, nameof(catalog)); Contracts.CheckValue(options, nameof(options)); var env = CatalogUtils.GetEnvironment(catalog); - return new SdcaNonCalibratedBinaryClassificationTrainer(env, options); + return new SdcaNonCalibratedBinaryTrainer(env, options); } /// - /// Predict a target using a linear multiclass classification model trained with . + /// Predict a target using a linear multiclass classification model trained with . /// /// The multiclass classification catalog trainer object. 
/// The name of the label column. @@ -297,7 +297,7 @@ public static SdcaNonCalibratedBinaryClassificationTrainer SdcaNonCalibrated( /// [!code-csharp[SDCA](~/../docs/samples/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/StochasticDualCoordinateAscent.cs)] /// ]]> /// - public static SdcaMulticlassClassificationTrainer Sdca(this MulticlassClassificationCatalog.MulticlassClassificationTrainers catalog, + public static SdcaMulticlassTrainer Sdca(this MulticlassClassificationCatalog.MulticlassClassificationTrainers catalog, string labelColumnName = DefaultColumnNames.Label, string featureColumnName = DefaultColumnNames.Features, string exampleWeightColumnName = null, @@ -308,11 +308,11 @@ public static SdcaMulticlassClassificationTrainer Sdca(this MulticlassClassifica { Contracts.CheckValue(catalog, nameof(catalog)); var env = CatalogUtils.GetEnvironment(catalog); - return new SdcaMulticlassClassificationTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, loss, l2Regularization, l1Threshold, maximumNumberOfIterations); + return new SdcaMulticlassTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, loss, l2Regularization, l1Threshold, maximumNumberOfIterations); } /// - /// Predict a target using a linear multiclass classification model trained with and advanced options. + /// Predict a target using a linear multiclass classification model trained with and advanced options. /// /// The multiclass classification catalog trainer object. /// Trainer options. @@ -322,14 +322,14 @@ public static SdcaMulticlassClassificationTrainer Sdca(this MulticlassClassifica /// [!code-csharp[SDCA](~/../docs/samples/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/MulticlassClassification/StochasticDualCoordinateAscentWithOptions.cs)] /// ]]> /// - public static SdcaMulticlassClassificationTrainer Sdca(this MulticlassClassificationCatalog.MulticlassClassificationTrainers catalog, - SdcaMulticlassClassificationTrainer.Options options) + public static SdcaMulticlassTrainer Sdca(this MulticlassClassificationCatalog.MulticlassClassificationTrainers catalog, + SdcaMulticlassTrainer.Options options) { Contracts.CheckValue(catalog, nameof(catalog)); Contracts.CheckValue(options, nameof(options)); var env = CatalogUtils.GetEnvironment(catalog); - return new SdcaMulticlassClassificationTrainer(env, options); + return new SdcaMulticlassTrainer(env, options); } /// @@ -448,7 +448,7 @@ public static OnlineGradientDescentTrainer OnlineGradientDescent(this Regression } /// - /// Predict a target using a linear binary classification model trained with the trainer. + /// Predict a target using a linear binary classification model trained with the trainer. /// /// The binary classification catalog trainer object. /// The name of the label column. @@ -457,7 +457,7 @@ public static OnlineGradientDescentTrainer OnlineGradientDescent(this Regression /// Enforce non-negative weights. /// Weight of L1 regularization term. /// Weight of L2 regularization term. - /// Memory size for . Low=faster, less accurate. + /// Memory size for . Low=faster, less accurate. /// Threshold for optimizer convergence. 
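// --- Illustration (not part of the diff above): a sketch of the renamed SdcaMulticlassTrainer
// --- through the multiclass Sdca catalog extension shown above; assumes a string "Label"
// --- column that is first converted to a key type.
using Microsoft.ML;

public static class SdcaMulticlassRenameSketch
{
    public static void Example(MLContext mlContext, IDataView trainingData)
    {
        var pipeline = mlContext.Transforms.Conversion.MapValueToKey("Label")
            .Append(mlContext.MulticlassClassification.Trainers.Sdca(
                labelColumnName: "Label",
                featureColumnName: "Features",
                l2Regularization: 0.01f,
                maximumNumberOfIterations: 30));

        var model = pipeline.Fit(trainingData);
    }
}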
/// /// @@ -466,7 +466,7 @@ public static OnlineGradientDescentTrainer OnlineGradientDescent(this Regression /// ]]> /// /// - public static LogisticRegressionBinaryClassificationTrainer LogisticRegression(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, + public static LogisticRegressionBinaryTrainer LogisticRegression(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, string labelColumnName = DefaultColumnNames.Label, string featureColumnName = DefaultColumnNames.Features, string exampleWeightColumnName = null, @@ -478,21 +478,21 @@ public static LogisticRegressionBinaryClassificationTrainer LogisticRegression(t { Contracts.CheckValue(catalog, nameof(catalog)); var env = CatalogUtils.GetEnvironment(catalog); - return new LogisticRegressionBinaryClassificationTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, l1Regularization, l2Regularization, optimizationTolerance, historySize, enforceNonNegativity); + return new LogisticRegressionBinaryTrainer(env, labelColumnName, featureColumnName, exampleWeightColumnName, l1Regularization, l2Regularization, optimizationTolerance, historySize, enforceNonNegativity); } /// - /// Predict a target using a linear binary classification model trained with the trainer. + /// Predict a target using a linear binary classification model trained with the trainer. /// /// The binary classification catalog trainer object. /// Advanced arguments to the algorithm. - public static LogisticRegressionBinaryClassificationTrainer LogisticRegression(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, LROptions options) + public static LogisticRegressionBinaryTrainer LogisticRegression(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, LROptions options) { Contracts.CheckValue(catalog, nameof(catalog)); Contracts.CheckValue(options, nameof(options)); var env = CatalogUtils.GetEnvironment(catalog); - return new LogisticRegressionBinaryClassificationTrainer(env, options); + return new LogisticRegressionBinaryTrainer(env, options); } /// @@ -579,18 +579,18 @@ public static LogisticRegressionMulticlassClassificationTrainer LogisticRegressi } /// - /// Predicts a target using a linear multiclass classification model trained with the . - /// The trains a multiclass Naive Bayes predictor that supports binary feature values. + /// Predicts a target using a linear multiclass classification model trained with the . + /// The trains a multiclass Naive Bayes predictor that supports binary feature values. /// /// The . /// The name of the label column. /// The name of the feature column. 
- public static NaiveBayesTrainer NaiveBayes(this MulticlassClassificationCatalog.MulticlassClassificationTrainers catalog, + public static NaiveBayesMulticlassTrainer NaiveBayes(this MulticlassClassificationCatalog.MulticlassClassificationTrainers catalog, string labelColumnName = DefaultColumnNames.Label, string featureColumnName = DefaultColumnNames.Features) { Contracts.CheckValue(catalog, nameof(catalog)); - return new NaiveBayesTrainer(CatalogUtils.GetEnvironment(catalog), labelColumnName, featureColumnName); + return new NaiveBayesMulticlassTrainer(CatalogUtils.GetEnvironment(catalog), labelColumnName, featureColumnName); } /// diff --git a/src/Microsoft.ML.StaticPipe/LbfgsStatic.cs b/src/Microsoft.ML.StaticPipe/LbfgsStatic.cs index ad2f3aeb16..13c2554079 100644 --- a/src/Microsoft.ML.StaticPipe/LbfgsStatic.cs +++ b/src/Microsoft.ML.StaticPipe/LbfgsStatic.cs @@ -9,7 +9,7 @@ namespace Microsoft.ML.StaticPipe { - using Options = LogisticRegressionBinaryClassificationTrainer.Options; + using Options = LogisticRegressionBinaryTrainer.Options; /// /// Binary Classification trainer estimators. @@ -17,7 +17,7 @@ namespace Microsoft.ML.StaticPipe public static class LbfgsBinaryClassificationStaticExtensions { /// - /// Predict a target using a linear binary classification model trained with the trainer. + /// Predict a target using a linear binary classification model trained with the trainer. /// /// The binary classification catalog trainer object. /// The label, or dependent variable. @@ -26,7 +26,7 @@ public static class LbfgsBinaryClassificationStaticExtensions /// Enforce non-negative weights. /// Weight of L1 regularization term. /// Weight of L2 regularization term. - /// Memory size for . Low=faster, less accurate. + /// Memory size for . Low=faster, less accurate. /// Threshold for optimizer convergence. /// A delegate that is called every time the /// method is called on the @@ -50,7 +50,7 @@ public static (Scalar score, Scalar probability, Scalar pred var rec = new TrainerEstimatorReconciler.BinaryClassifier( (env, labelName, featuresName, weightsName) => { - var trainer = new LogisticRegressionBinaryClassificationTrainer(env, labelName, featuresName, weightsName, + var trainer = new LogisticRegressionBinaryTrainer(env, labelName, featuresName, weightsName, l1Regularization, l2Regularization, optimizationTolerance, historySize, enforceNonNegativity); if (onFit != null) @@ -63,7 +63,7 @@ public static (Scalar score, Scalar probability, Scalar pred } /// - /// Predict a target using a linear binary classification model trained with the trainer. + /// Predict a target using a linear binary classification model trained with the trainer. /// /// The binary classification catalog trainer object. /// The label, or dependent variable. @@ -95,7 +95,7 @@ public static (Scalar score, Scalar probability, Scalar pred options.FeatureColumnName = featuresName; options.ExampleWeightColumnName = weightsName; - var trainer = new LogisticRegressionBinaryClassificationTrainer(env, options); + var trainer = new LogisticRegressionBinaryTrainer(env, options); if (onFit != null) return trainer.WithOnFitDelegate(trans => onFit(trans.Model)); @@ -113,7 +113,7 @@ public static (Scalar score, Scalar probability, Scalar pred public static class LbfgsRegressionExtensions { /// - /// Predict a target using a linear regression model trained with the trainer. + /// Predict a target using a linear regression model trained with the trainer. /// /// The regression catalog trainer object. 
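// --- Illustration (not part of the diff above): a sketch of the renamed
// --- NaiveBayesMulticlassTrainer via the NaiveBayes catalog extension above. The trainer
// --- supports binary feature values; the feature column is assumed to already be binary.
using Microsoft.ML;

public static class NaiveBayesRenameSketch
{
    public static void Example(MLContext mlContext, IDataView trainingData)
    {
        var pipeline = mlContext.Transforms.Conversion.MapValueToKey("Label")
            .Append(mlContext.MulticlassClassification.Trainers.NaiveBayes(
                labelColumnName: "Label", featureColumnName: "Features"));

        // The chain ends in a MulticlassPredictionTransformer<NaiveBayesMulticlassModelParameters>.
        var model = pipeline.Fit(trainingData);
    }
}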
/// The label, or dependent variable. @@ -122,7 +122,7 @@ public static class LbfgsRegressionExtensions /// Enforce non-negative weights. /// Weight of L1 regularization term. /// Weight of L2 regularization term. - /// Memory size for . Low=faster, less accurate. + /// Memory size for . Low=faster, less accurate. /// Threshold for optimizer convergence. /// A delegate that is called every time the /// method is called on the @@ -159,7 +159,7 @@ public static Scalar PoissonRegression(this RegressionCatalog.RegressionT } /// - /// Predict a target using a linear regression model trained with the trainer. + /// Predict a target using a linear regression model trained with the trainer. /// /// The regression catalog trainer object. /// The label, or dependent variable. @@ -218,7 +218,7 @@ public static class LbfgsMulticlassExtensions /// Enforce non-negative weights. /// Weight of L1 regularization term. /// Weight of L2 regularization term. - /// Memory size for . Low=faster, less accurate. + /// Memory size for . Low=faster, less accurate. /// Threshold for optimizer convergence. /// A delegate that is called every time the /// method is called on the diff --git a/src/Microsoft.ML.StaticPipe/MulticlassNaiveBayesStatic.cs b/src/Microsoft.ML.StaticPipe/MulticlassNaiveBayesStatic.cs index ff921f36bb..1c65d58fd6 100644 --- a/src/Microsoft.ML.StaticPipe/MulticlassNaiveBayesStatic.cs +++ b/src/Microsoft.ML.StaticPipe/MulticlassNaiveBayesStatic.cs @@ -29,7 +29,7 @@ public static (Vector score, Key predictedLabel) MulticlassNaiveBayesTrainer(this MulticlassClassificationCatalog.MulticlassClassificationTrainers catalog, Key label, Vector features, - Action onFit = null) + Action onFit = null) { Contracts.CheckValue(features, nameof(features)); Contracts.CheckValue(label, nameof(label)); @@ -38,7 +38,7 @@ public static (Vector score, Key predictedLabel) var rec = new TrainerEstimatorReconciler.MulticlassClassificationReconciler( (env, labelName, featuresName, weightsName) => { - var trainer = new NaiveBayesTrainer(env, labelName, featuresName); + var trainer = new NaiveBayesMulticlassTrainer(env, labelName, featuresName); if (onFit != null) return trainer.WithOnFitDelegate(trans => onFit(trans.Model)); diff --git a/src/Microsoft.ML.StaticPipe/SdcaStaticExtensions.cs b/src/Microsoft.ML.StaticPipe/SdcaStaticExtensions.cs index fdab1d19e0..08468f93f3 100644 --- a/src/Microsoft.ML.StaticPipe/SdcaStaticExtensions.cs +++ b/src/Microsoft.ML.StaticPipe/SdcaStaticExtensions.cs @@ -154,7 +154,7 @@ public static (Scalar score, Scalar probability, Scalar pred var rec = new TrainerEstimatorReconciler.BinaryClassifier( (env, labelName, featuresName, weightsName) => { - var trainer = new SdcaCalibratedBinaryClassificationTrainer(env, labelName, featuresName, weightsName, l2Regularization, l1Threshold, numberOfIterations); + var trainer = new SdcaCalibratedBinaryTrainer(env, labelName, featuresName, weightsName, l2Regularization, l1Threshold, numberOfIterations); if (onFit != null) { return trainer.WithOnFitDelegate(trans => @@ -192,7 +192,7 @@ public static (Scalar score, Scalar probability, Scalar pred public static (Scalar score, Scalar probability, Scalar predictedLabel) Sdca( this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, Scalar label, Vector features, Scalar weights, - SdcaCalibratedBinaryClassificationTrainer.Options options, + SdcaCalibratedBinaryTrainer.Options options, Action> onFit = null) { Contracts.CheckValue(label, nameof(label)); @@ -207,7 +207,7 @@ public static (Scalar 
score, Scalar probability, Scalar pred options.LabelColumnName = labelName; options.FeatureColumnName = featuresName; - var trainer = new SdcaCalibratedBinaryClassificationTrainer(env, options); + var trainer = new SdcaCalibratedBinaryTrainer(env, options); if (onFit != null) { return trainer.WithOnFitDelegate(trans => @@ -263,7 +263,7 @@ public static (Scalar score, Scalar predictedLabel) SdcaNonCalibrat var rec = new TrainerEstimatorReconciler.BinaryClassifierNoCalibration( (env, labelName, featuresName, weightsName) => { - var trainer = new SdcaNonCalibratedBinaryClassificationTrainer(env, labelName, featuresName, weightsName, loss, l2Regularization, l1Threshold, numberOfIterations); + var trainer = new SdcaNonCalibratedBinaryTrainer(env, labelName, featuresName, weightsName, loss, l2Regularization, l1Threshold, numberOfIterations); if (onFit != null) { return trainer.WithOnFitDelegate(trans => @@ -299,7 +299,7 @@ public static (Scalar score, Scalar predictedLabel) SdcaNonCalibrat this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, Scalar label, Vector features, Scalar weights, ISupportSdcaClassificationLoss loss, - SdcaNonCalibratedBinaryClassificationTrainer.Options options, + SdcaNonCalibratedBinaryTrainer.Options options, Action onFit = null) { Contracts.CheckValue(label, nameof(label)); @@ -314,7 +314,7 @@ public static (Scalar score, Scalar predictedLabel) SdcaNonCalibrat options.FeatureColumnName = featuresName; options.LabelColumnName = labelName; - var trainer = new SdcaNonCalibratedBinaryClassificationTrainer(env, options); + var trainer = new SdcaNonCalibratedBinaryTrainer(env, options); if (onFit != null) { return trainer.WithOnFitDelegate(trans => @@ -368,7 +368,7 @@ public static (Vector score, Key predictedLabel) Sdca( var rec = new TrainerEstimatorReconciler.MulticlassClassificationReconciler( (env, labelName, featuresName, weightsName) => { - var trainer = new SdcaMulticlassClassificationTrainer(env, labelName, featuresName, weightsName, loss, l2Regularization, l1Threshold, numberOfIterations); + var trainer = new SdcaMulticlassTrainer(env, labelName, featuresName, weightsName, loss, l2Regularization, l1Threshold, numberOfIterations); if (onFit != null) return trainer.WithOnFitDelegate(trans => onFit(trans.Model)); return trainer; @@ -396,7 +396,7 @@ public static (Vector score, Key predictedLabel) Sdca( Key label, Vector features, Scalar weights, - SdcaMulticlassClassificationTrainer.Options options, + SdcaMulticlassTrainer.Options options, Action onFit = null) { Contracts.CheckValue(label, nameof(label)); @@ -411,7 +411,7 @@ public static (Vector score, Key predictedLabel) Sdca( options.LabelColumnName = labelName; options.FeatureColumnName = featuresName; - var trainer = new SdcaMulticlassClassificationTrainer(env, options); + var trainer = new SdcaMulticlassTrainer(env, options); if (onFit != null) return trainer.WithOnFitDelegate(trans => onFit(trans.Model)); return trainer; diff --git a/src/Microsoft.ML.StaticPipe/TreeTrainersStatic.cs b/src/Microsoft.ML.StaticPipe/TreeTrainersStatic.cs index 2d55af198b..bb9ef3514f 100644 --- a/src/Microsoft.ML.StaticPipe/TreeTrainersStatic.cs +++ b/src/Microsoft.ML.StaticPipe/TreeTrainersStatic.cs @@ -108,7 +108,7 @@ public static Scalar FastTree(this RegressionCatalog.RegressionTrainers c /// /// FastTree extension method. - /// Predict a target using a decision tree binary classification model trained with the . + /// Predict a target using a decision tree binary classification model trained with the . 
/// /// The . /// The label column. @@ -144,7 +144,7 @@ public static (Scalar score, Scalar probability, Scalar pred var rec = new TrainerEstimatorReconciler.BinaryClassifier( (env, labelName, featuresName, weightsName) => { - var trainer = new FastTreeBinaryClassificationTrainer(env, labelName, featuresName, weightsName, numberOfLeaves, + var trainer = new FastTreeBinaryTrainer(env, labelName, featuresName, weightsName, numberOfLeaves, numberOfTrees, minimumExampleCountPerLeaf, learningRate); if (onFit != null) @@ -158,7 +158,7 @@ public static (Scalar score, Scalar probability, Scalar pred /// /// FastTree extension method. - /// Predict a target using a decision tree binary classification model trained with the . + /// Predict a target using a decision tree binary classification model trained with the . /// /// The . /// The label column. @@ -180,7 +180,7 @@ public static (Scalar score, Scalar probability, Scalar pred /// public static (Scalar score, Scalar probability, Scalar predictedLabel) FastTree(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, Scalar label, Vector features, Scalar weights, - FastTreeBinaryClassificationTrainer.Options options, + FastTreeBinaryTrainer.Options options, Action> onFit = null) { Contracts.CheckValueOrNull(options); @@ -193,7 +193,7 @@ public static (Scalar score, Scalar probability, Scalar pred options.FeatureColumnName = featuresName; options.ExampleWeightColumnName = weightsName; - var trainer = new FastTreeBinaryClassificationTrainer(env, options); + var trainer = new FastTreeBinaryTrainer(env, options); if (onFit != null) return trainer.WithOnFitDelegate(trans => onFit(trans.Model)); diff --git a/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv b/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv index 28834b1332..af4afd7cfc 100644 --- a/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv +++ b/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv @@ -43,14 +43,14 @@ Trainers.AveragedPerceptronBinaryClassifier Averaged Perceptron Binary Classifie Trainers.EnsembleBinaryClassifier Train binary ensemble. Microsoft.ML.Trainers.Ensemble.Ensemble CreateBinaryEnsemble Microsoft.ML.Trainers.Ensemble.EnsembleTrainer+Arguments Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput Trainers.EnsembleClassification Train multiclass ensemble. Microsoft.ML.Trainers.Ensemble.Ensemble CreateMulticlassEnsemble Microsoft.ML.Trainers.Ensemble.MulticlassDataPartitionEnsembleTrainer+Arguments Microsoft.ML.EntryPoints.CommonOutputs+MulticlassClassificationOutput Trainers.EnsembleRegression Train regression ensemble. Microsoft.ML.Trainers.Ensemble.Ensemble CreateRegressionEnsemble Microsoft.ML.Trainers.Ensemble.RegressionEnsembleTrainer+Arguments Microsoft.ML.EntryPoints.CommonOutputs+RegressionOutput -Trainers.FastForestBinaryClassifier Uses a random forest learner to perform binary classification. Microsoft.ML.Trainers.FastTree.FastForest TrainBinary Microsoft.ML.Trainers.FastTree.FastForestBinaryClassificationTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput +Trainers.FastForestBinaryClassifier Uses a random forest learner to perform binary classification. Microsoft.ML.Trainers.FastTree.FastForest TrainBinary Microsoft.ML.Trainers.FastTree.FastForestBinaryTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput Trainers.FastForestRegressor Trains a random forest to fit target values using least-squares. 
Microsoft.ML.Trainers.FastTree.FastForest TrainRegression Microsoft.ML.Trainers.FastTree.FastForestRegressionTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+RegressionOutput -Trainers.FastTreeBinaryClassifier Uses a logit-boost boosted tree learner to perform binary classification. Microsoft.ML.Trainers.FastTree.FastTree TrainBinary Microsoft.ML.Trainers.FastTree.FastTreeBinaryClassificationTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput +Trainers.FastTreeBinaryClassifier Uses a logit-boost boosted tree learner to perform binary classification. Microsoft.ML.Trainers.FastTree.FastTree TrainBinary Microsoft.ML.Trainers.FastTree.FastTreeBinaryTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput Trainers.FastTreeRanker Trains gradient boosted decision trees to the LambdaRank quasi-gradient. Microsoft.ML.Trainers.FastTree.FastTree TrainRanking Microsoft.ML.Trainers.FastTree.FastTreeRankingTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+RankingOutput Trainers.FastTreeRegressor Trains gradient boosted decision trees to fit target values using least-squares. Microsoft.ML.Trainers.FastTree.FastTree TrainRegression Microsoft.ML.Trainers.FastTree.FastTreeRegressionTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+RegressionOutput Trainers.FastTreeTweedieRegressor Trains gradient boosted decision trees to fit target values using a Tweedie loss function. This learner is a generalization of Poisson, compound Poisson, and gamma regression. Microsoft.ML.Trainers.FastTree.FastTree TrainTweedieRegression Microsoft.ML.Trainers.FastTree.FastTreeTweedieTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+RegressionOutput Trainers.FieldAwareFactorizationMachineBinaryClassifier Train a field-aware factorization machine for binary classification Microsoft.ML.Trainers.FieldAwareFactorizationMachineTrainer TrainBinary Microsoft.ML.Trainers.FieldAwareFactorizationMachineTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput -Trainers.GeneralizedAdditiveModelBinaryClassifier Trains a gradient boosted stump per feature, on all features simultaneously, to fit target values using least-squares. It mantains no interactions between features. Microsoft.ML.Trainers.FastTree.Gam TrainBinary Microsoft.ML.Trainers.FastTree.GamBinaryClassificationTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput +Trainers.GeneralizedAdditiveModelBinaryClassifier Trains a gradient boosted stump per feature, on all features simultaneously, to fit target values using least-squares. It mantains no interactions between features. Microsoft.ML.Trainers.FastTree.Gam TrainBinary Microsoft.ML.Trainers.FastTree.GamBinaryTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput Trainers.GeneralizedAdditiveModelRegressor Trains a gradient boosted stump per feature, on all features simultaneously, to fit target values using least-squares. It mantains no interactions between features. Microsoft.ML.Trainers.FastTree.Gam TrainRegression Microsoft.ML.Trainers.FastTree.GamRegressionTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+RegressionOutput Trainers.KMeansPlusPlusClusterer K-means is a popular clustering algorithm. With K-means, the data is clustered into a specified number of clusters in order to minimize the within-cluster sum of squares. K-means++ improves upon K-means by using a better method for choosing the initial cluster centers. 
Microsoft.ML.Trainers.KMeansTrainer TrainKMeans Microsoft.ML.Trainers.KMeansTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+ClusteringOutput Trainers.LightGbmBinaryClassifier Train a LightGBM binary classification model. Microsoft.ML.Trainers.LightGbm.LightGbm TrainBinary Microsoft.ML.Trainers.LightGbm.Options Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput @@ -58,15 +58,15 @@ Trainers.LightGbmClassifier Train a LightGBM multi class model. Microsoft.ML.Tra Trainers.LightGbmRanker Train a LightGBM ranking model. Microsoft.ML.Trainers.LightGbm.LightGbm TrainRanking Microsoft.ML.Trainers.LightGbm.Options Microsoft.ML.EntryPoints.CommonOutputs+RankingOutput Trainers.LightGbmRegressor LightGBM Regression Microsoft.ML.Trainers.LightGbm.LightGbm TrainRegression Microsoft.ML.Trainers.LightGbm.Options Microsoft.ML.EntryPoints.CommonOutputs+RegressionOutput Trainers.LinearSvmBinaryClassifier Train a linear SVM. Microsoft.ML.Trainers.LinearSvmTrainer TrainLinearSvm Microsoft.ML.Trainers.LinearSvmTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput -Trainers.LogisticRegressionBinaryClassifier Logistic Regression is a method in statistics used to predict the probability of occurrence of an event and can be used as a classification algorithm. The algorithm predicts the probability of occurrence of an event by fitting data to a logistical function. Microsoft.ML.Trainers.LogisticRegressionBinaryClassificationTrainer TrainBinary Microsoft.ML.Trainers.LogisticRegressionBinaryClassificationTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput -Trainers.LogisticRegressionClassifier Logistic Regression is a method in statistics used to predict the probability of occurrence of an event and can be used as a classification algorithm. The algorithm predicts the probability of occurrence of an event by fitting data to a logistical function. Microsoft.ML.Trainers.LogisticRegressionBinaryClassificationTrainer TrainMulticlass Microsoft.ML.Trainers.LogisticRegressionMulticlassClassificationTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+MulticlassClassificationOutput -Trainers.NaiveBayesClassifier Train a MulticlassNaiveBayesTrainer. Microsoft.ML.Trainers.NaiveBayesTrainer TrainMulticlassNaiveBayesTrainer Microsoft.ML.Trainers.NaiveBayesTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+MulticlassClassificationOutput +Trainers.LogisticRegressionBinaryClassifier Logistic Regression is a method in statistics used to predict the probability of occurrence of an event and can be used as a classification algorithm. The algorithm predicts the probability of occurrence of an event by fitting data to a logistical function. Microsoft.ML.Trainers.LogisticRegressionBinaryTrainer TrainBinary Microsoft.ML.Trainers.LogisticRegressionBinaryTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput +Trainers.LogisticRegressionClassifier Logistic Regression is a method in statistics used to predict the probability of occurrence of an event and can be used as a classification algorithm. The algorithm predicts the probability of occurrence of an event by fitting data to a logistical function. Microsoft.ML.Trainers.LogisticRegressionBinaryTrainer TrainMulticlass Microsoft.ML.Trainers.LogisticRegressionMulticlassClassificationTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+MulticlassClassificationOutput +Trainers.NaiveBayesClassifier Train a MulticlassNaiveBayesTrainer. 
Microsoft.ML.Trainers.NaiveBayesMulticlassTrainer TrainMulticlassNaiveBayesTrainer Microsoft.ML.Trainers.NaiveBayesMulticlassTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+MulticlassClassificationOutput Trainers.OnlineGradientDescentRegressor Train a Online gradient descent perceptron. Microsoft.ML.Trainers.OnlineGradientDescentTrainer TrainRegression Microsoft.ML.Trainers.OnlineGradientDescentTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+RegressionOutput Trainers.OrdinaryLeastSquaresRegressor Train an OLS regression model. Microsoft.ML.Trainers.OlsTrainer TrainRegression Microsoft.ML.Trainers.OlsTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+RegressionOutput Trainers.PcaAnomalyDetector Train an PCA Anomaly model. Microsoft.ML.Trainers.RandomizedPcaTrainer TrainPcaAnomaly Microsoft.ML.Trainers.RandomizedPcaTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+AnomalyDetectionOutput Trainers.PoissonRegressor Train an Poisson regression model. Microsoft.ML.Trainers.PoissonRegressionTrainer TrainRegression Microsoft.ML.Trainers.PoissonRegressionTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+RegressionOutput Trainers.StochasticDualCoordinateAscentBinaryClassifier Train an SDCA binary model. Microsoft.ML.Trainers.Sdca TrainBinary Microsoft.ML.Trainers.LegacySdcaBinaryTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput -Trainers.StochasticDualCoordinateAscentClassifier The SDCA linear multi-class classification trainer. Microsoft.ML.Trainers.Sdca TrainMulticlass Microsoft.ML.Trainers.SdcaMulticlassClassificationTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+MulticlassClassificationOutput +Trainers.StochasticDualCoordinateAscentClassifier The SDCA linear multi-class classification trainer. Microsoft.ML.Trainers.Sdca TrainMulticlass Microsoft.ML.Trainers.SdcaMulticlassTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+MulticlassClassificationOutput Trainers.StochasticDualCoordinateAscentRegressor The SDCA linear regression trainer. Microsoft.ML.Trainers.Sdca TrainRegression Microsoft.ML.Trainers.SdcaRegressionTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+RegressionOutput Trainers.StochasticGradientDescentBinaryClassifier Train an Hogwild SGD binary model. Microsoft.ML.Trainers.LegacySgdBinaryTrainer TrainBinary Microsoft.ML.Trainers.LegacySgdBinaryTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput Trainers.SymSgdBinaryClassifier Train a symbolic SGD. Microsoft.ML.Trainers.SymbolicSgdTrainer TrainSymSgd Microsoft.ML.Trainers.SymbolicSgdTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput diff --git a/test/BaselineOutput/Common/EntryPoints/core_manifest.json b/test/BaselineOutput/Common/EntryPoints/core_manifest.json index 359ea764a4..c358a26e17 100644 --- a/test/BaselineOutput/Common/EntryPoints/core_manifest.json +++ b/test/BaselineOutput/Common/EntryPoints/core_manifest.json @@ -11356,7 +11356,7 @@ { "Name": "Sigmoid", "Type": "Float", - "Desc": "Parameter for the sigmoid function. Used only in LightGbmBinaryClassificationTrainer, LightGbmMulticlassClassificationTrainer and in LightGbmRankingTrainer.", + "Desc": "Parameter for the sigmoid function. Used only in LightGbmBinaryTrainer, LightGbmMulticlassTrainer and in LightGbmRankingTrainer.", "Aliases": [ "sigmoid" ], @@ -11859,7 +11859,7 @@ { "Name": "Sigmoid", "Type": "Float", - "Desc": "Parameter for the sigmoid function. 
Used only in LightGbmBinaryClassificationTrainer, LightGbmMulticlassClassificationTrainer and in LightGbmRankingTrainer.", + "Desc": "Parameter for the sigmoid function. Used only in LightGbmBinaryTrainer, LightGbmMulticlassTrainer and in LightGbmRankingTrainer.", "Aliases": [ "sigmoid" ], @@ -12362,7 +12362,7 @@ { "Name": "Sigmoid", "Type": "Float", - "Desc": "Parameter for the sigmoid function. Used only in LightGbmBinaryClassificationTrainer, LightGbmMulticlassClassificationTrainer and in LightGbmRankingTrainer.", + "Desc": "Parameter for the sigmoid function. Used only in LightGbmBinaryTrainer, LightGbmMulticlassTrainer and in LightGbmRankingTrainer.", "Aliases": [ "sigmoid" ], @@ -12865,7 +12865,7 @@ { "Name": "Sigmoid", "Type": "Float", - "Desc": "Parameter for the sigmoid function. Used only in LightGbmBinaryClassificationTrainer, LightGbmMulticlassClassificationTrainer and in LightGbmRankingTrainer.", + "Desc": "Parameter for the sigmoid function. Used only in LightGbmBinaryTrainer, LightGbmMulticlassTrainer and in LightGbmRankingTrainer.", "Aliases": [ "sigmoid" ], diff --git a/test/Microsoft.ML.Benchmarks/KMeansAndLogisticRegressionBench.cs b/test/Microsoft.ML.Benchmarks/KMeansAndLogisticRegressionBench.cs index 7f568adca6..de0b3a8cdb 100644 --- a/test/Microsoft.ML.Benchmarks/KMeansAndLogisticRegressionBench.cs +++ b/test/Microsoft.ML.Benchmarks/KMeansAndLogisticRegressionBench.cs @@ -40,7 +40,7 @@ public CalibratedModelParametersBase(); + var environment = EnvironmentFactory.CreateRankingEnvironment(); cmd.ExecuteMamlCommand(environment); } } diff --git a/test/Microsoft.ML.Benchmarks/PredictionEngineBench.cs b/test/Microsoft.ML.Benchmarks/PredictionEngineBench.cs index 29aa387ce0..da4f343404 100644 --- a/test/Microsoft.ML.Benchmarks/PredictionEngineBench.cs +++ b/test/Microsoft.ML.Benchmarks/PredictionEngineBench.cs @@ -58,7 +58,7 @@ public void SetupIrisPipeline() var pipeline = new ColumnConcatenatingEstimator(env, "Features", new[] { "SepalLength", "SepalWidth", "PetalLength", "PetalWidth" }) .Append(env.Transforms.Conversion.MapValueToKey("Label")) .Append(env.MulticlassClassification.Trainers.Sdca( - new SdcaMulticlassClassificationTrainer.Options { NumberOfThreads = 1, ConvergenceTolerance = 1e-2f, })); + new SdcaMulticlassTrainer.Options { NumberOfThreads = 1, ConvergenceTolerance = 1e-2f, })); var model = pipeline.Fit(data); @@ -93,7 +93,7 @@ public void SetupSentimentPipeline() var pipeline = mlContext.Transforms.Text.FeaturizeText("Features", "SentimentText") .Append(mlContext.BinaryClassification.Trainers.SdcaNonCalibrated( - new SdcaNonCalibratedBinaryClassificationTrainer.Options { NumberOfThreads = 1, ConvergenceTolerance = 1e-2f, })); + new SdcaNonCalibratedBinaryTrainer.Options { NumberOfThreads = 1, ConvergenceTolerance = 1e-2f, })); var model = pipeline.Fit(data); @@ -127,7 +127,7 @@ public void SetupBreastCancerPipeline() IDataView data = loader.Load(_breastCancerDataPath); var pipeline = env.BinaryClassification.Trainers.SdcaNonCalibrated( - new SdcaNonCalibratedBinaryClassificationTrainer.Options { NumberOfThreads = 1, ConvergenceTolerance = 1e-2f, }); + new SdcaNonCalibratedBinaryTrainer.Options { NumberOfThreads = 1, ConvergenceTolerance = 1e-2f, }); var model = pipeline.Fit(data); diff --git a/test/Microsoft.ML.Benchmarks/Text/MultiClassClassification.cs b/test/Microsoft.ML.Benchmarks/Text/MultiClassClassification.cs index 113ccad2f4..e79916bdc0 100644 --- a/test/Microsoft.ML.Benchmarks/Text/MultiClassClassification.cs +++ 
b/test/Microsoft.ML.Benchmarks/Text/MultiClassClassification.cs @@ -53,7 +53,7 @@ public void CV_Multiclass_WikiDetox_BigramsAndTrichar_LightGBMMulticlass() " xf=Concat{col=Features:FeaturesText,logged_in,ns}" + " tr=LightGBMMulticlass{iter=10}"; - var environment = EnvironmentFactory.CreateClassificationEnvironment(); + var environment = EnvironmentFactory.CreateClassificationEnvironment(); cmd.ExecuteMamlCommand(environment); } @@ -85,7 +85,7 @@ public void CV_Multiclass_WikiDetox_WordEmbeddings_SDCAMC() " xf=WordEmbeddingsTransform{col=FeaturesWordEmbedding:FeaturesText_TransformedText model=FastTextWikipedia300D}" + " xf=Concat{col=Features:FeaturesWordEmbedding,logged_in,ns}"; - var environment = EnvironmentFactory.CreateClassificationEnvironment(); + var environment = EnvironmentFactory.CreateClassificationEnvironment(); cmd.ExecuteMamlCommand(environment); } } diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs index fc4bc5f7f0..0a36b2af62 100644 --- a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs +++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs @@ -132,7 +132,7 @@ public void EntryPointScoring() var dataView = GetBreastCancerDataviewWithTextColumns(); dataView = Env.CreateTransform("Term{col=F1}", dataView); var trainData = FeatureCombiner.PrepareFeatures(Env, new FeatureCombiner.FeatureCombinerInput() { Data = dataView, Features = new[] { "F1", "F2", "Rest" } }); - var lrModel = LogisticRegressionBinaryClassificationTrainer.TrainBinary(Env, new LogisticRegressionBinaryClassificationTrainer.Options { TrainingData = trainData.OutputData }).PredictorModel; + var lrModel = LogisticRegressionBinaryTrainer.TrainBinary(Env, new LogisticRegressionBinaryTrainer.Options { TrainingData = trainData.OutputData }).PredictorModel; var model = ModelOperations.CombineTwoModels(Env, new ModelOperations.SimplePredictorModelInput() { TransformModel = trainData.Model, PredictorModel = lrModel }).PredictorModel; var scored1 = ScoreModel.Score(Env, new ScoreModel.Input() { Data = dataView, PredictorModel = model }).ScoredData; @@ -362,12 +362,12 @@ public void EntryPointInputBuilderOptionals() { var catalog = Env.ComponentCatalog; - InputBuilder ib1 = new InputBuilder(Env, typeof(LogisticRegressionBinaryClassificationTrainer.Options), catalog); + InputBuilder ib1 = new InputBuilder(Env, typeof(LogisticRegressionBinaryTrainer.Options), catalog); // Ensure that InputBuilder unwraps the Optional correctly. 
var weightType = ib1.GetFieldTypeOrNull("ExampleWeightColumnName"); Assert.True(weightType.Equals(typeof(string))); - var instance = ib1.GetInstance() as LogisticRegressionBinaryClassificationTrainer.Options; + var instance = ib1.GetInstance() as LogisticRegressionBinaryTrainer.Options; Assert.True(instance.ExampleWeightColumnName == null); ib1.TrySetValue("ExampleWeightColumnName", "OtherWeight"); @@ -420,14 +420,14 @@ public void EntryPointCreateEnsemble() for (int i = 0; i < nModels; i++) { var data = splitOutput.TrainData[i]; - var lrInput = new LogisticRegressionBinaryClassificationTrainer.Options + var lrInput = new LogisticRegressionBinaryTrainer.Options { TrainingData = data, L1Regularization = (Single)0.1 * i, L2Regularization = (Single)0.01 * (1 + i), NormalizeFeatures = NormalizeOption.No }; - predictorModels[i] = LogisticRegressionBinaryClassificationTrainer.TrainBinary(Env, lrInput).PredictorModel; + predictorModels[i] = LogisticRegressionBinaryTrainer.TrainBinary(Env, lrInput).PredictorModel; individualScores[i] = ScoreModel.Score(Env, new ScoreModel.Input { Data = splitOutput.TestData[nModels], PredictorModel = predictorModels[i] }) @@ -676,7 +676,7 @@ public void EntryPointCalibrate() var splitOutput = CVSplit.Split(Env, new CVSplit.Input { Data = dataView, NumFolds = 3 }); - var lrModel = LogisticRegressionBinaryClassificationTrainer.TrainBinary(Env, new LogisticRegressionBinaryClassificationTrainer.Options { TrainingData = splitOutput.TestData[0] }).PredictorModel; + var lrModel = LogisticRegressionBinaryTrainer.TrainBinary(Env, new LogisticRegressionBinaryTrainer.Options { TrainingData = splitOutput.TestData[0] }).PredictorModel; var calibratedLrModel = Calibrate.FixedPlatt(Env, new Calibrate.FixedPlattInput { Data = splitOutput.TestData[1], UncalibratedPredictorModel = lrModel }).PredictorModel; @@ -695,7 +695,7 @@ public void EntryPointCalibrate() calibratedLrModel = Calibrate.Pav(Env, input).PredictorModel; // This tests that the SchemaBindableCalibratedPredictor doesn't get confused if its sub-predictor is already calibrated. 
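For reference, a minimal sketch of how the renamed LogisticRegressionBinaryTrainer.Options is consumed through the MLContext catalog, mirroring the call shape used by the functional tests later in this diff. The SentimentData class, the sample rows, and the in-memory data-loading call are illustrative assumptions rather than part of this change:

    // Illustrative sketch only (not part of this diff): the renamed
    // LogisticRegressionBinaryTrainer.Options plugged into the unchanged catalog method.
    // The data class, sample rows, and LoadFromEnumerable call are assumptions for the example.
    using System.Collections.Generic;
    using Microsoft.ML;
    using Microsoft.ML.Trainers;

    internal static class LogisticRegressionRenameSketch
    {
        private sealed class SentimentData
        {
            public bool Label { get; set; }
            public string SentimentText { get; set; }
        }

        public static void Run()
        {
            var mlContext = new MLContext(seed: 1);
            var data = mlContext.Data.LoadFromEnumerable(new List<SentimentData>
            {
                new SentimentData { Label = true, SentimentText = "great product" },
                new SentimentData { Label = false, SentimentText = "poor service" }
            });

            // Same options class as before; only the enclosing trainer type name changed.
            var pipeline = mlContext.Transforms.Text.FeaturizeText("Features", "SentimentText")
                .Append(mlContext.BinaryClassification.Trainers.LogisticRegression(
                    new LogisticRegressionBinaryTrainer.Options { NumberOfThreads = 1 }));

            var model = pipeline.Fit(data);
        }
    }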
- var fastForest = new FastForestBinaryClassificationTrainer(Env, "Label", "Features"); + var fastForest = new FastForestBinaryTrainer(Env, "Label", "Features"); var rmd = new RoleMappedData(splitOutput.TrainData[0], "Label", "Features"); var ffModel = new PredictorModelImpl(Env, rmd, splitOutput.TrainData[0], fastForest.Train(rmd)); var calibratedFfModel = Calibrate.Platt(Env, @@ -724,14 +724,14 @@ public void EntryPointPipelineEnsemble() data = new ColumnConcatenatingTransformer(Env, "Features", new[] { "Features1", "Features2" }).Transform(data); data = new ValueToKeyMappingEstimator(Env, "Label", "Label", keyOrdinality: ValueToKeyMappingEstimator.KeyOrdinality.ByValue).Fit(data).Transform(data); - var lrInput = new LogisticRegressionBinaryClassificationTrainer.Options + var lrInput = new LogisticRegressionBinaryTrainer.Options { TrainingData = data, L1Regularization = (Single)0.1 * i, L2Regularization = (Single)0.01 * (1 + i), NormalizeFeatures = NormalizeOption.Yes }; - predictorModels[i] = LogisticRegressionBinaryClassificationTrainer.TrainBinary(Env, lrInput).PredictorModel; + predictorModels[i] = LogisticRegressionBinaryTrainer.TrainBinary(Env, lrInput).PredictorModel; var transformModel = new TransformModelImpl(Env, data, splitOutput.TrainData[i]); predictorModels[i] = ModelOperations.CombineTwoModels(Env, @@ -985,14 +985,14 @@ public void EntryPointPipelineEnsembleText() }, data); } - var lrInput = new LogisticRegressionBinaryClassificationTrainer.Options + var lrInput = new LogisticRegressionBinaryTrainer.Options { TrainingData = data, L1Regularization = (Single)0.1 * i, L2Regularization = (Single)0.01 * (1 + i), NormalizeFeatures = NormalizeOption.Yes }; - predictorModels[i] = LogisticRegressionBinaryClassificationTrainer.TrainBinary(Env, lrInput).PredictorModel; + predictorModels[i] = LogisticRegressionBinaryTrainer.TrainBinary(Env, lrInput).PredictorModel; var transformModel = new TransformModelImpl(Env, data, splitOutput.TrainData[i]); predictorModels[i] = ModelOperations.CombineTwoModels(Env, @@ -1318,7 +1318,7 @@ public void EntryPointPipelineEnsembleGetSummary() data = new ColumnConcatenatingTransformer(Env, new ColumnConcatenatingTransformer.ColumnOptions("Features", i % 2 == 0 ? 
new[] { "Features", "Cat" } : new[] { "Cat", "Features" })).Transform(data); if (i % 2 == 0) { - var lrInput = new LogisticRegressionBinaryClassificationTrainer.Options + var lrInput = new LogisticRegressionBinaryTrainer.Options { TrainingData = data, NormalizeFeatures = NormalizeOption.Yes, @@ -1326,7 +1326,7 @@ public void EntryPointPipelineEnsembleGetSummary() ShowTrainingStatistics = true, ComputeStandardDeviation = new ComputeLRTrainingStdThroughMkl() }; - predictorModels[i] = LogisticRegressionBinaryClassificationTrainer.TrainBinary(Env, lrInput).PredictorModel; + predictorModels[i] = LogisticRegressionBinaryTrainer.TrainBinary(Env, lrInput).PredictorModel; var transformModel = new TransformModelImpl(Env, data, splitOutput.TrainData[i]); predictorModels[i] = ModelOperations.CombineTwoModels(Env, @@ -1335,7 +1335,7 @@ public void EntryPointPipelineEnsembleGetSummary() } else if (i % 2 == 1) { - var trainer = new FastTreeBinaryClassificationTrainer(Env, "Label", "Features"); + var trainer = new FastTreeBinaryTrainer(Env, "Label", "Features"); var rmd = new RoleMappedData(data, false, RoleMappedSchema.CreatePair(RoleMappedSchema.ColumnRole.Feature, "Features"), RoleMappedSchema.CreatePair(RoleMappedSchema.ColumnRole.Label, "Label")); @@ -3347,7 +3347,7 @@ public void EntryPointLinearPredictorSummary() InputFile = inputFile, }).Data; - var lrInput = new LogisticRegressionBinaryClassificationTrainer.Options + var lrInput = new LogisticRegressionBinaryTrainer.Options { TrainingData = dataView, NormalizeFeatures = NormalizeOption.Yes, @@ -3355,7 +3355,7 @@ public void EntryPointLinearPredictorSummary() ShowTrainingStatistics = true, ComputeStandardDeviation = new ComputeLRTrainingStdThroughMkl() }; - var model = LogisticRegressionBinaryClassificationTrainer.TrainBinary(Env, lrInput).PredictorModel; + var model = LogisticRegressionBinaryTrainer.TrainBinary(Env, lrInput).PredictorModel; var mcLrInput = new LogisticRegressionMulticlassClassificationTrainer.Options { @@ -3364,7 +3364,7 @@ public void EntryPointLinearPredictorSummary() NumberOfThreads = 1, ShowTrainingStatistics = true }; - var mcModel = LogisticRegressionBinaryClassificationTrainer.TrainMulticlass(Env, mcLrInput).PredictorModel; + var mcModel = LogisticRegressionBinaryTrainer.TrainMulticlass(Env, mcLrInput).PredictorModel; var output = SummarizePredictor.Summarize(Env, new SummarizePredictor.Input() { PredictorModel = model }); @@ -3556,7 +3556,7 @@ public void EntryPointTreeLeafFeaturizer() Columns = new[] { new ColumnConcatenatingTransformer.Column { Name = "Features", Source = new[] { "Categories", "NumericFeatures" } } } }); - var fastTree = Trainers.FastTree.FastTree.TrainBinary(Env, new FastTreeBinaryClassificationTrainer.Options + var fastTree = Trainers.FastTree.FastTree.TrainBinary(Env, new FastTreeBinaryTrainer.Options { FeatureColumnName = "Features", NumberOfTrees = 5, diff --git a/test/Microsoft.ML.Functional.Tests/DataTransformation.cs b/test/Microsoft.ML.Functional.Tests/DataTransformation.cs index 21db727b8a..068e3c90b5 100644 --- a/test/Microsoft.ML.Functional.Tests/DataTransformation.cs +++ b/test/Microsoft.ML.Functional.Tests/DataTransformation.cs @@ -144,7 +144,7 @@ void ExtensibilityModifyTextFeaturization() }, "SentimentText") .AppendCacheCheckpoint(mlContext) .Append(mlContext.BinaryClassification.Trainers.SdcaCalibrated( - new SdcaCalibratedBinaryClassificationTrainer.Options { NumberOfThreads = 1 })); + new SdcaCalibratedBinaryTrainer.Options { NumberOfThreads = 1 })); // Train the model. 
var model = pipeline.Fit(data); diff --git a/test/Microsoft.ML.Functional.Tests/Evaluation.cs b/test/Microsoft.ML.Functional.Tests/Evaluation.cs index 93d0a93b78..cacd71cfbf 100644 --- a/test/Microsoft.ML.Functional.Tests/Evaluation.cs +++ b/test/Microsoft.ML.Functional.Tests/Evaluation.cs @@ -65,7 +65,7 @@ public void TrainAndEvaluateBinaryClassification() var pipeline = mlContext.Transforms.Text.FeaturizeText("Features", "SentimentText") .AppendCacheCheckpoint(mlContext) .Append(mlContext.BinaryClassification.Trainers.SdcaNonCalibrated( - new SdcaNonCalibratedBinaryClassificationTrainer.Options { NumberOfThreads = 1 })); + new SdcaNonCalibratedBinaryTrainer.Options { NumberOfThreads = 1 })); // Train the model. var model = pipeline.Fit(data); @@ -94,7 +94,7 @@ public void TrainAndEvaluateBinaryClassificationWithCalibration() var pipeline = mlContext.Transforms.Text.FeaturizeText("Features", "SentimentText") .AppendCacheCheckpoint(mlContext) .Append(mlContext.BinaryClassification.Trainers.LogisticRegression( - new LogisticRegressionBinaryClassificationTrainer.Options { NumberOfThreads = 1 })); + new LogisticRegressionBinaryTrainer.Options { NumberOfThreads = 1 })); // Train the model. var model = pipeline.Fit(data); @@ -152,7 +152,7 @@ public void TrainAndEvaluateMulticlassClassification() .Append(mlContext.Transforms.Conversion.MapValueToKey("Label")) .AppendCacheCheckpoint(mlContext) .Append(mlContext.MulticlassClassification.Trainers.Sdca( - new SdcaMulticlassClassificationTrainer.Options { NumberOfThreads = 1})); + new SdcaMulticlassTrainer.Options { NumberOfThreads = 1})); // Train the model. var model = pipeline.Fit(data); @@ -274,7 +274,7 @@ public void TrainAndEvaluateWithPrecisionRecallCurves() var pipeline = mlContext.Transforms.Text.FeaturizeText("Features", "SentimentText") .AppendCacheCheckpoint(mlContext) .Append(mlContext.BinaryClassification.Trainers.LogisticRegression( - new LogisticRegressionBinaryClassificationTrainer.Options { NumberOfThreads = 1 })); + new LogisticRegressionBinaryTrainer.Options { NumberOfThreads = 1 })); // Train the model. var model = pipeline.Fit(data); diff --git a/test/Microsoft.ML.Functional.Tests/IntrospectiveTraining.cs b/test/Microsoft.ML.Functional.Tests/IntrospectiveTraining.cs index 5a26d98619..5eb002c993 100644 --- a/test/Microsoft.ML.Functional.Tests/IntrospectiveTraining.cs +++ b/test/Microsoft.ML.Functional.Tests/IntrospectiveTraining.cs @@ -82,7 +82,7 @@ public void InspectFastTreeModelParameters() var pipeline = mlContext.Transforms.Text.FeaturizeText("Features", "SentimentText") .AppendCacheCheckpoint(mlContext) .Append(mlContext.BinaryClassification.Trainers.FastTree( - new FastTreeBinaryClassificationTrainer.Options{ NumberOfLeaves = 5, NumberOfTrees= 3, NumberOfThreads = 1 })); + new FastTreeBinaryTrainer.Options{ NumberOfLeaves = 5, NumberOfTrees= 3, NumberOfThreads = 1 })); // Fit the pipeline. var model = pipeline.Fit(data); @@ -217,7 +217,7 @@ public void InpsectLinearModelParameters() var pipeline = mlContext.Transforms.Text.FeaturizeText("Features", "SentimentText") .AppendCacheCheckpoint(mlContext) .Append(mlContext.BinaryClassification.Trainers.SdcaNonCalibrated( - new SdcaNonCalibratedBinaryClassificationTrainer.Options { NumberOfThreads = 1 })); + new SdcaNonCalibratedBinaryTrainer.Options { NumberOfThreads = 1 })); // Fit the pipeline. 
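For the tree trainers, a minimal sketch of the renamed FastTreeBinaryTrainer.Options with the same parameter values the introspective-training test above passes; it only constructs the estimator and is not part of this change:

    // Illustrative sketch only (not part of this diff): the FastTree binary options type
    // under its new name, plugged into the unchanged catalog method.
    using Microsoft.ML;
    using Microsoft.ML.Trainers.FastTree;

    internal static class FastTreeBinaryRenameSketch
    {
        public static void Run()
        {
            var mlContext = new MLContext();

            // Was FastTreeBinaryClassificationTrainer.Options before this change.
            var fastTree = mlContext.BinaryClassification.Trainers.FastTree(
                new FastTreeBinaryTrainer.Options
                {
                    NumberOfLeaves = 5,
                    NumberOfTrees = 3,
                    NumberOfThreads = 1
                });
        }
    }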
var model = pipeline.Fit(data); @@ -423,7 +423,7 @@ private IEstimator).Model as CalibratedModelParametersBase).SubModel - as BinaryClassificationGamModelParameters; + as GamBinaryModelParameters; Assert.NotNull(gam); gam = (((loadedCompositeLoader as CompositeDataLoader).Transformer.LastTransformer as ISingleFeaturePredictionTransformer).Model as CalibratedModelParametersBase).SubModel - as BinaryClassificationGamModelParameters; + as GamBinaryModelParameters; Assert.NotNull(gam); gam = (((loadedTransformerModel1 as TransformerChain).LastTransformer as ISingleFeaturePredictionTransformer).Model as CalibratedModelParametersBase).SubModel - as BinaryClassificationGamModelParameters; + as GamBinaryModelParameters; Assert.NotNull(gam); } @@ -150,7 +150,7 @@ public void SaveAndLoadModelWithLoader() Assert.NotNull(singleFeaturePredictionTransformer); var calibratedModelParameters = singleFeaturePredictionTransformer.Model as CalibratedModelParametersBase; Assert.NotNull(calibratedModelParameters); - var gamModel = calibratedModelParameters.SubModel as BinaryClassificationGamModelParameters; + var gamModel = calibratedModelParameters.SubModel as GamBinaryModelParameters; Assert.NotNull(gamModel); var ageBinUpperBounds = gamModel.GetBinUpperBounds(ageIndex); var ageBinEffects = gamModel.GetBinEffects(ageIndex); diff --git a/test/Microsoft.ML.Functional.Tests/Training.cs b/test/Microsoft.ML.Functional.Tests/Training.cs index 954602d249..b00739b699 100644 --- a/test/Microsoft.ML.Functional.Tests/Training.cs +++ b/test/Microsoft.ML.Functional.Tests/Training.cs @@ -44,10 +44,10 @@ public void CompareTrainerEvaluations() // Create a selection of learners. var sdcaTrainer = mlContext.BinaryClassification.Trainers.SdcaCalibrated( - new SdcaCalibratedBinaryClassificationTrainer.Options { NumberOfThreads = 1 }); + new SdcaCalibratedBinaryTrainer.Options { NumberOfThreads = 1 }); var fastTreeTrainer = mlContext.BinaryClassification.Trainers.FastTree( - new FastTreeBinaryClassificationTrainer.Options { NumberOfThreads = 1 }); + new FastTreeBinaryTrainer.Options { NumberOfThreads = 1 }); var ffmTrainer = mlContext.BinaryClassification.Trainers.FieldAwareFactorizationMachine(); @@ -226,7 +226,7 @@ public void ContinueTrainingLogisticRegression() .AppendCacheCheckpoint(mlContext); var trainer = mlContext.BinaryClassification.Trainers.LogisticRegression( - new LogisticRegressionBinaryClassificationTrainer.Options { NumberOfThreads = 1, MaximumNumberOfIterations = 10 }); + new LogisticRegressionBinaryTrainer.Options { NumberOfThreads = 1, MaximumNumberOfIterations = 10 }); // Fit the data transformation pipeline. var featurization = featurizationPipeline.Fit(data); @@ -452,7 +452,7 @@ public void MetacomponentsFunctionAsExpectedOva() // Create a model training an OVA trainer with a binary classifier. 
var binaryClassificationTrainer = mlContext.BinaryClassification.Trainers.LogisticRegression( - new LogisticRegressionBinaryClassificationTrainer.Options { MaximumNumberOfIterations = 10, NumberOfThreads = 1, }); + new LogisticRegressionBinaryTrainer.Options { MaximumNumberOfIterations = 10, NumberOfThreads = 1, }); var binaryClassificationPipeline = mlContext.Transforms.Concatenate("Features", Iris.Features) .AppendCacheCheckpoint(mlContext) .Append(mlContext.Transforms.Conversion.MapValueToKey("Label")) diff --git a/test/Microsoft.ML.Functional.Tests/Validation.cs b/test/Microsoft.ML.Functional.Tests/Validation.cs index a370eb7a97..e93a840f91 100644 --- a/test/Microsoft.ML.Functional.Tests/Validation.cs +++ b/test/Microsoft.ML.Functional.Tests/Validation.cs @@ -43,7 +43,7 @@ void CrossValidation() // Check that the results are valid Assert.IsType(cvResult[0].Metrics); - Assert.IsType>>(cvResult[0].Model); + Assert.IsType>>(cvResult[0].Model); Assert.True(cvResult[0].ScoredHoldOutSet is IDataView); Assert.Equal(5, cvResult.Length); diff --git a/test/Microsoft.ML.Predictor.Tests/TestGamPublicInterfaces.cs b/test/Microsoft.ML.Predictor.Tests/TestGamPublicInterfaces.cs index 1a6a5cf90b..a9646b8893 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestGamPublicInterfaces.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestGamPublicInterfaces.cs @@ -31,7 +31,7 @@ public void TestGamDirectInstatiation() new double[] { 2, 1, 0 } }; - var gam = new RegressionGamModelParameters(mlContext, binUpperBounds, binEffects, intercept); + var gam = new GamRegressionModelParameters(mlContext, binUpperBounds, binEffects, intercept); // Check that the model has the right number of shape functions Assert.Equal(binUpperBounds.Length, gam.NumberOfShapeFunctions); @@ -50,15 +50,15 @@ public void TestGamDirectInstatiation() Utils.AreEqual(binEffects[i], gam.GetBinEffects(i).ToArray()); // Check that the constructor handles null inputs properly - Assert.Throws(() => new RegressionGamModelParameters(mlContext, binUpperBounds, null, intercept)); - Assert.Throws(() => new RegressionGamModelParameters(mlContext, null, binEffects, intercept)); - Assert.Throws(() => new RegressionGamModelParameters(mlContext, null, null, intercept)); + Assert.Throws(() => new GamRegressionModelParameters(mlContext, binUpperBounds, null, intercept)); + Assert.Throws(() => new GamRegressionModelParameters(mlContext, null, binEffects, intercept)); + Assert.Throws(() => new GamRegressionModelParameters(mlContext, null, null, intercept)); // Check that the constructor handles mismatches in length between bin upper bounds and bin effects var misMatchArray = new double[1][]; misMatchArray[0] = new double[] { 0 }; - Assert.Throws(() => new RegressionGamModelParameters(mlContext, binUpperBounds, misMatchArray, intercept)); - Assert.Throws(() => new RegressionGamModelParameters(mlContext, misMatchArray, binEffects, intercept)); + Assert.Throws(() => new GamRegressionModelParameters(mlContext, binUpperBounds, misMatchArray, intercept)); + Assert.Throws(() => new GamRegressionModelParameters(mlContext, misMatchArray, binEffects, intercept)); // Check that the constructor handles a mismatch in bin upper bounds and bin effects for a feature var fewerBinEffects = new double[2][] @@ -66,13 +66,13 @@ public void TestGamDirectInstatiation() new double[] { 0, 1 }, new double[] { 2, 1, 0 } }; - Assert.Throws(() => new RegressionGamModelParameters(mlContext, binUpperBounds, fewerBinEffects, intercept)); + Assert.Throws(() => new 
GamRegressionModelParameters(mlContext, binUpperBounds, fewerBinEffects, intercept)); var moreBinEffects = new double[2][] { new double[] { 0, 1, 2, 3 }, new double[] { 2, 1, 0 } }; - Assert.Throws(() => new RegressionGamModelParameters(mlContext, binUpperBounds, moreBinEffects, intercept)); + Assert.Throws(() => new GamRegressionModelParameters(mlContext, binUpperBounds, moreBinEffects, intercept)); // Check that the constructor handles bin upper bounds that are not sorted var unsortedUpperBounds = new double[2][] @@ -80,7 +80,7 @@ public void TestGamDirectInstatiation() new double[] { 1, 3, 2 }, new double[] { 4, 5, 6 } }; - Assert.Throws(() => new RegressionGamModelParameters(mlContext, unsortedUpperBounds, binEffects, intercept)); + Assert.Throws(() => new GamRegressionModelParameters(mlContext, unsortedUpperBounds, binEffects, intercept)); } private void CheckArrayOfArrayEquality(double[][] array1, double[][] array2) diff --git a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs index d40a9c48b2..ce141dbafd 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs @@ -594,7 +594,7 @@ public void TestTreeEnsembleCombiner() var fastTrees = new PredictorModel[3]; for (int i = 0; i < 3; i++) { - fastTrees[i] = FastTree.TrainBinary(ML, new FastTreeBinaryClassificationTrainer.Options + fastTrees[i] = FastTree.TrainBinary(ML, new FastTreeBinaryTrainer.Options { FeatureColumnName = "Features", NumberOfTrees = 5, @@ -616,7 +616,7 @@ public void TestTreeEnsembleCombinerWithCategoricalSplits() var fastTrees = new PredictorModel[3]; for (int i = 0; i < 3; i++) { - fastTrees[i] = FastTree.TrainBinary(ML, new FastTreeBinaryClassificationTrainer.Options + fastTrees[i] = FastTree.TrainBinary(ML, new FastTreeBinaryTrainer.Options { FeatureColumnName = "Features", NumberOfTrees = 5, @@ -723,7 +723,7 @@ public void TestEnsembleCombiner() var predictors = new PredictorModel[] { - FastTree.TrainBinary(ML, new FastTreeBinaryClassificationTrainer.Options + FastTree.TrainBinary(ML, new FastTreeBinaryTrainer.Options { FeatureColumnName = "Features", NumberOfTrees = 5, @@ -739,7 +739,7 @@ public void TestEnsembleCombiner() TrainingData = dataView, NormalizeFeatures = NormalizeOption.No }).PredictorModel, - LogisticRegressionBinaryClassificationTrainer.TrainBinary(ML, new LogisticRegressionBinaryClassificationTrainer.Options() + LogisticRegressionBinaryTrainer.TrainBinary(ML, new LogisticRegressionBinaryTrainer.Options() { FeatureColumnName = "Features", LabelColumnName = DefaultColumnNames.Label, @@ -747,7 +747,7 @@ public void TestEnsembleCombiner() TrainingData = dataView, NormalizeFeatures = NormalizeOption.No }).PredictorModel, - LogisticRegressionBinaryClassificationTrainer.TrainBinary(ML, new LogisticRegressionBinaryClassificationTrainer.Options() + LogisticRegressionBinaryTrainer.TrainBinary(ML, new LogisticRegressionBinaryTrainer.Options() { FeatureColumnName = "Features", LabelColumnName = DefaultColumnNames.Label, @@ -776,7 +776,7 @@ public void TestMulticlassEnsembleCombiner() LabelColumnName = DefaultColumnNames.Label, TrainingData = dataView }).PredictorModel, - LogisticRegressionBinaryClassificationTrainer.TrainMulticlass(Env, new LogisticRegressionMulticlassClassificationTrainer.Options() + LogisticRegressionBinaryTrainer.TrainMulticlass(Env, new LogisticRegressionMulticlassClassificationTrainer.Options() { FeatureColumnName = "Features", LabelColumnName = 
DefaultColumnNames.Label, @@ -784,7 +784,7 @@ public void TestMulticlassEnsembleCombiner() TrainingData = dataView, NormalizeFeatures = NormalizeOption.No }).PredictorModel, - LogisticRegressionBinaryClassificationTrainer.TrainMulticlass(Env, new LogisticRegressionMulticlassClassificationTrainer.Options() + LogisticRegressionBinaryTrainer.TrainMulticlass(Env, new LogisticRegressionMulticlassClassificationTrainer.Options() { FeatureColumnName = "Features", LabelColumnName = DefaultColumnNames.Label, diff --git a/test/Microsoft.ML.StaticPipelineTesting/Training.cs b/test/Microsoft.ML.StaticPipelineTesting/Training.cs index 7ff3dff550..4a220ffb23 100644 --- a/test/Microsoft.ML.StaticPipelineTesting/Training.cs +++ b/test/Microsoft.ML.StaticPipelineTesting/Training.cs @@ -117,7 +117,7 @@ public void SdcaBinaryClassification() var est = reader.MakeNewEstimator() .Append(r => (r.label, preds: catalog.Trainers.Sdca(r.label, r.features, null, - new SdcaCalibratedBinaryClassificationTrainer.Options { MaximumNumberOfIterations = 2, NumberOfThreads = 1 }, + new SdcaCalibratedBinaryTrainer.Options { MaximumNumberOfIterations = 2, NumberOfThreads = 1 }, onFit: (p) => { pred = p; }))); var pipe = reader.Append(est); @@ -197,7 +197,7 @@ public void SdcaBinaryClassificationNoCalibration() // With a custom loss function we no longer get calibrated predictions. var est = reader.MakeNewEstimator() .Append(r => (r.label, preds: catalog.Trainers.SdcaNonCalibrated(r.label, r.features, null, loss, - new SdcaNonCalibratedBinaryClassificationTrainer.Options { MaximumNumberOfIterations = 2, NumberOfThreads = 1 }, + new SdcaNonCalibratedBinaryTrainer.Options { MaximumNumberOfIterations = 2, NumberOfThreads = 1 }, onFit: p => pred = p))); var pipe = reader.Append(est); @@ -654,7 +654,7 @@ public void LogisticRegressionBinaryClassification() var est = reader.MakeNewEstimator() .Append(r => (r.label, preds: catalog.Trainers.LogisticRegressionBinaryClassifier(r.label, r.features, null, - new LogisticRegressionBinaryClassificationTrainer.Options { L1Regularization = 10, NumberOfThreads = 1 }, onFit: (p) => { pred = p; }))); + new LogisticRegressionBinaryTrainer.Options { L1Regularization = 10, NumberOfThreads = 1 }, onFit: (p) => { pred = p; }))); var pipe = reader.Append(est); @@ -960,7 +960,7 @@ public void MulticlassNaiveBayesTrainer() var reader = TextLoaderStatic.CreateLoader(env, c => (label: c.LoadText(0), features: c.LoadFloat(1, 4))); - MulticlassNaiveBayesModelParameters pred = null; + NaiveBayesMulticlassModelParameters pred = null; // With a custom loss function we no longer get calibrated predictions. 
var est = reader.MakeNewEstimator() diff --git a/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs b/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs index 60292566b7..5cd37edf60 100644 --- a/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs +++ b/test/Microsoft.ML.TestFramework/EnvironmentExtensions.cs @@ -23,7 +23,7 @@ public static TEnvironment AddStandardComponents(this TEnvironment env.ComponentCatalog.RegisterAssembly(typeof(FastTreeBinaryModelParameters).Assembly); // ML.FastTree env.ComponentCatalog.RegisterAssembly(typeof(EnsembleModelParameters).Assembly); // ML.Ensemble env.ComponentCatalog.RegisterAssembly(typeof(KMeansModelParameters).Assembly); // ML.KMeansClustering - env.ComponentCatalog.RegisterAssembly(typeof(PrincipleComponentModelParameters).Assembly); // ML.PCA + env.ComponentCatalog.RegisterAssembly(typeof(PcaModelParameters).Assembly); // ML.PCA env.ComponentCatalog.RegisterAssembly(typeof(CVSplit).Assembly); // ML.EntryPoints return env; } diff --git a/test/Microsoft.ML.TestFramework/Learners.cs b/test/Microsoft.ML.TestFramework/Learners.cs index 7367583dfd..f369264db3 100644 --- a/test/Microsoft.ML.TestFramework/Learners.cs +++ b/test/Microsoft.ML.TestFramework/Learners.cs @@ -39,7 +39,7 @@ public class TestLearnersBase static TestLearnersBase() { bool ok = true; - ok &= typeof(FastTreeBinaryClassificationTrainer) != null; + ok &= typeof(FastTreeBinaryTrainer) != null; Contracts.Check(ok, "Missing assemblies!"); } diff --git a/test/Microsoft.ML.Tests/FeatureContributionTests.cs b/test/Microsoft.ML.Tests/FeatureContributionTests.cs index 3ea59f5e80..b5c8036665 100644 --- a/test/Microsoft.ML.Tests/FeatureContributionTests.cs +++ b/test/Microsoft.ML.Tests/FeatureContributionTests.cs @@ -150,7 +150,7 @@ public void TestLightGbmBinary() public void TestSDCABinary() { TestFeatureContribution(ML.BinaryClassification.Trainers.SdcaNonCalibrated( - new SdcaNonCalibratedBinaryClassificationTrainer.Options { NumberOfThreads = 1, }), GetSparseDataset(TaskType.BinaryClassification, 100), "SDCABinary", precision: 5); + new SdcaNonCalibratedBinaryTrainer.Options { NumberOfThreads = 1, }), GetSparseDataset(TaskType.BinaryClassification, 100), "SDCABinary", precision: 5); } [Fact] diff --git a/test/Microsoft.ML.Tests/PermutationFeatureImportanceTests.cs b/test/Microsoft.ML.Tests/PermutationFeatureImportanceTests.cs index 9f176cd3c9..96993ad53d 100644 --- a/test/Microsoft.ML.Tests/PermutationFeatureImportanceTests.cs +++ b/test/Microsoft.ML.Tests/PermutationFeatureImportanceTests.cs @@ -152,7 +152,7 @@ public void TestPfiBinaryClassificationOnDenseFeatures() { var data = GetDenseDataset(TaskType.BinaryClassification); var model = ML.BinaryClassification.Trainers.LogisticRegression( - new LogisticRegressionBinaryClassificationTrainer.Options { NumberOfThreads = 1 }).Fit(data); + new LogisticRegressionBinaryTrainer.Options { NumberOfThreads = 1 }).Fit(data); var pfi = ML.BinaryClassification.PermutationFeatureImportance(model, data); // Pfi Indices: @@ -190,7 +190,7 @@ public void TestPfiBinaryClassificationOnSparseFeatures() { var data = GetSparseDataset(TaskType.BinaryClassification); var model = ML.BinaryClassification.Trainers.LogisticRegression( - new LogisticRegressionBinaryClassificationTrainer.Options { NumberOfThreads = 1 }).Fit(data); + new LogisticRegressionBinaryTrainer.Options { NumberOfThreads = 1 }).Fit(data); var pfi = ML.BinaryClassification.PermutationFeatureImportance(model, data); // Pfi Indices: diff --git 
a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs index e8c05bb5bf..151cb3792f 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/DecomposableTrainAndPredict.cs @@ -32,7 +32,7 @@ void DecomposableTrainAndPredict() var pipeline = new ColumnConcatenatingEstimator (ml, "Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth") .Append(new ValueToKeyMappingEstimator(ml, "Label"), TransformerScope.TrainTest) .Append(ml.MulticlassClassification.Trainers.Sdca( - new SdcaMulticlassClassificationTrainer.Options { MaximumNumberOfIterations = 100, Shuffle = true, NumberOfThreads = 1, })) + new SdcaMulticlassTrainer.Options { MaximumNumberOfIterations = 100, Shuffle = true, NumberOfThreads = 1, })) .Append(new KeyToValueMappingEstimator(ml, "PredictedLabel")); var model = pipeline.Fit(data).GetModelFor(TransformerScope.Scoring); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs index e977884d7e..c5098f6b30 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/Extensibility.cs @@ -41,7 +41,7 @@ void Extensibility() .Append(new CustomMappingEstimator(ml, action, null), TransformerScope.TrainTest) .Append(new ValueToKeyMappingEstimator(ml, "Label"), TransformerScope.TrainTest) .Append(ml.MulticlassClassification.Trainers.Sdca( - new SdcaMulticlassClassificationTrainer.Options { MaximumNumberOfIterations = 100, Shuffle = true, NumberOfThreads = 1 })) + new SdcaMulticlassTrainer.Options { MaximumNumberOfIterations = 100, Shuffle = true, NumberOfThreads = 1 })) .Append(new KeyToValueMappingEstimator(ml, "PredictedLabel")); var model = pipeline.Fit(data).GetModelFor(TransformerScope.Scoring); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs index daacc9fcae..5d5298e3cb 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/MultithreadedPrediction.cs @@ -30,7 +30,7 @@ void MultithreadedPrediction() var pipeline = ml.Transforms.Text.FeaturizeText("Features", "SentimentText") .AppendCacheCheckpoint(ml) .Append(ml.BinaryClassification.Trainers.SdcaNonCalibrated( - new SdcaNonCalibratedBinaryClassificationTrainer.Options { NumberOfThreads = 1 })); + new SdcaNonCalibratedBinaryTrainer.Options { NumberOfThreads = 1 })); // Train. 
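The multiclass SDCA rename is exercised repeatedly in the scenario tests below; as a reference, a minimal sketch of the renamed SdcaMulticlassTrainer.Options in the MapValueToKey + Concatenate pipeline shape those tests use. The iris-style column names are taken from the scenario tests and are assumptions here, and no data is attached:

    // Illustrative sketch only (not part of this diff): the multiclass SDCA options type
    // under its new name, in the pipeline shape used by the iris scenario tests.
    using Microsoft.ML;
    using Microsoft.ML.Trainers;

    internal static class SdcaMulticlassRenameSketch
    {
        public static void Run()
        {
            var mlContext = new MLContext();

            // Was SdcaMulticlassClassificationTrainer.Options before this change.
            var pipeline = mlContext.Transforms.Concatenate(
                    "Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth")
                .Append(mlContext.Transforms.Conversion.MapValueToKey("Label"))
                .Append(mlContext.MulticlassClassification.Trainers.Sdca(
                    new SdcaMulticlassTrainer.Options
                    {
                        MaximumNumberOfIterations = 100,
                        Shuffle = true,
                        NumberOfThreads = 1
                    }));
        }
    }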
var model = pipeline.Fit(data); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/PredictAndMetadata.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/PredictAndMetadata.cs index 75dadbb957..8c023bb340 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/PredictAndMetadata.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/PredictAndMetadata.cs @@ -30,7 +30,7 @@ void PredictAndMetadata() var pipeline = ml.Transforms.Concatenate("Features", "SepalLength", "SepalWidth", "PetalLength", "PetalWidth") .Append(ml.Transforms.Conversion.MapValueToKey("Label"), TransformerScope.TrainTest) .Append(ml.MulticlassClassification.Trainers.Sdca( - new SdcaMulticlassClassificationTrainer.Options { MaximumNumberOfIterations = 100, Shuffle = true, NumberOfThreads = 1, })); + new SdcaMulticlassTrainer.Options { MaximumNumberOfIterations = 100, Shuffle = true, NumberOfThreads = 1, })); var model = pipeline.Fit(data).GetModelFor(TransformerScope.Scoring); var engine = ml.Model.CreatePredictionEngine(model); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs index 2b16c7ed3b..923b074bac 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/SimpleTrainAndPredict.cs @@ -27,7 +27,7 @@ public void SimpleTrainAndPredict() var pipeline = ml.Transforms.Text.FeaturizeText("Features", "SentimentText") .AppendCacheCheckpoint(ml) .Append(ml.BinaryClassification.Trainers.SdcaNonCalibrated( - new SdcaNonCalibratedBinaryClassificationTrainer.Options { NumberOfThreads = 1 })); + new SdcaNonCalibratedBinaryTrainer.Options { NumberOfThreads = 1 })); // Train. var model = pipeline.Fit(data); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs index 8d6dd718f8..2b5c82ec6f 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs @@ -28,7 +28,7 @@ public void TrainSaveModelAndPredict() var pipeline = ml.Transforms.Text.FeaturizeText("Features", "SentimentText") .AppendCacheCheckpoint(ml) .Append(ml.BinaryClassification.Trainers.SdcaNonCalibrated( - new SdcaNonCalibratedBinaryClassificationTrainer.Options { NumberOfThreads = 1 })); + new SdcaNonCalibratedBinaryTrainer.Options { NumberOfThreads = 1 })); // Train. var model = pipeline.Fit(data); diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs index 4f28e8ff4d..356c14ef2c 100644 --- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs +++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainWithInitialPredictor.cs @@ -32,7 +32,7 @@ public void TrainWithInitialPredictor() // Train the first predictor. 
var trainer = ml.BinaryClassification.Trainers.SdcaNonCalibrated( - new SdcaNonCalibratedBinaryClassificationTrainer.Options { NumberOfThreads = 1 }); + new SdcaNonCalibratedBinaryTrainer.Options { NumberOfThreads = 1 }); var firstModel = trainer.Fit(trainData); diff --git a/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationTests.cs b/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationTests.cs index af5002b050..c3a33f1492 100644 --- a/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationTests.cs +++ b/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationTests.cs @@ -33,7 +33,7 @@ public void TrainAndPredictIrisModelTest() .Append(mlContext.Transforms.Conversion.MapValueToKey("Label")) .AppendCacheCheckpoint(mlContext) .Append(mlContext.MulticlassClassification.Trainers.Sdca( - new SdcaMulticlassClassificationTrainer.Options { NumberOfThreads = 1 })); + new SdcaMulticlassTrainer.Options { NumberOfThreads = 1 })); // Read training and test data sets string dataPath = GetDataPath(TestDatasets.iris.trainFilename); diff --git a/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationWithStringLabelTests.cs b/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationWithStringLabelTests.cs index e6c54d9f80..eb383d9fdd 100644 --- a/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationWithStringLabelTests.cs +++ b/test/Microsoft.ML.Tests/Scenarios/IrisPlantClassificationWithStringLabelTests.cs @@ -38,7 +38,7 @@ public void TrainAndPredictIrisModelWithStringLabelTest() .Append(mlContext.Transforms.Conversion.MapValueToKey("Label", "IrisPlantType"), TransformerScope.TrainTest) .AppendCacheCheckpoint(mlContext) .Append(mlContext.MulticlassClassification.Trainers.Sdca( - new SdcaMulticlassClassificationTrainer.Options { NumberOfThreads = 1 })) + new SdcaMulticlassTrainer.Options { NumberOfThreads = 1 })) .Append(mlContext.Transforms.Conversion.MapKeyToValue(("Plant", "PredictedLabel"))); // Train the pipeline diff --git a/test/Microsoft.ML.Tests/Scenarios/OvaTest.cs b/test/Microsoft.ML.Tests/Scenarios/OvaTest.cs index 321afb876b..8c5108cf2e 100644 --- a/test/Microsoft.ML.Tests/Scenarios/OvaTest.cs +++ b/test/Microsoft.ML.Tests/Scenarios/OvaTest.cs @@ -104,7 +104,7 @@ public void OvaFastTree() // Pipeline var pipeline = mlContext.MulticlassClassification.Trainers.OneVersusAll( - mlContext.BinaryClassification.Trainers.FastTree(new FastTreeBinaryClassificationTrainer.Options { NumberOfThreads = 1 }), + mlContext.BinaryClassification.Trainers.FastTree(new FastTreeBinaryTrainer.Options { NumberOfThreads = 1 }), useProbabilities: false); var model = pipeline.Fit(data); diff --git a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs index 1a03391b82..db47181620 100644 --- a/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs +++ b/test/Microsoft.ML.Tests/ScenariosWithDirectInstantiation/IrisPlantClassificationTests.cs @@ -31,7 +31,7 @@ public void TrainAndPredictIrisModelUsingDirectInstantiationTest() .Append(mlContext.Transforms.Conversion.MapValueToKey("Label")) .AppendCacheCheckpoint(mlContext) .Append(mlContext.MulticlassClassification.Trainers.Sdca( - new SdcaMulticlassClassificationTrainer.Options { NumberOfThreads = 1 })); + new SdcaMulticlassTrainer.Options { NumberOfThreads = 1 })); // Read training and test data sets string dataPath = GetDataPath(TestDatasets.iris.trainFilename); diff --git 
a/test/Microsoft.ML.Tests/TrainerEstimators/LbfgsTests.cs b/test/Microsoft.ML.Tests/TrainerEstimators/LbfgsTests.cs index d11b38ce39..bccdc90ee3 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/LbfgsTests.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/LbfgsTests.cs @@ -56,7 +56,7 @@ public void TestLogisticRegressionNoStats() { (IEstimator pipe, IDataView dataView) = GetBinaryClassificationPipeline(); - pipe = pipe.Append(ML.BinaryClassification.Trainers.LogisticRegression(new LogisticRegressionBinaryClassificationTrainer.Options { ShowTrainingStatistics = true })); + pipe = pipe.Append(ML.BinaryClassification.Trainers.LogisticRegression(new LogisticRegressionBinaryTrainer.Options { ShowTrainingStatistics = true })); var transformerChain = pipe.Fit(dataView) as TransformerChain>>; var linearModel = transformerChain.LastTransformer.Model.SubModel as LinearBinaryModelParameters; @@ -73,7 +73,7 @@ public void TestLogisticRegressionWithStats() (IEstimator pipe, IDataView dataView) = GetBinaryClassificationPipeline(); pipe = pipe.Append(ML.BinaryClassification.Trainers.LogisticRegression( - new LogisticRegressionBinaryClassificationTrainer.Options + new LogisticRegressionBinaryTrainer.Options { ShowTrainingStatistics = true, ComputeStandardDeviation = new ComputeLRTrainingStdThroughMkl(), diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/MetalinearEstimators.cs b/test/Microsoft.ML.Tests/TrainerEstimators/MetalinearEstimators.cs index e6f36351b5..75b17d881c 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/MetalinearEstimators.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/MetalinearEstimators.cs @@ -42,7 +42,7 @@ public void OVAUncalibrated() { var (pipeline, data) = GetMulticlassPipeline(); var sdcaTrainer = ML.BinaryClassification.Trainers.SdcaNonCalibrated( - new SdcaNonCalibratedBinaryClassificationTrainer.Options { MaximumNumberOfIterations = 100, Shuffle = true, NumberOfThreads = 1 }); + new SdcaNonCalibratedBinaryTrainer.Options { MaximumNumberOfIterations = 100, Shuffle = true, NumberOfThreads = 1 }); pipeline = pipeline.Append(ML.MulticlassClassification.Trainers.OneVersusAll(sdcaTrainer, useProbabilities: false)) .Append(new KeyToValueMappingEstimator(Env, "PredictedLabel")); @@ -60,7 +60,7 @@ public void PairwiseCouplingTrainer() var (pipeline, data) = GetMulticlassPipeline(); var sdcaTrainer = ML.BinaryClassification.Trainers.SdcaNonCalibrated( - new SdcaNonCalibratedBinaryClassificationTrainer.Options { MaximumNumberOfIterations = 100, Shuffle = true, NumberOfThreads = 1 }); + new SdcaNonCalibratedBinaryTrainer.Options { MaximumNumberOfIterations = 100, Shuffle = true, NumberOfThreads = 1 }); pipeline = pipeline.Append(ML.MulticlassClassification.Trainers.PairwiseCoupling(sdcaTrainer)) .Append(ML.Transforms.Conversion.MapKeyToValue("PredictedLabel")); @@ -83,7 +83,7 @@ public void MetacomponentsFeaturesRenamed() var data = loader.Load(GetDataPath(TestDatasets.irisData.trainFilename)); var sdcaTrainer = ML.BinaryClassification.Trainers.SdcaNonCalibrated( - new SdcaNonCalibratedBinaryClassificationTrainer.Options { + new SdcaNonCalibratedBinaryTrainer.Options { LabelColumnName = "Label", FeatureColumnName = "Vars", MaximumNumberOfIterations = 100, diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs b/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs index 312c484ef9..7734dedb49 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/SdcaTests.cs @@ -24,11 +24,11 @@ public void 
SdcaWorkout() .Fit(data.AsDynamic).Transform(data.AsDynamic); var binaryTrainer = ML.BinaryClassification.Trainers.SdcaCalibrated( - new SdcaCalibratedBinaryClassificationTrainer.Options { ConvergenceTolerance = 1e-2f, MaximumNumberOfIterations = 10 }); + new SdcaCalibratedBinaryTrainer.Options { ConvergenceTolerance = 1e-2f, MaximumNumberOfIterations = 10 }); TestEstimatorCore(binaryTrainer, binaryData); var nonCalibratedBinaryTrainer = ML.BinaryClassification.Trainers.SdcaNonCalibrated( - new SdcaNonCalibratedBinaryClassificationTrainer.Options { ConvergenceTolerance = 1e-2f, MaximumNumberOfIterations = 10 }); + new SdcaNonCalibratedBinaryTrainer.Options { ConvergenceTolerance = 1e-2f, MaximumNumberOfIterations = 10 }); TestEstimatorCore(nonCalibratedBinaryTrainer, binaryData); var regressionTrainer = ML.Regression.Trainers.Sdca( @@ -38,7 +38,7 @@ public void SdcaWorkout() var mcData = ML.Transforms.Conversion.MapValueToKey("Label").Fit(data.AsDynamic).Transform(data.AsDynamic); var mcTrainer = ML.MulticlassClassification.Trainers.Sdca( - new SdcaMulticlassClassificationTrainer.Options { ConvergenceTolerance = 1e-2f, MaximumNumberOfIterations = 10 }); + new SdcaMulticlassTrainer.Options { ConvergenceTolerance = 1e-2f, MaximumNumberOfIterations = 10 }); TestEstimatorCore(mcTrainer, mcData); Done(); diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/TreeEnsembleFeaturizerTest.cs b/test/Microsoft.ML.Tests/TrainerEstimators/TreeEnsembleFeaturizerTest.cs index 5938eba4b2..7d03b0746e 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/TreeEnsembleFeaturizerTest.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/TreeEnsembleFeaturizerTest.cs @@ -21,7 +21,7 @@ public void TreeEnsembleFeaturizerOutputSchemaTest() // Define a tree model whose trees will be extracted to construct a tree featurizer. var trainer = ML.BinaryClassification.Trainers.FastTree( - new FastTreeBinaryClassificationTrainer.Options + new FastTreeBinaryTrainer.Options { NumberOfThreads = 1, NumberOfTrees = 10, diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/TreeEstimators.cs b/test/Microsoft.ML.Tests/TrainerEstimators/TreeEstimators.cs index 6d85c6ea7e..d8ec78e585 100644 --- a/test/Microsoft.ML.Tests/TrainerEstimators/TreeEstimators.cs +++ b/test/Microsoft.ML.Tests/TrainerEstimators/TreeEstimators.cs @@ -29,7 +29,7 @@ public void FastTreeBinaryEstimator() var (pipe, dataView) = GetBinaryClassificationPipeline(); var trainer = ML.BinaryClassification.Trainers.FastTree( - new FastTreeBinaryClassificationTrainer.Options + new FastTreeBinaryTrainer.Options { NumberOfThreads = 1, NumberOfTrees = 10, @@ -70,7 +70,7 @@ public void GAMClassificationEstimator() { var (pipe, dataView) = GetBinaryClassificationPipeline(); - var trainer = new GamBinaryClassificationTrainer(Env, new GamBinaryClassificationTrainer.Options + var trainer = new GamBinaryTrainer(Env, new GamBinaryTrainer.Options { GainConfidenceLevel = 0, NumberOfIterations = 15, @@ -90,7 +90,7 @@ public void FastForestClassificationEstimator() var (pipe, dataView) = GetBinaryClassificationPipeline(); var trainer = ML.BinaryClassification.Trainers.FastForest( - new FastForestBinaryClassificationTrainer.Options + new FastForestBinaryTrainer.Options { NumberOfLeaves = 10, NumberOfTrees = 20, @@ -294,7 +294,7 @@ private void LightGbmHelper(bool useSoftmax, out string modelString, out List