diff --git a/src/python/docs/docstrings/ClassifierBestPerformanceSelector.txt b/src/python/docs/docstrings/ClassifierBestPerformanceSelector.txt
index c7f2449a..78336709 100644
--- a/src/python/docs/docstrings/ClassifierBestPerformanceSelector.txt
+++ b/src/python/docs/docstrings/ClassifierBestPerformanceSelector.txt
@@ -32,7 +32,4 @@

     .. index:: models, ensemble, classification

-    Example:
-       .. literalinclude:: /../nimbusml/examples/EnsembleClassifier.py
-              :language: python
     """
\ No newline at end of file
diff --git a/src/python/docs/docstrings/EnsembleClassifier.txt b/src/python/docs/docstrings/EnsembleClassifier.txt
index 3301f266..47cbdd13 100644
--- a/src/python/docs/docstrings/EnsembleClassifier.txt
+++ b/src/python/docs/docstrings/EnsembleClassifier.txt
@@ -51,7 +51,7 @@
        or ``"LogLossReduction"``.

-    :output_combiner: indicates how to combine the predictions of the different
+    :param output_combiner: indicates how to combine the predictions of the different
        models into a single prediction. There are five available output
        combiners for clasification:
diff --git a/src/python/docs/docstrings/EnsembleRegressor.txt b/src/python/docs/docstrings/EnsembleRegressor.txt
index e185307c..cb859674 100644
--- a/src/python/docs/docstrings/EnsembleRegressor.txt
+++ b/src/python/docs/docstrings/EnsembleRegressor.txt
@@ -51,7 +51,7 @@
        ``"RSquared"``.

-    :output_combiner: indicates how to combine the predictions of the different
+    :param output_combiner: indicates how to combine the predictions of the different
        models into a single prediction. There are five available output
        combiners for clasification:
@@ -126,7 +126,7 @@
        `

-    .. index:: models, ensemble, classification
+    .. index:: models, ensemble, regression

     Example:
        .. literalinclude:: /../nimbusml/examples/EnsembleRegressor.py
diff --git a/src/python/docs/docstrings/LinearSvmBinaryClassifier.txt b/src/python/docs/docstrings/LinearSvmBinaryClassifier.txt
index df805518..4a9cf5ad 100644
--- a/src/python/docs/docstrings/LinearSvmBinaryClassifier.txt
+++ b/src/python/docs/docstrings/LinearSvmBinaryClassifier.txt
@@ -5,11 +5,11 @@
     .. remarks::
        Linear SVM implements an algorithm that finds a hyperplane in the
        feature space for binary classification, by solving an SVM problem.
-       For instance, with feature values *f_0, f_1,..., f_{D-1}*, the
+       For instance, with feature values $f_0, f_1,..., f_{D-1}$, the
        prediction is given by determining what side of the hyperplane the
        point falls into. That is the same as the sign of the feautures'
-       weighted sum, i.e. *\sum_{i = 0}^{D-1} \left(w_i * f_i \right) + b*,
-       where *w_0, w_1,..., w_{D-1}* are the weights computed by the
+       weighted sum, i.e. $\sum_{i = 0}^{D-1} \left(w_i * f_i \right) + b$,
+       where $w_0, w_1,..., w_{D-1}$ are the weights computed by the
        algorithm, and *b* is the bias computed by the algorithm.

        This algorithm implemented is the PEGASOS method, which alternates
diff --git a/src/python/docs/docstrings/RegressorBestPerformanceSelector.txt b/src/python/docs/docstrings/RegressorBestPerformanceSelector.txt
index 83ba0116..285bcf89 100644
--- a/src/python/docs/docstrings/RegressorBestPerformanceSelector.txt
+++ b/src/python/docs/docstrings/RegressorBestPerformanceSelector.txt
@@ -28,9 +28,6 @@
        `

-    .. index:: models, ensemble, classification
+    .. index:: models, ensemble, regression

-    Example:
-       .. literalinclude:: /../nimbusml/examples/EnsembleClassifier.py
-              :language: python
     """
\ No newline at end of file
diff --git a/src/python/nimbusml/ensemble/ensembleclassifier.py b/src/python/nimbusml/ensemble/ensembleclassifier.py
index cf60f34d..d99e3b71 100644
--- a/src/python/nimbusml/ensemble/ensembleclassifier.py
+++ b/src/python/nimbusml/ensemble/ensembleclassifier.py
@@ -77,8 +77,7 @@ class EnsembleClassifier(core, BasePredictor, ClassifierMixin):
        ``"AccuracyMicro"``, ``"AccuracyMacro"``, ``"LogLoss"``, or
        ``"LogLossReduction"``.

-
-    :output_combiner: indicates how to combine the predictions of the different
+    :param output_combiner: indicates how to combine the predictions of the different
        models into a single prediction. There are five available output
        combiners for clasification:
@@ -97,8 +96,6 @@ class EnsembleClassifier(core, BasePredictor, ClassifierMixin):
        outputs of the trained models, weighted by the specified metric. The
        metric can be ``"AccuracyMicroAvg"`` or ``"AccuracyMacroAvg"``.

-    :param output_combiner: Output combiner.
-
     :param normalize: Specifies the type of automatic normalization used:

        * ``"Auto"``: if normalization is needed, it is performed
diff --git a/src/python/nimbusml/ensemble/ensembleregressor.py b/src/python/nimbusml/ensemble/ensembleregressor.py
index 45fb10f5..1b7aac76 100644
--- a/src/python/nimbusml/ensemble/ensembleregressor.py
+++ b/src/python/nimbusml/ensemble/ensembleregressor.py
@@ -77,8 +77,7 @@ class EnsembleRegressor(core, BasePredictor, RegressorMixin):
        can be ``"L1"``, ``"L2"``, ``"Rms"``, or ``"Loss"``, or
        ``"RSquared"``.

-
-    :output_combiner: indicates how to combine the predictions of the different
+    :param output_combiner: indicates how to combine the predictions of the different
        models into a single prediction. There are five available output
        combiners for clasification:
@@ -91,8 +90,6 @@ class EnsembleRegressor(core, BasePredictor, RegressorMixin):
        of the different models on a training instance, and the instance's
        label.

-    :param output_combiner: Output combiner.
-
     :param normalize: Specifies the type of automatic normalization used:

        * ``"Auto"``: if normalization is needed, it is performed
@@ -166,7 +163,7 @@ class EnsembleRegressor(core, BasePredictor, RegressorMixin):
        `

-    .. index:: models, ensemble, classification
+    .. index:: models, ensemble, regression

     Example:
        .. literalinclude:: /../nimbusml/examples/EnsembleRegressor.py
diff --git a/src/python/nimbusml/ensemble/output_combiner/__init__.py b/src/python/nimbusml/ensemble/output_combiner/__init__.py
index c71adc6d..957e0c37 100644
--- a/src/python/nimbusml/ensemble/output_combiner/__init__.py
+++ b/src/python/nimbusml/ensemble/output_combiner/__init__.py
@@ -13,7 +13,7 @@
     'ClassifierStacking',
     'ClassifierVoting',
     'ClassifierWeightedAverage',
-    'ClassifierAverage',
-    'ClassifierMedian',
-    'ClassifierStacking'
+    'RegressorAverage',
+    'RegressorMedian',
+    'RegressorStacking'
 ]
diff --git a/src/python/nimbusml/ensemble/sub_model_selector/classifierbestperformanceselector.py b/src/python/nimbusml/ensemble/sub_model_selector/classifierbestperformanceselector.py
index 09df140e..a20f68bc 100644
--- a/src/python/nimbusml/ensemble/sub_model_selector/classifierbestperformanceselector.py
+++ b/src/python/nimbusml/ensemble/sub_model_selector/classifierbestperformanceselector.py
@@ -58,9 +58,6 @@ class ClassifierBestPerformanceSelector(core):

     .. index:: models, ensemble, classification

-    Example:
-       .. literalinclude:: /../nimbusml/examples/EnsembleClassifier.py
-              :language: python
     """

     @trace
diff --git a/src/python/nimbusml/ensemble/sub_model_selector/regressorbestperformanceselector.py b/src/python/nimbusml/ensemble/sub_model_selector/regressorbestperformanceselector.py
index 52505e02..3411f09b 100644
--- a/src/python/nimbusml/ensemble/sub_model_selector/regressorbestperformanceselector.py
+++ b/src/python/nimbusml/ensemble/sub_model_selector/regressorbestperformanceselector.py
@@ -54,11 +54,8 @@ class RegressorBestPerformanceSelector(core):
        `

-    .. index:: models, ensemble, classification
+    .. index:: models, ensemble, regression

-    Example:
-       .. literalinclude:: /../nimbusml/examples/EnsembleClassifier.py
-              :language: python
     """

     @trace
diff --git a/src/python/nimbusml/internal/core/ensemble/ensembleclassifier.py b/src/python/nimbusml/internal/core/ensemble/ensembleclassifier.py
index 13850fc8..083413f1 100644
--- a/src/python/nimbusml/internal/core/ensemble/ensembleclassifier.py
+++ b/src/python/nimbusml/internal/core/ensemble/ensembleclassifier.py
@@ -77,8 +77,7 @@ class EnsembleClassifier(
        ``"AccuracyMicro"``, ``"AccuracyMacro"``, ``"LogLoss"``, or
        ``"LogLossReduction"``.

-
-    :output_combiner: indicates how to combine the predictions of the different
+    :param output_combiner: indicates how to combine the predictions of the different
        models into a single prediction. There are five available output
        combiners for clasification:
@@ -97,8 +96,6 @@ class EnsembleClassifier(
        outputs of the trained models, weighted by the specified metric. The
        metric can be ``"AccuracyMicroAvg"`` or ``"AccuracyMacroAvg"``.

-    :param output_combiner: Output combiner.
-
     :param normalize: Specifies the type of automatic normalization used:

        * ``"Auto"``: if normalization is needed, it is performed
diff --git a/src/python/nimbusml/internal/core/ensemble/ensembleregressor.py b/src/python/nimbusml/internal/core/ensemble/ensembleregressor.py
index 408b607d..cc0935c7 100644
--- a/src/python/nimbusml/internal/core/ensemble/ensembleregressor.py
+++ b/src/python/nimbusml/internal/core/ensemble/ensembleregressor.py
@@ -75,8 +75,7 @@ class EnsembleRegressor(
        can be ``"L1"``, ``"L2"``, ``"Rms"``, or ``"Loss"``, or
        ``"RSquared"``.

-
-    :output_combiner: indicates how to combine the predictions of the different
+    :param output_combiner: indicates how to combine the predictions of the different
        models into a single prediction. There are five available output
        combiners for clasification:
@@ -89,8 +88,6 @@ class EnsembleRegressor(
        of the different models on a training instance, and the instance's
        label.

-    :param output_combiner: Output combiner.
-
     :param normalize: Specifies the type of automatic normalization used:

        * ``"Auto"``: if normalization is needed, it is performed
@@ -164,7 +161,7 @@ class EnsembleRegressor(
        `

-    .. index:: models, ensemble, classification
+    .. index:: models, ensemble, regression

     Example:
        .. literalinclude:: /../nimbusml/examples/EnsembleRegressor.py
diff --git a/src/python/nimbusml/internal/core/ensemble/sub_model_selector/classifierbestperformanceselector.py b/src/python/nimbusml/internal/core/ensemble/sub_model_selector/classifierbestperformanceselector.py
index baf96b79..214e740a 100644
--- a/src/python/nimbusml/internal/core/ensemble/sub_model_selector/classifierbestperformanceselector.py
+++ b/src/python/nimbusml/internal/core/ensemble/sub_model_selector/classifierbestperformanceselector.py
@@ -58,9 +58,6 @@ class ClassifierBestPerformanceSelector(Component):

     .. index:: models, ensemble, classification

-    Example:
-       .. literalinclude:: /../nimbusml/examples/EnsembleClassifier.py
-              :language: python
     """

     @trace
diff --git a/src/python/nimbusml/internal/core/ensemble/sub_model_selector/regressorbestperformanceselector.py b/src/python/nimbusml/internal/core/ensemble/sub_model_selector/regressorbestperformanceselector.py
index 51b07c66..23919068 100644
--- a/src/python/nimbusml/internal/core/ensemble/sub_model_selector/regressorbestperformanceselector.py
+++ b/src/python/nimbusml/internal/core/ensemble/sub_model_selector/regressorbestperformanceselector.py
@@ -54,11 +54,8 @@ class RegressorBestPerformanceSelector(Component):
        `

-    .. index:: models, ensemble, classification
+    .. index:: models, ensemble, regression

-    Example:
-       .. literalinclude:: /../nimbusml/examples/EnsembleClassifier.py
-              :language: python
     """

     @trace
diff --git a/src/python/nimbusml/internal/core/linear_model/linearsvmbinaryclassifier.py b/src/python/nimbusml/internal/core/linear_model/linearsvmbinaryclassifier.py
index 54bff625..41996a33 100644
--- a/src/python/nimbusml/internal/core/linear_model/linearsvmbinaryclassifier.py
+++ b/src/python/nimbusml/internal/core/linear_model/linearsvmbinaryclassifier.py
@@ -26,11 +26,11 @@ class LinearSvmBinaryClassifier(
     .. remarks::
        Linear SVM implements an algorithm that finds a hyperplane in the
        feature space for binary classification, by solving an SVM problem.
-       For instance, with feature values *f_0, f_1,..., f_{D-1}*, the
+       For instance, with feature values $f_0, f_1,..., f_{D-1}$, the
        prediction is given by determining what side of the hyperplane the
        point falls into. That is the same as the sign of the feautures'
-       weighted sum, i.e. *\sum_{i = 0}^{D-1} \left(w_i * f_i \right) + b*,
-       where *w_0, w_1,..., w_{D-1}* are the weights computed by the
+       weighted sum, i.e. $\sum_{i = 0}^{D-1} \left(w_i * f_i \right) + b$,
+       where $w_0, w_1,..., w_{D-1}$ are the weights computed by the
        algorithm, and *b* is the bias computed by the algorithm.

        This algorithm implemented is the PEGASOS method, which alternates
diff --git a/src/python/nimbusml/linear_model/linearsvmbinaryclassifier.py b/src/python/nimbusml/linear_model/linearsvmbinaryclassifier.py
index 30d1b927..4f900aea 100644
--- a/src/python/nimbusml/linear_model/linearsvmbinaryclassifier.py
+++ b/src/python/nimbusml/linear_model/linearsvmbinaryclassifier.py
@@ -29,11 +29,11 @@ class LinearSvmBinaryClassifier(
     .. remarks::
        Linear SVM implements an algorithm that finds a hyperplane in the
        feature space for binary classification, by solving an SVM problem.
-       For instance, with feature values *f_0, f_1,..., f_{D-1}*, the
+       For instance, with feature values $f_0, f_1,..., f_{D-1}$, the
        prediction is given by determining what side of the hyperplane the
        point falls into. That is the same as the sign of the feautures'
-       weighted sum, i.e. *\sum_{i = 0}^{D-1} \left(w_i * f_i \right) + b*,
-       where *w_0, w_1,..., w_{D-1}* are the weights computed by the
+       weighted sum, i.e. $\sum_{i = 0}^{D-1} \left(w_i * f_i \right) + b$,
+       where $w_0, w_1,..., w_{D-1}$ are the weights computed by the
        algorithm, and *b* is the bias computed by the algorithm.

        This algorithm implemented is the PEGASOS method, which alternates
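The LinearSvmBinaryClassifier docstrings patched above describe the prediction rule as the sign of the features' weighted sum plus the bias, $\sum_{i=0}^{D-1} w_i f_i + b$. A minimal plain-Python sketch of that rule follows; the weights, bias, and feature values are made-up illustrative numbers, not anything produced by nimbusml:

# Sketch of the decision rule documented above: the predicted class is the
# sign of sum_{i=0}^{D-1} w_i * f_i + b, i.e. which side of the hyperplane
# the point falls on. All numbers below are arbitrary illustrative values.

def linear_svm_predict(weights, bias, features):
    # Weighted sum of the feature values plus the bias term.
    score = sum(w * f for w, f in zip(weights, features)) + bias
    # The sign of the score gives the predicted class.
    return 1 if score >= 0 else -1

weights = [0.4, -1.2, 0.7]   # w_0, ..., w_{D-1} (illustrative)
bias = 0.1                   # b (illustrative)
print(linear_svm_predict(weights, bias, [1.0, 0.5, 2.0]))  # prints 1
print(linear_svm_predict(weights, bias, [0.0, 2.0, 0.5]))  # prints -1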