# typed: true

# DO NOT EDIT MANUALLY
# This is an autogenerated file for types exported from the `rumale-ensemble` gem.
# Please instead update this file by running `bin/tapioca gem rumale-ensemble`.

# Rumale is a machine learning library in Ruby.
#
# source://rumale-ensemble//lib/rumale/ensemble/version.rb#4
module Rumale; end

# This module consists of the classes that implement ensemble-based methods.
#
# source://rumale-ensemble//lib/rumale/ensemble/version.rb#6
module Rumale::Ensemble; end
|
|
|
|
# AdaBoostClassifier is a class that implements AdaBoost (SAMME.R) for classification.
# This class uses decision tree for a weak learner.
#
# *Reference*
# - Zhu, J., Rosset, S., Zou, H., and Hastie, T., "Multi-class AdaBoost," Technical Report No. 430, Department of Statistics, University of Michigan, 2005.
#
# @example
#   require 'rumale/ensemble/ada_boost_classifier'
#
#   estimator =
#     Rumale::Ensemble::AdaBoostClassifier.new(
#       n_estimators: 10, criterion: 'gini', max_depth: 3, max_leaf_nodes: 10, min_samples_leaf: 5, random_seed: 1)
#   estimator.fit(training_samples, training_labels)
#   results = estimator.predict(testing_samples)
#
# source://rumale-ensemble//lib/rumale/ensemble/ada_boost_classifier.rb#26
class Rumale::Ensemble::AdaBoostClassifier < ::Rumale::Base::Estimator
  include ::Rumale::Base::Classifier

  # Create a new classifier with AdaBoost.
  #
  # @param n_estimators [Integer] The number of decision trees for constructing AdaBoost classifier.
  # @param criterion [String] The function to evaluate splitting point. Supported criteria are 'gini' and 'entropy'.
  # @param max_depth [Integer] The maximum depth of the tree.
  #   If nil is given, decision tree grows without concern for depth.
  # @param max_leaf_nodes [Integer] The maximum number of leaves on decision tree.
  #   If nil is given, number of leaves is not limited.
  # @param min_samples_leaf [Integer] The minimum number of samples at a leaf node.
  # @param max_features [Integer] The number of features to consider when searching optimal split point.
  #   If nil is given, split process considers all features.
  # @param random_seed [Integer] The seed value using to initialize the random generator.
  #   It is used to randomly determine the order of features when deciding splitting point.
  # @return [AdaBoostClassifier] a new instance of AdaBoostClassifier
  #
  # source://rumale-ensemble//lib/rumale/ensemble/ada_boost_classifier.rb#58
  def initialize(n_estimators: T.unsafe(nil), criterion: T.unsafe(nil), max_depth: T.unsafe(nil), max_leaf_nodes: T.unsafe(nil), min_samples_leaf: T.unsafe(nil), max_features: T.unsafe(nil), random_seed: T.unsafe(nil)); end

  # Return the class labels.
  #
  # @return [Numo::Int32] (size: n_classes)
  #
  # source://rumale-ensemble//lib/rumale/ensemble/ada_boost_classifier.rb#35
  def classes; end

  # Calculate confidence scores for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to compute the scores.
  # @return [Numo::DFloat] (shape: [n_samples, n_classes]) Confidence score per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/ada_boost_classifier.rb#136
  def decision_function(x); end

  # Return the set of estimators.
  #
  # @return [Array<DecisionTreeClassifier>]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/ada_boost_classifier.rb#31
  def estimators; end

  # Return the importance for each feature.
  #
  # @return [Numo::DFloat] (size: n_features)
  #
  # source://rumale-ensemble//lib/rumale/ensemble/ada_boost_classifier.rb#39
  def feature_importances; end

  # Fit the model with given training data.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
  # @param y [Numo::Int32] (shape: [n_samples]) The labels to be used for fitting the model.
  # @return [AdaBoostClassifier] The learned classifier itself.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/ada_boost_classifier.rb#79
  def fit(x, y); end

  # Predict class labels for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the labels.
  # @return [Numo::Int32] (shape: [n_samples]) Predicted class label per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/ada_boost_classifier.rb#153
  def predict(x); end

  # Predict probability for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the probabilities.
  # @return [Numo::DFloat] (shape: [n_samples, n_classes]) Predicted probability of each class per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/ada_boost_classifier.rb#165
  def predict_proba(x); end

  # Return the random generator for random selection of feature index.
  #
  # @return [Random]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/ada_boost_classifier.rb#43
  def rng; end
end
|
|
|
|
# AdaBoostRegressor is a class that implements AdaBoost for regression.
# This class uses decision tree for a weak learner.
#
# *Reference*
# - Shrestha, D. L., and Solomatine, D. P., "Experiments with AdaBoost.RT, an Improved Boosting Scheme for Regression," Neural Computation 18 (7), pp. 1678--1710, 2006.
#
# @example
#   require 'rumale/ensemble/ada_boost_regressor'
#
#   estimator =
#     Rumale::Ensemble::AdaBoostRegressor.new(
#       n_estimators: 10, criterion: 'mse', max_depth: 3, max_leaf_nodes: 10, min_samples_leaf: 5, random_seed: 1)
#   estimator.fit(training_samples, training_values)
#   results = estimator.predict(testing_samples)
#
# source://rumale-ensemble//lib/rumale/ensemble/ada_boost_regressor.rb#26
class Rumale::Ensemble::AdaBoostRegressor < ::Rumale::Base::Estimator
  include ::Rumale::Base::Regressor

  # Create a new regressor with AdaBoost.
  #
  # @param n_estimators [Integer] The number of decision trees for constructing AdaBoost regressor.
  # @param threshold [Float] The threshold for delimiting correct and incorrect predictions. That is constrained to [0, 1]
  # @param exponent [Float] The exponent for the weight of each weak learner.
  # @param criterion [String] The function to evaluate splitting point. Supported criteria are 'mse' and 'mae'.
  # @param max_depth [Integer] The maximum depth of the tree.
  #   If nil is given, decision tree grows without concern for depth.
  # @param max_leaf_nodes [Integer] The maximum number of leaves on decision tree.
  #   If nil is given, number of leaves is not limited.
  # @param min_samples_leaf [Integer] The minimum number of samples at a leaf node.
  # @param max_features [Integer] The number of features to consider when searching optimal split point.
  #   If nil is given, split process considers all features.
  # @param random_seed [Integer] The seed value using to initialize the random generator.
  #   It is used to randomly determine the order of features when deciding splitting point.
  # @return [AdaBoostRegressor] a new instance of AdaBoostRegressor
  #
  # source://rumale-ensemble//lib/rumale/ensemble/ada_boost_regressor.rb#60
  def initialize(n_estimators: T.unsafe(nil), threshold: T.unsafe(nil), exponent: T.unsafe(nil), criterion: T.unsafe(nil), max_depth: T.unsafe(nil), max_leaf_nodes: T.unsafe(nil), min_samples_leaf: T.unsafe(nil), max_features: T.unsafe(nil), random_seed: T.unsafe(nil)); end

  # Return the weight for each weak learner.
  #
  # @return [Numo::DFloat] (size: n_estimates)
  #
  # source://rumale-ensemble//lib/rumale/ensemble/ada_boost_regressor.rb#35
  def estimator_weights; end

  # Return the set of estimators.
  #
  # @return [Array<DecisionTreeRegressor>]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/ada_boost_regressor.rb#31
  def estimators; end

  # Return the importance for each feature.
  #
  # @return [Numo::DFloat] (size: n_features)
  #
  # source://rumale-ensemble//lib/rumale/ensemble/ada_boost_regressor.rb#39
  def feature_importances; end

  # Fit the model with given training data.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
  # @param y [Numo::DFloat] (shape: [n_samples]) The target values to be used for fitting the model.
  # @return [AdaBoostRegressor] The learned regressor itself.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/ada_boost_regressor.rb#83
  def fit(x, y); end

  # Predict values for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the values.
  # @return [Numo::DFloat] (shape: [n_samples, n_outputs]) Predicted value per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/ada_boost_regressor.rb#154
  def predict(x); end

  # Return the random generator for random selection of feature index.
  #
  # @return [Random]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/ada_boost_regressor.rb#43
  def rng; end
end
|
|
|
|
# ExtraTreesClassifier is a class that implements extremely randomized trees for classification.
# The algorithm of extremely randomized trees is similar to random forest.
# The features of the algorithm of extremely randomized trees are
# not to apply the bagging procedure and to randomly select the threshold for splitting feature space.
#
# *Reference*
# - Geurts, P., Ernst, D., and Wehenkel, L., "Extremely randomized trees," Machine Learning, vol. 63 (1), pp. 3--42, 2006.
#
# @example
#   require 'rumale/ensemble/extra_trees_classifier'
#
#   estimator =
#     Rumale::Ensemble::ExtraTreesClassifier.new(
#       n_estimators: 10, criterion: 'gini', max_depth: 3, max_leaf_nodes: 10, min_samples_leaf: 5, random_seed: 1)
#   estimator.fit(training_samples, training_labels)
#   results = estimator.predict(testing_samples)
#
# source://rumale-ensemble//lib/rumale/ensemble/extra_trees_classifier.rb#26
class Rumale::Ensemble::ExtraTreesClassifier < ::Rumale::Ensemble::RandomForestClassifier
  # Create a new classifier with extremely randomized trees.
  #
  # @param n_estimators [Integer] The number of trees for constructing extremely randomized trees.
  # @param criterion [String] The function to evaluate splitting point. Supported criteria are 'gini' and 'entropy'.
  # @param max_depth [Integer] The maximum depth of the tree.
  #   If nil is given, extra tree grows without concern for depth.
  # @param max_leaf_nodes [Integer] The maximum number of leaves on extra tree.
  #   If nil is given, number of leaves is not limited.
  # @param min_samples_leaf [Integer] The minimum number of samples at a leaf node.
  # @param max_features [Integer] The number of features to consider when searching optimal split point.
  #   If nil is given, split process considers 'Math.sqrt(n_features)' features.
  # @param n_jobs [Integer] The number of jobs for running the fit method in parallel.
  #   If nil is given, the method does not execute in parallel.
  #   If zero or less is given, it becomes equal to the number of processors.
  #   This parameter is ignored if the Parallel gem is not loaded.
  # @param random_seed [Integer] The seed value using to initialize the random generator.
  #   It is used to randomly determine the order of features when deciding splitting point.
  # @return [ExtraTreesClassifier] a new instance of ExtraTreesClassifier
  #
  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_classifier.rb#60
  def initialize(n_estimators: T.unsafe(nil), criterion: T.unsafe(nil), max_depth: T.unsafe(nil), max_leaf_nodes: T.unsafe(nil), min_samples_leaf: T.unsafe(nil), max_features: T.unsafe(nil), n_jobs: T.unsafe(nil), random_seed: T.unsafe(nil)); end

  # Return the index of the leaf that each sample reached.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the labels.
  # @return [Numo::Int32] (shape: [n_samples, n_estimators]) Leaf index for sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_classifier.rb#123
  def apply(x); end

  # Return the class labels.
  #
  # @return [Numo::Int32] (size: n_classes)
  #
  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_classifier.rb#33
  def classes; end

  # Return the set of estimators.
  #
  # @return [Array<ExtraTreeClassifier>]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_classifier.rb#29
  def estimators; end

  # Return the importance for each feature.
  #
  # @return [Numo::DFloat] (size: n_features)
  #
  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_classifier.rb#37
  def feature_importances; end

  # Fit the model with given training data.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
  # @param y [Numo::Int32] (shape: [n_samples]) The labels to be used for fitting the model.
  # @return [ExtraTreesClassifier] The learned classifier itself.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_classifier.rb#71
  def fit(x, y); end

  # Predict class labels for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the labels.
  # @return [Numo::Int32] (shape: [n_samples]) Predicted class label per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_classifier.rb#103
  def predict(x); end

  # Predict probability for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the probabilities.
  # @return [Numo::DFloat] (shape: [n_samples, n_classes]) Predicted probability of each class per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_classifier.rb#113
  def predict_proba(x); end

  # Return the random generator for random selection of feature index.
  #
  # @return [Random]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_classifier.rb#41
  def rng; end

  private

  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_classifier.rb#131
  def plant_tree(rnd_seed); end
end
|
|
|
|
# ExtraTreesRegressor is a class that implements extremely randomized trees for regression
# The algorithm of extremely randomized trees is similar to random forest.
# The features of the algorithm of extremely randomized trees are
# not to apply the bagging procedure and to randomly select the threshold for splitting feature space.
#
# *Reference*
# - Geurts, P., Ernst, D., and Wehenkel, L., "Extremely randomized trees," Machine Learning, vol. 63 (1), pp. 3--42, 2006.
#
# @example
#   require 'rumale/ensemble/extra_trees_regressor'
#
#   estimator =
#     Rumale::Ensemble::ExtraTreesRegressor.new(
#       n_estimators: 10, criterion: 'mse', max_depth: 3, max_leaf_nodes: 10, min_samples_leaf: 5, random_seed: 1)
#   estimator.fit(training_samples, training_values)
#   results = estimator.predict(testing_samples)
#
# source://rumale-ensemble//lib/rumale/ensemble/extra_trees_regressor.rb#26
class Rumale::Ensemble::ExtraTreesRegressor < ::Rumale::Ensemble::RandomForestRegressor
  # Create a new regressor with extremely randomized trees.
  #
  # @param n_estimators [Integer] The number of trees for constructing extremely randomized trees.
  # @param criterion [String] The function to evaluate splitting point. Supported criteria are 'gini' and 'entropy'.
  # @param max_depth [Integer] The maximum depth of the tree.
  #   If nil is given, extra tree grows without concern for depth.
  # @param max_leaf_nodes [Integer] The maximum number of leaves on extra tree.
  #   If nil is given, number of leaves is not limited.
  # @param min_samples_leaf [Integer] The minimum number of samples at a leaf node.
  # @param max_features [Integer] The number of features to consider when searching optimal split point.
  #   If nil is given, split process considers 'Math.sqrt(n_features)' features.
  # @param n_jobs [Integer] The number of jobs for running the fit and predict methods in parallel.
  #   If nil is given, the methods do not execute in parallel.
  #   If zero or less is given, it becomes equal to the number of processors.
  #   This parameter is ignored if the Parallel gem is not loaded.
  # @param random_seed [Integer] The seed value using to initialize the random generator.
  #   It is used to randomly determine the order of features when deciding splitting point.
  # @return [ExtraTreesRegressor] a new instance of ExtraTreesRegressor
  #
  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_regressor.rb#56
  def initialize(n_estimators: T.unsafe(nil), criterion: T.unsafe(nil), max_depth: T.unsafe(nil), max_leaf_nodes: T.unsafe(nil), min_samples_leaf: T.unsafe(nil), max_features: T.unsafe(nil), n_jobs: T.unsafe(nil), random_seed: T.unsafe(nil)); end

  # Return the index of the leaf that each sample reached.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to assign each leaf.
  # @return [Numo::Int32] (shape: [n_samples, n_estimators]) Leaf index for sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_regressor.rb#108
  def apply(x); end

  # Return the set of estimators.
  #
  # @return [Array<ExtraTreeRegressor>]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_regressor.rb#29
  def estimators; end

  # Return the importance for each feature.
  #
  # @return [Numo::DFloat] (size: n_features)
  #
  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_regressor.rb#33
  def feature_importances; end

  # Fit the model with given training data.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
  # @param y [Numo::DFloat] (shape: [n_samples, n_outputs]) The target values to be used for fitting the model.
  # @return [ExtraTreesRegressor] The learned regressor itself.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_regressor.rb#67
  def fit(x, y); end

  # Predict values for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the values.
  # @return [Numo::DFloat] (shape: [n_samples, n_outputs]) Predicted value per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_regressor.rb#98
  def predict(x); end

  # Return the random generator for random selection of feature index.
  #
  # @return [Random]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_regressor.rb#37
  def rng; end

  private

  # source://rumale-ensemble//lib/rumale/ensemble/extra_trees_regressor.rb#116
  def plant_tree(rnd_seed); end
end
|
|
|
|
# GradientBoostingClassifier is a class that implements gradient tree boosting for classification.
# The class use negative binomial log-likelihood for the loss function.
# For multiclass classification problem, it uses one-vs-the-rest strategy.
#
# *Reference*
# - Friedman, J H., "Greedy Function Approximation: A Gradient Boosting Machine," Annals of Statistics, 29 (5), pp. 1189--1232, 2001.
# - Friedman, J H., "Stochastic Gradient Boosting," Computational Statistics and Data Analysis, 38 (4), pp. 367--378, 2002.
# - Chen, T., and Guestrin, C., "XGBoost: A Scalable Tree Boosting System," Proc. KDD'16, pp. 785--794, 2016.
#
# @example
#   require 'rumale/ensemble/gradient_boosting_classifier'
#
#   estimator =
#     Rumale::Ensemble::GradientBoostingClassifier.new(
#       n_estimators: 100, learning_rate: 0.3, reg_lambda: 0.001, random_seed: 1)
#   estimator.fit(training_samples, training_values)
#   results = estimator.predict(testing_samples)
#
# source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#29
class Rumale::Ensemble::GradientBoostingClassifier < ::Rumale::Base::Estimator
  include ::Rumale::Base::Classifier

  # Create a new classifier with gradient tree boosting.
  #
  # @param n_estimators [Integer] The number of trees for constructing classifier.
  # @param learning_rate [Float] The boosting learning rate
  # @param reg_lambda [Float] The L2 regularization term on weight.
  # @param subsample [Float] The subsampling ratio of the training samples.
  # @param max_depth [Integer] The maximum depth of the tree.
  #   If nil is given, decision tree grows without concern for depth.
  # @param max_leaf_nodes [Integer] The maximum number of leaves on decision tree.
  #   If nil is given, number of leaves is not limited.
  # @param min_samples_leaf [Integer] The minimum number of samples at a leaf node.
  # @param max_features [Integer] The number of features to consider when searching optimal split point.
  #   If nil is given, split process considers all features.
  # @param n_jobs [Integer] The number of jobs for running the fit and predict methods in parallel.
  #   If nil is given, the methods do not execute in parallel.
  #   If zero or less is given, it becomes equal to the number of processors.
  #   This parameter is ignored if the Parallel gem is not loaded.
  # @param random_seed [Integer] The seed value using to initialize the random generator.
  #   It is used to randomly determine the order of features when deciding splitting point.
  # @return [GradientBoostingClassifier] a new instance of GradientBoostingClassifier
  #
  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#68
  def initialize(n_estimators: T.unsafe(nil), learning_rate: T.unsafe(nil), reg_lambda: T.unsafe(nil), subsample: T.unsafe(nil), max_depth: T.unsafe(nil), max_leaf_nodes: T.unsafe(nil), min_samples_leaf: T.unsafe(nil), max_features: T.unsafe(nil), n_jobs: T.unsafe(nil), random_seed: T.unsafe(nil)); end

  # Return the index of the leaf that each sample reached.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the labels.
  # @return [Numo::Int32] (shape: [n_samples, n_estimators, n_classes]) Leaf index for sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#172
  def apply(x); end

  # Return the class labels.
  #
  # @return [Numo::Int32] (size: n_classes)
  #
  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#38
  def classes; end

  # Calculate confidence scores for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to compute the scores.
  # @return [Numo::DFloat] (shape: [n_samples, n_classes]) Confidence score per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#127
  def decision_function(x); end

  # Return the set of estimators.
  #
  # @return [Array<GradientTreeRegressor>] or [Array<Array<GradientTreeRegressor>>]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#34
  def estimators; end

  # Return the importance for each feature.
  # The feature importances are calculated based on the numbers of times the feature is used for splitting.
  #
  # @return [Numo::DFloat] (size: n_features)
  #
  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#43
  def feature_importances; end

  # Fit the model with given training data.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
  # @param y [Numo::Int32] (shape: [n_samples]) The labels to be used for fitting the model.
  # @return [GradientBoostingClassifier] The learned classifier itself.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#92
  def fit(x, y); end

  # Predict class labels for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the labels.
  # @return [Numo::Int32] (shape: [n_samples]) Predicted class label per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#142
  def predict(x); end

  # Predict probability for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the probabilities.
  # @return [Numo::DFloat] (shape: [n_samples, n_classes]) Predicted probability of each class per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#154
  def predict_proba(x); end

  # Return the random generator for random selection of feature index.
  #
  # @return [Random]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#47
  def rng; end

  private

  # for debug
  #
  # def loss(y_true, y_pred)
  #   # y_true in {-1, 1}
  #   Numo::NMath.log(1.0 + Numo::NMath.exp(-2.0 * y_true * y_pred)).mean
  # end
  #
  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#220
  def gradient(y_true, y_pred); end

  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#225
  def hessian(y_true, y_pred); end

  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#239
  def multiclass_base_predictions(y); end

  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#257
  def multiclass_estimators(x, y); end

  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#272
  def multiclass_feature_importances; end

  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#281
  def multiclass_scores(x); end

  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#186
  def partial_fit(x, y, init_pred); end

  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_classifier.rb#230
  def plant_tree(sub_rng); end
end
|
|
|
|
# GradientBoostingRegressor is a class that implements gradient tree boosting for regression.
|
|
# The class use L2 loss for the loss function.
|
|
#
|
|
# *Reference*
|
|
# - Friedman, J H. "Greedy Function Approximation: A Gradient Boosting Machine," Annals of Statistics, 29 (5), pp. 1189--1232, 2001.
|
|
# - Friedman, J H. "Stochastic Gradient Boosting," Computational Statistics and Data Analysis, 38 (4), pp. 367--378, 2002.
|
|
# - Chen, T., and Guestrin, C., "XGBoost: A Scalable Tree Boosting System," Proc. KDD'16, pp. 785--794, 2016.
|
|
#
|
|
# @example
|
|
# require 'rumale/ensemble/gradient_boosting_regressor'
|
|
#
|
|
# estimator =
|
|
# Rumale::Ensemble::GradientBoostingRegressor.new(
|
|
# n_estimators: 100, learning_rate: 0.3, reg_lambda: 0.001, random_seed: 1)
|
|
# estimator.fit(training_samples, traininig_values)
|
|
# results = estimator.predict(testing_samples)
|
|
#
|
|
# source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_regressor.rb#28
|
|
class Rumale::Ensemble::GradientBoostingRegressor < ::Rumale::Base::Estimator
|
|
include ::Rumale::Base::Regressor
|
|
|
|
# Create a new regressor with gradient tree boosting.
|
|
#
|
|
# @param n_estimators [Integer] The numeber of trees for contructing regressor.
|
|
# @param learning_rate [Float] The boosting learining rate
|
|
# @param reg_lambda [Float] The L2 regularization term on weight.
|
|
# @param subsample [Float] The subsampling ratio of the training samples.
|
|
# @param max_depth [Integer] The maximum depth of the tree.
|
|
# If nil is given, decision tree grows without concern for depth.
|
|
# @param max_leaf_nodes [Integer] The maximum number of leaves on decision tree.
|
|
# If nil is given, number of leaves is not limited.
|
|
# @param min_samples_leaf [Integer] The minimum number of samples at a leaf node.
|
|
# @param max_features [Integer] The number of features to consider when searching optimal split point.
|
|
# If nil is given, split process considers all features.
|
|
# @param n_jobs [Integer] The number of jobs for running the fit and predict methods in parallel.
|
|
# If nil is given, the methods do not execute in parallel.
|
|
# If zero or less is given, it becomes equal to the number of processors.
|
|
# This parameter is ignored if the Parallel gem is not loaded.
|
|
# @param random_seed [Integer] The seed value using to initialize the random generator.
|
|
# It is used to randomly determine the order of features when deciding spliting point.
|
|
# @return [GradientBoostingRegressor] a new instance of GradientBoostingRegressor
|
|
#
|
|
# source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_regressor.rb#63
|
|
def initialize(n_estimators: T.unsafe(nil), learning_rate: T.unsafe(nil), reg_lambda: T.unsafe(nil), subsample: T.unsafe(nil), max_depth: T.unsafe(nil), max_leaf_nodes: T.unsafe(nil), min_samples_leaf: T.unsafe(nil), max_features: T.unsafe(nil), n_jobs: T.unsafe(nil), random_seed: T.unsafe(nil)); end
|
|
|
|
# Return the index of the leaf that each sample reached.
|
|
#
|
|
# @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the values.
|
|
# @return [Numo::Int32] (shape: [n_samples, n_estimators]) Leaf index for sample.
|
|
#
|
|
# source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_regressor.rb#128
|
|
def apply(x); end
|
|
|
|
# Return the set of estimators.
|
|
#
|
|
# @return [Array<GradientTreeRegressor>] or [Array<Array<GradientTreeRegressor>>]
|
|
#
|
|
# source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_regressor.rb#33
|
|
def estimators; end
|
|
|
|
# Return the importance for each feature.
|
|
# The feature importances are calculated based on the numbers of times the feature is used for splitting.
|
|
#
|
|
# @return [Numo::DFloat] (size: n_features)
|
|
#
|
|
# source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_regressor.rb#38
|
|
def feature_importances; end
|
|
|
|
# Fit the model with given training data.
|
|
#
|
|
# @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
|
|
# @param y [Numo::DFloat] (shape: [n_samples]) The target values to be used for fitting the model.
|
|
# @return [GradientBoostingRegressor] The learned regressor itself.
|
|
#
|
|
# source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_regressor.rb#87
|
|
def fit(x, y); end
|
|
|
|
# Predict values for samples.
|
|
#
|
|
# @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the values.
|
|
# @return [Numo::DFloat] (shape: [n_samples]) Predicted values per sample.
|
|
#
|
|
# source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_regressor.rb#113
|
|
def predict(x); end
|
|
|
|
# Return the random generator for random selection of feature index.
|
|
#
|
|
# @return [Random]
|
|
#
|
|
# source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_regressor.rb#42
|
|
def rng; end
|
|
|
|
private
|
|
|
|
# for debug
|
|
#
|
|
# def loss(y_true, y_pred)
|
|
# ((y_true - y_pred)**2).mean
|
|
# end
|
|
#
|
|
# source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_regressor.rb#173
|
|
def gradient(y_true, y_pred); end
|
|
|
|
  # NOTE(review): takes only a sample count, so presumably returns constant
  # second-derivative terms for n_samples — confirm in the gem source.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_regressor.rb#177
  def hessian(n_samples); end
|
|
|
|
  # NOTE(review): presumably fits one set of trees per output variable for
  # multi-target regression — confirm in the gem source.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_regressor.rb#190
  def multivar_estimators(x, y); end
|
|
|
|
  # NOTE(review): presumably aggregates feature importances across the
  # per-output estimator sets — confirm in the gem source.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_regressor.rb#199
  def multivar_feature_importances; end
|
|
|
|
  # NOTE(review): presumably predicts each output variable with its own
  # estimator set — confirm in the gem source.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_regressor.rb#208
  def multivar_predict(x); end
|
|
|
|
  # NOTE(review): presumably fits the boosting stages for a single target,
  # starting from the initial prediction init_pred — confirm in the gem source.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_regressor.rb#140
  def partial_fit(x, y, init_pred); end
|
|
|
|
  # NOTE(review): presumably builds one gradient tree regressor using sub_rng
  # for randomized feature selection — confirm in the gem source.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/gradient_boosting_regressor.rb#181
  def plant_tree(sub_rng); end
|
|
end
|
|
|
|
# RandomForestClassifier is a class that implements random forest for classification.
#
# @example
#   require 'rumale/ensemble/random_forest_classifier'
#
#   estimator =
#     Rumale::Ensemble::RandomForestClassifier.new(
#       n_estimators: 10, criterion: 'gini', max_depth: 3, max_leaf_nodes: 10, min_samples_leaf: 5, random_seed: 1)
#   estimator.fit(training_samples, training_labels)
#   results = estimator.predict(testing_samples)
#
# source://rumale-ensemble//lib/rumale/ensemble/random_forest_classifier.rb#23
class Rumale::Ensemble::RandomForestClassifier < ::Rumale::Base::Estimator
  include ::Rumale::Base::Classifier

  # Create a new classifier with random forest.
  #
  # @param n_estimators [Integer] The number of decision trees for constructing random forest.
  # @param criterion [String] The function to evaluate splitting point. Supported criteria are 'gini' and 'entropy'.
  # @param max_depth [Integer] The maximum depth of the tree.
  #   If nil is given, decision tree grows without concern for depth.
  # @param max_leaf_nodes [Integer] The maximum number of leaves on decision tree.
  #   If nil is given, number of leaves is not limited.
  # @param min_samples_leaf [Integer] The minimum number of samples at a leaf node.
  # @param max_features [Integer] The number of features to consider when searching optimal split point.
  #   If nil is given, split process considers 'Math.sqrt(n_features)' features.
  # @param n_jobs [Integer] The number of jobs for running the fit method in parallel.
  #   If nil is given, the method does not execute in parallel.
  #   If zero or less is given, it becomes equal to the number of processors.
  #   This parameter is ignored if the Parallel gem is not loaded.
  # @param random_seed [Integer] The seed value used to initialize the random generator.
  #   It is used to randomly determine the order of features when deciding splitting point.
  # @return [RandomForestClassifier] a new instance of RandomForestClassifier
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_classifier.rb#59
  def initialize(n_estimators: T.unsafe(nil), criterion: T.unsafe(nil), max_depth: T.unsafe(nil), max_leaf_nodes: T.unsafe(nil), min_samples_leaf: T.unsafe(nil), max_features: T.unsafe(nil), n_jobs: T.unsafe(nil), random_seed: T.unsafe(nil)); end

  # Return the index of the leaf that each sample reached.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the labels.
  # @return [Numo::Int32] (shape: [n_samples, n_estimators]) Leaf index for sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_classifier.rb#154
  def apply(x); end

  # Return the class labels.
  #
  # @return [Numo::Int32] (size: n_classes)
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_classifier.rb#32
  def classes; end

  # Return the set of estimators.
  #
  # @return [Array<DecisionTreeClassifier>]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_classifier.rb#28
  def estimators; end

  # Return the importance for each feature.
  #
  # @return [Numo::DFloat] (size: n_features)
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_classifier.rb#36
  def feature_importances; end

  # Fit the model with given training data.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
  # @param y [Numo::Int32] (shape: [n_samples]) The labels to be used for fitting the model.
  # @return [RandomForestClassifier] The learned classifier itself.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_classifier.rb#81
  def fit(x, y); end

  # Predict class labels for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the labels.
  # @return [Numo::Int32] (shape: [n_samples]) Predicted class label per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_classifier.rb#120
  def predict(x); end

  # Predict probability for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the probabilities.
  # @return [Numo::DFloat] (shape: [n_samples, n_classes]) Predicted probability of each class per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_classifier.rb#139
  def predict_proba(x); end

  # Return the random generator for random selection of feature index.
  #
  # @return [Random]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_classifier.rb#40
  def rng; end

  private

  # NOTE(review): presumably grows a single decision tree seeded with rnd_seed — confirm in the gem source.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_classifier.rb#162
  def plant_tree(rnd_seed); end

  # NOTE(review): presumably computes class probabilities of x for a single tree — confirm in the gem source.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_classifier.rb#170
  def predict_proba_tree(tree, x); end
end
|
|
|
|
# RandomForestRegressor is a class that implements random forest for regression
#
# @example
#   require 'rumale/ensemble/random_forest_regressor'
#
#   estimator =
#     Rumale::Ensemble::RandomForestRegressor.new(
#       n_estimators: 10, criterion: 'mse', max_depth: 3, max_leaf_nodes: 10, min_samples_leaf: 5, random_seed: 1)
#   estimator.fit(training_samples, training_values)
#   results = estimator.predict(testing_samples)
#
# source://rumale-ensemble//lib/rumale/ensemble/random_forest_regressor.rb#22
class Rumale::Ensemble::RandomForestRegressor < ::Rumale::Base::Estimator
  include ::Rumale::Base::Regressor

  # Create a new regressor with random forest.
  #
  # @param n_estimators [Integer] The number of decision trees for constructing random forest.
  # @param criterion [String] The function to evaluate splitting point. Supported criteria are 'mae' and 'mse'.
  # @param max_depth [Integer] The maximum depth of the tree.
  #   If nil is given, decision tree grows without concern for depth.
  # @param max_leaf_nodes [Integer] The maximum number of leaves on decision tree.
  #   If nil is given, number of leaves is not limited.
  # @param min_samples_leaf [Integer] The minimum number of samples at a leaf node.
  # @param max_features [Integer] The number of features to consider when searching optimal split point.
  #   If nil is given, split process considers 'Math.sqrt(n_features)' features.
  # @param n_jobs [Integer] The number of jobs for running the fit and predict methods in parallel.
  #   If nil is given, the methods do not execute in parallel.
  #   If zero or less is given, it becomes equal to the number of processors.
  #   This parameter is ignored if the Parallel gem is not loaded.
  # @param random_seed [Integer] The seed value used to initialize the random generator.
  #   It is used to randomly determine the order of features when deciding splitting point.
  # @return [RandomForestRegressor] a new instance of RandomForestRegressor
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_regressor.rb#54
  def initialize(n_estimators: T.unsafe(nil), criterion: T.unsafe(nil), max_depth: T.unsafe(nil), max_leaf_nodes: T.unsafe(nil), min_samples_leaf: T.unsafe(nil), max_features: T.unsafe(nil), n_jobs: T.unsafe(nil), random_seed: T.unsafe(nil)); end

  # Return the index of the leaf that each sample reached.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to assign each leaf.
  # @return [Numo::Int32] (shape: [n_samples, n_estimators]) Leaf index for sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_regressor.rb#129
  def apply(x); end

  # Return the set of estimators.
  #
  # @return [Array<DecisionTreeRegressor>]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_regressor.rb#27
  def estimators; end

  # Return the importance for each feature.
  #
  # @return [Numo::DFloat] (size: n_features)
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_regressor.rb#31
  def feature_importances; end

  # Fit the model with given training data.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
  # @param y [Numo::DFloat] (shape: [n_samples, n_outputs]) The target values to be used for fitting the model.
  # @return [RandomForestRegressor] The learned regressor itself.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_regressor.rb#76
  def fit(x, y); end

  # Predict values for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the values.
  # @return [Numo::DFloat] (shape: [n_samples, n_outputs]) Predicted value per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_regressor.rb#115
  def predict(x); end

  # Return the random generator for random selection of feature index.
  #
  # @return [Random]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_regressor.rb#35
  def rng; end

  private

  # NOTE(review): presumably grows a single decision tree seeded with rnd_seed — confirm in the gem source.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/random_forest_regressor.rb#137
  def plant_tree(rnd_seed); end
end
|
|
|
|
# StackingClassifier is a class that implements classifier with stacking method.
#
# *Reference*
# - Zhou, Z-H., "Ensemble Methods - Foundations and Algorithms," CRC Press Taylor and Francis Group, Chapman and Hall/CRC, 2012.
#
# @example
#   require 'rumale/ensemble/stacking_classifier'
#
#   estimators = {
#     lgr: Rumale::LinearModel::LogisticRegression.new(reg_param: 1e-2),
#     mlp: Rumale::NeuralNetwork::MLPClassifier.new(hidden_units: [256], random_seed: 1),
#     rnd: Rumale::Ensemble::RandomForestClassifier.new(random_seed: 1)
#   }
#   meta_estimator = Rumale::LinearModel::LogisticRegression.new
#   classifier = Rumale::Ensemble::StackingClassifier.new(
#     estimators: estimators, meta_estimator: meta_estimator, random_seed: 1
#   )
#   classifier.fit(training_samples, training_labels)
#   results = classifier.predict(testing_samples)
#
# source://rumale-ensemble//lib/rumale/ensemble/stacking_classifier.rb#31
class Rumale::Ensemble::StackingClassifier < ::Rumale::Base::Estimator
  include ::Rumale::Base::Classifier

  # Create a new classifier with stacking method.
  #
  # @param estimators [Hash<Symbol,Classifier>] The base classifiers for extracting meta features.
  # @param meta_estimator [Classifier/Nil] The meta classifier that predicts class label.
  #   If nil is given, LogisticRegression is used.
  # @param n_splits [Integer] The number of folds for cross validation with stratified k-fold on meta feature extraction in training phase.
  # @param shuffle [Boolean] The flag indicating whether to shuffle the dataset on cross validation.
  # @param stack_method [String] The method name of base classifier for using meta feature extraction.
  #   If 'auto' is given, it searches the callable method in the order 'predict_proba', 'decision_function', and 'predict'
  #   on each classifier.
  # @param passthrough [Boolean] The flag indicating whether to concatenate the original features and meta features when training the meta classifier.
  # @param random_seed [Integer/Nil] The seed value used to initialize the random generator on cross validation.
  # @return [StackingClassifier] a new instance of StackingClassifier
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_classifier.rb#62
  def initialize(estimators:, meta_estimator: T.unsafe(nil), n_splits: T.unsafe(nil), shuffle: T.unsafe(nil), stack_method: T.unsafe(nil), passthrough: T.unsafe(nil), random_seed: T.unsafe(nil)); end

  # Return the class labels.
  #
  # @return [Numo::Int32] (size: n_classes)
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_classifier.rb#44
  def classes; end

  # Calculate confidence scores for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to compute the scores.
  # @return [Numo::DFloat] (shape: [n_samples, n_classes]) The confidence score per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_classifier.rb#134
  def decision_function(x); end

  # Return the base classifiers.
  #
  # @return [Hash<Symbol,Classifier>]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_classifier.rb#36
  def estimators; end

  # Fit the model with given training data.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
  # @param y [Numo::Int32] (shape: [n_samples]) The labels to be used for fitting the model.
  # @return [StackingClassifier] The learned classifier itself.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_classifier.rb#81
  def fit(x, y); end

  # Fit the model with training data, and then transform them with the learned model.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
  # @param y [Numo::Int32] (shape: [n_samples]) The labels to be used for fitting the model.
  # @return [Numo::DFloat] (shape: [n_samples, n_components]) The meta features for training data.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_classifier.rb#189
  def fit_transform(x, y); end

  # Return the meta classifier.
  #
  # @return [Classifier]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_classifier.rb#40
  def meta_estimator; end

  # Predict class labels for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the labels.
  # @return [Numo::Int32] (shape: [n_samples]) The predicted class label per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_classifier.rb#145
  def predict(x); end

  # Predict probability for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the probabilities.
  # @return [Numo::DFloat] (shape: [n_samples, n_classes]) The predicted probability of each class per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_classifier.rb#156
  def predict_proba(x); end

  # Return the method used by each base classifier.
  #
  # @return [Hash<Symbol,Symbol>]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_classifier.rb#48
  def stack_method; end

  # Transform the given data with the learned model.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to be transformed with the learned model.
  # @return [Numo::DFloat] (shape: [n_samples, n_components]) The meta features for samples.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_classifier.rb#167
  def transform(x); end

  private

  # NOTE(review): presumably computes the width of the meta-feature matrix from n_features — confirm in the gem source.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_classifier.rb#215
  def detect_output_size(n_features); end

  # NOTE(review): presumably resolves the 'auto' stack_method for each base classifier — confirm in the gem source.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_classifier.rb#203
  def detect_stack_method; end
end
|
|
|
|
# The candidate method names searched, in order, when stack_method is 'auto'
# ('predict_proba', 'decision_function', 'predict').
#
# source://rumale-ensemble//lib/rumale/ensemble/stacking_classifier.rb#199
Rumale::Ensemble::StackingClassifier::STACK_METHODS = T.let(T.unsafe(nil), Array)
|
|
|
|
# StackingRegressor is a class that implements regressor with stacking method.
#
# *Reference*
# - Zhou, Z-H., "Ensemble Methods - Foundations and Algorithms," CRC Press Taylor and Francis Group, Chapman and Hall/CRC, 2012.
#
# @example
#   require 'rumale/ensemble/stacking_regressor'
#
#   estimators = {
#     las: Rumale::LinearModel::Lasso.new(reg_param: 1e-2, random_seed: 1),
#     mlp: Rumale::NeuralNetwork::MLPRegressor.new(hidden_units: [256], random_seed: 1),
#     rnd: Rumale::Ensemble::RandomForestRegressor.new(random_seed: 1)
#   }
#   meta_estimator = Rumale::LinearModel::Ridge.new
#   regressor = Rumale::Ensemble::StackingRegressor.new(
#     estimators: estimators, meta_estimator: meta_estimator, random_seed: 1
#   )
#   regressor.fit(training_samples, training_values)
#   results = regressor.predict(testing_samples)
#
# source://rumale-ensemble//lib/rumale/ensemble/stacking_regressor.rb#30
class Rumale::Ensemble::StackingRegressor < ::Rumale::Base::Estimator
  include ::Rumale::Base::Regressor

  # Create a new regressor with stacking method.
  #
  # @param estimators [Hash<Symbol,Regressor>] The base regressors for extracting meta features.
  # @param meta_estimator [Regressor/Nil] The meta regressor that predicts values.
  #   If nil is given, Ridge is used.
  # @param n_splits [Integer] The number of folds for cross validation with k-fold on meta feature extraction in training phase.
  # @param shuffle [Boolean] The flag indicating whether to shuffle the dataset on cross validation.
  # @param passthrough [Boolean] The flag indicating whether to concatenate the original features and meta features when training the meta regressor.
  # @param random_seed [Integer/Nil] The seed value used to initialize the random generator on cross validation.
  # @return [StackingRegressor] a new instance of StackingRegressor
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_regressor.rb#50
  def initialize(estimators:, meta_estimator: T.unsafe(nil), n_splits: T.unsafe(nil), shuffle: T.unsafe(nil), passthrough: T.unsafe(nil), random_seed: T.unsafe(nil)); end

  # Return the base regressors.
  #
  # @return [Hash<Symbol,Regressor>]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_regressor.rb#35
  def estimators; end

  # Fit the model with given training data.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
  # @param y [Numo::DFloat] (shape: [n_samples, n_outputs]) The target variables to be used for fitting the model.
  # @return [StackingRegressor] The learned regressor itself.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_regressor.rb#67
  def fit(x, y); end

  # Fit the model with training data, and then transform them with the learned model.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
  # @param y [Numo::DFloat] (shape: [n_samples, n_outputs]) The target variables to be used for fitting the model.
  # @return [Numo::DFloat] (shape: [n_samples, n_components]) The meta features for training data.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_regressor.rb#149
  def fit_transform(x, y); end

  # Return the meta regressor.
  #
  # @return [Regressor]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_regressor.rb#39
  def meta_estimator; end

  # Predict values for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the values.
  # @return [Numo::DFloat] (shape: [n_samples, n_outputs]) The predicted values per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_regressor.rb#116
  def predict(x); end

  # Transform the given data with the learned model.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to be transformed with the learned model.
  # @return [Numo::DFloat] (shape: [n_samples, n_components]) The meta features for samples.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_regressor.rb#127
  def transform(x); end

  private

  # NOTE(review): presumably computes the width of the meta-feature matrix from n_features — confirm in the gem source.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/stacking_regressor.rb#159
  def detect_output_size(n_features); end
end
|
|
|
|
# The version string of the rumale-ensemble gem.
#
# source://rumale-ensemble//lib/rumale/ensemble/version.rb#8
Rumale::Ensemble::VERSION = T.let(T.unsafe(nil), String)
|
|
|
|
# VRTreesClassifier is a class that implements variable-random (VR) trees for classification.
#
# *Reference*
# - Liu, F. T., Ting, K. M., Yu, Y., and Zhou, Z. H., "Spectrum of Variable-Random Trees," Journal of Artificial Intelligence Research, vol. 32, pp. 355--384, 2008.
#
# @example
#   require 'rumale/ensemble/vr_trees_classifier'
#
#   estimator =
#     Rumale::Ensemble::VRTreesClassifier.new(
#       n_estimators: 10, criterion: 'gini', max_depth: 3, max_leaf_nodes: 10, min_samples_leaf: 5, random_seed: 1)
#   estimator.fit(training_samples, training_labels)
#   results = estimator.predict(testing_samples)
#
# source://rumale-ensemble//lib/rumale/ensemble/vr_trees_classifier.rb#23
class Rumale::Ensemble::VRTreesClassifier < ::Rumale::Ensemble::RandomForestClassifier
  # Create a new classifier with variable-random trees.
  #
  # @param n_estimators [Integer] The number of trees for constructing variable-random trees.
  # @param criterion [String] The function to evaluate splitting point. Supported criteria are 'gini' and 'entropy'.
  # @param max_depth [Integer] The maximum depth of the tree.
  #   If nil is given, variable-random tree grows without concern for depth.
  # @param max_leaf_nodes [Integer] The maximum number of leaves on variable-random tree.
  #   If nil is given, number of leaves is not limited.
  # @param min_samples_leaf [Integer] The minimum number of samples at a leaf node.
  # @param max_features [Integer] The number of features to consider when searching optimal split point.
  #   If nil is given, split process considers 'n_features' features.
  # @param n_jobs [Integer] The number of jobs for running the fit method in parallel.
  #   If nil is given, the method does not execute in parallel.
  #   If zero or less is given, it becomes equal to the number of processors.
  #   This parameter is ignored if the Parallel gem is not loaded.
  # @param random_seed [Integer] The seed value used to initialize the random generator.
  #   It is used to randomly determine the order of features when deciding splitting point.
  # @return [VRTreesClassifier] a new instance of VRTreesClassifier
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_classifier.rb#57
  def initialize(n_estimators: T.unsafe(nil), criterion: T.unsafe(nil), max_depth: T.unsafe(nil), max_leaf_nodes: T.unsafe(nil), min_samples_leaf: T.unsafe(nil), max_features: T.unsafe(nil), n_jobs: T.unsafe(nil), random_seed: T.unsafe(nil)); end

  # Return the index of the leaf that each sample reached.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the labels.
  # @return [Numo::Int32] (shape: [n_samples, n_estimators]) Leaf index for sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_classifier.rb#122
  def apply(x); end

  # Return the class labels.
  #
  # @return [Numo::Int32] (size: n_classes)
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_classifier.rb#30
  def classes; end

  # Return the set of estimators.
  #
  # @return [Array<VRTreeClassifier>]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_classifier.rb#26
  def estimators; end

  # Return the importance for each feature.
  #
  # @return [Numo::DFloat] (size: n_features)
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_classifier.rb#34
  def feature_importances; end

  # Fit the model with given training data.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
  # @param y [Numo::Int32] (shape: [n_samples]) The labels to be used for fitting the model.
  # @return [VRTreesClassifier] The learned classifier itself.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_classifier.rb#68
  def fit(x, y); end

  # Predict class labels for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the labels.
  # @return [Numo::Int32] (shape: [n_samples]) Predicted class label per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_classifier.rb#102
  def predict(x); end

  # Predict probability for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the probabilities.
  # @return [Numo::DFloat] (shape: [n_samples, n_classes]) Predicted probability of each class per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_classifier.rb#112
  def predict_proba(x); end

  # Return the random generator for random selection of feature index.
  #
  # @return [Random]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_classifier.rb#38
  def rng; end

  private

  # NOTE(review): presumably grows a single VR tree; alpha is the deterministic/random
  # split mixing ratio from the VR trees paper — confirm in the gem source.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_classifier.rb#130
  def plant_tree(alpha, rnd_seed); end
end
|
|
|
|
# VRTreesRegressor is a class that implements variable-random (VR) trees for regression
#
# *Reference*
# - Liu, F. T., Ting, K. M., Yu, Y., and Zhou, Z. H., "Spectrum of Variable-Random Trees," Journal of Artificial Intelligence Research, vol. 32, pp. 355--384, 2008.
#
# @example
#   require 'rumale/ensemble/vr_trees_regressor'
#
#   estimator =
#     Rumale::Ensemble::VRTreesRegressor.new(
#       n_estimators: 10, criterion: 'mse', max_depth: 3, max_leaf_nodes: 10, min_samples_leaf: 5, random_seed: 1)
#   estimator.fit(training_samples, training_values)
#   results = estimator.predict(testing_samples)
#
# source://rumale-ensemble//lib/rumale/ensemble/vr_trees_regressor.rb#23
class Rumale::Ensemble::VRTreesRegressor < ::Rumale::Ensemble::RandomForestRegressor
  # Create a new regressor with variable-random trees.
  #
  # @param n_estimators [Integer] The number of trees for constructing variable-random trees.
  # @param criterion [String] The function to evaluate splitting point. Supported criteria are 'mae' and 'mse'.
  # @param max_depth [Integer] The maximum depth of the tree.
  #   If nil is given, variable-random tree grows without concern for depth.
  # @param max_leaf_nodes [Integer] The maximum number of leaves on variable-random tree.
  #   If nil is given, number of leaves is not limited.
  # @param min_samples_leaf [Integer] The minimum number of samples at a leaf node.
  # @param max_features [Integer] The number of features to consider when searching optimal split point.
  #   If nil is given, split process considers 'n_features' features.
  # @param n_jobs [Integer] The number of jobs for running the fit and predict methods in parallel.
  #   If nil is given, the methods do not execute in parallel.
  #   If zero or less is given, it becomes equal to the number of processors.
  #   This parameter is ignored if the Parallel gem is not loaded.
  # @param random_seed [Integer] The seed value used to initialize the random generator.
  #   It is used to randomly determine the order of features when deciding splitting point.
  # @return [VRTreesRegressor] a new instance of VRTreesRegressor
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_regressor.rb#53
  def initialize(n_estimators: T.unsafe(nil), criterion: T.unsafe(nil), max_depth: T.unsafe(nil), max_leaf_nodes: T.unsafe(nil), min_samples_leaf: T.unsafe(nil), max_features: T.unsafe(nil), n_jobs: T.unsafe(nil), random_seed: T.unsafe(nil)); end

  # Return the index of the leaf that each sample reached.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to assign each leaf.
  # @return [Numo::Int32] (shape: [n_samples, n_estimators]) Leaf index for sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_regressor.rb#107
  def apply(x); end

  # Return the set of estimators.
  #
  # @return [Array<VRTreeRegressor>]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_regressor.rb#26
  def estimators; end

  # Return the importance for each feature.
  #
  # @return [Numo::DFloat] (size: n_features)
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_regressor.rb#30
  def feature_importances; end

  # Fit the model with given training data.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
  # @param y [Numo::DFloat] (shape: [n_samples, n_outputs]) The target values to be used for fitting the model.
  # @return [VRTreesRegressor] The learned regressor itself.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_regressor.rb#64
  def fit(x, y); end

  # Predict values for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the values.
  # @return [Numo::DFloat] (shape: [n_samples, n_outputs]) Predicted value per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_regressor.rb#97
  def predict(x); end

  # Return the random generator for random selection of feature index.
  #
  # @return [Random]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_regressor.rb#34
  def rng; end

  private

  # NOTE(review): presumably grows a single VR tree; alpha is the deterministic/random
  # split mixing ratio from the VR trees paper — confirm in the gem source.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/vr_trees_regressor.rb#115
  def plant_tree(alpha, rnd_seed); end
end
|
|
|
|
# Internal namespace holding integer constants shared by the ensemble estimators.
#
# source://rumale-ensemble//lib/rumale/ensemble/value.rb#6
module Rumale::Ensemble::Value; end

# NOTE(review): exact semantics not visible from this stub — presumably the bit
# width used when deriving per-estimator random seeds; confirm against value.rb.
#
# source://rumale-ensemble//lib/rumale/ensemble/value.rb#8
Rumale::Ensemble::Value::N_BITS = T.let(T.unsafe(nil), Integer)

# NOTE(review): exact semantics not visible from this stub — presumably the base
# value used when deriving per-estimator random seeds; confirm against value.rb.
#
# source://rumale-ensemble//lib/rumale/ensemble/value.rb#10
Rumale::Ensemble::Value::SEED_BASE = T.let(T.unsafe(nil), Integer)
|
|
|
|
# VotingClassifier is a class that implements classifier with voting ensemble method.
#
# *Reference*
# - Zhou, Z-H., "Ensemble Methods - Foundations and Algorithms," CRC Press Taylor and Francis Group, Chapman and Hall/CRC, 2012.
#
# @example
#   require 'rumale/ensemble/voting_classifier'
#
#   estimators = {
#     lgr: Rumale::LinearModel::LogisticRegression.new(reg_param: 1e-2),
#     mlp: Rumale::NeuralNetwork::MLPClassifier.new(hidden_units: [256], random_seed: 1),
#     rnd: Rumale::Ensemble::RandomForestClassifier.new(random_seed: 1)
#   }
#   weights = { lgr: 0.2, mlp: 0.3, rnd: 0.5 }
#
#   classifier = Rumale::Ensemble::VotingClassifier.new(estimators: estimators, weights: weights, voting: 'soft')
#   classifier.fit(x_train, y_train)
#   results = classifier.predict(x_test)
#
# source://rumale-ensemble//lib/rumale/ensemble/voting_classifier.rb#28
class Rumale::Ensemble::VotingClassifier < ::Rumale::Base::Estimator
  include ::Rumale::Base::Classifier

  # Create a new ensembled classifier with voting rule.
  #
  # @param estimators [Hash<Symbol,Classifier>] The sub-classifiers to vote.
  # @param weights [Hash<Symbol,Float>] The weight value for each classifier.
  # @param voting [String] The voting rule for the predicted results of each classifier.
  #   If 'hard' is given, the ensembled classifier predicts the class label by majority vote.
  #   If 'soft' is given, the ensembled classifier uses the weighted average of predicted probabilities for the prediction.
  # @return [VotingClassifier] a new instance of VotingClassifier
  #
  # source://rumale-ensemble//lib/rumale/ensemble/voting_classifier.rb#46
  def initialize(estimators:, weights: T.unsafe(nil), voting: T.unsafe(nil)); end

  # Return the class labels.
  #
  # @return [Numo::Int32] (size: n_classes)
  #
  # source://rumale-ensemble//lib/rumale/ensemble/voting_classifier.rb#37
  def classes; end

  # Calculate confidence scores for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to compute the scores.
  # @return [Numo::DFloat] (shape: [n_samples, n_classes]) The confidence score per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/voting_classifier.rb#77
  def decision_function(x); end

  # Return the sub-classifiers that voted.
  #
  # @return [Hash<Symbol,Classifier>]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/voting_classifier.rb#33
  def estimators; end

  # Fit the model with given training data.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
  # @param y [Numo::Int32] (shape: [n_samples]) The labels to be used for fitting the model.
  # @return [VotingClassifier] The learned classifier itself.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/voting_classifier.rb#60
  def fit(x, y); end

  # Predict class labels for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the labels.
  # @return [Numo::Int32] (shape: [n_samples]) The predicted class label per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/voting_classifier.rb#95
  def predict(x); end

  # Predict probability for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the probabilities.
  # @return [Numo::DFloat] (shape: [n_samples, n_classes]) Predicted probability of each class per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/voting_classifier.rb#109
  def predict_proba(x); end

  private

  # @return [Boolean]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/voting_classifier.rb#124
  def soft_voting?; end
end
# VotingRegressor is a class that implements regressor with voting ensemble method.
#
# *Reference*
# - Zhou, Z-H., "Ensemble Methods - Foundations and Algorithms," CRC Press Taylor and Francis Group, Chapman and Hall/CRC, 2012.
#
# @example
#   require 'rumale/ensemble/voting_regressor'
#
#   estimators = {
#     rdg: Rumale::LinearModel::Ridge.new(reg_param: 0.1),
#     mlp: Rumale::NeuralNetwork::MLPRegressor.new(hidden_units: [256], random_seed: 1),
#     rnd: Rumale::Ensemble::RandomForestRegressor.new(random_seed: 1)
#   }
#   weights = { rdg: 0.2, mlp: 0.3, rnd: 0.5 }
#
#   regressor = Rumale::Ensemble::VotingRegressor.new(estimators: estimators, weights: weights, voting: 'soft')
#   regressor.fit(x_train, y_train)
#   results = regressor.predict(x_test)
#
# source://rumale-ensemble//lib/rumale/ensemble/voting_regressor.rb#27
class Rumale::Ensemble::VotingRegressor < ::Rumale::Base::Estimator
  include ::Rumale::Base::Regressor

  # Create a new ensembled regressor with voting rule.
  #
  # @param estimators [Hash<Symbol,Regressor>] The sub-regressors to vote.
  # @param weights [Hash<Symbol,Float>] The weight value for each regressor.
  # @return [VotingRegressor] a new instance of VotingRegressor
  #
  # source://rumale-ensemble//lib/rumale/ensemble/voting_regressor.rb#38
  def initialize(estimators:, weights: T.unsafe(nil)); end

  # Return the sub-regressors that voted.
  #
  # @return [Hash<Symbol,Regressor>]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/voting_regressor.rb#32
  def estimators; end

  # Fit the model with given training data.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The training data to be used for fitting the model.
  # @param y [Numo::Int32] (shape: [n_samples]) The labels to be used for fitting the model.
  # @return [VotingRegressor] The learned regressor itself.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/voting_regressor.rb#51
  def fit(x, y); end

  # Predict values for samples.
  #
  # @param x [Numo::DFloat] (shape: [n_samples, n_features]) The samples to predict the values.
  # @return [Numo::DFloat] (shape: [n_samples, n_outputs]) Predicted value per sample.
  #
  # source://rumale-ensemble//lib/rumale/ensemble/voting_regressor.rb#66
  def predict(x); end

  private

  # @return [Boolean]
  #
  # source://rumale-ensemble//lib/rumale/ensemble/voting_regressor.rb#79
  def single_target?; end
end