Class: Rumale::Ensemble::RandomForestRegressor

Inherits:
Base::Estimator
Includes:
Base::Regressor
Defined in:
rumale-ensemble/lib/rumale/ensemble/random_forest_regressor.rb

Overview

RandomForestRegressor is a class that implements random forest for regression.

Examples:

require 'rumale/ensemble/random_forest_regressor'

estimator =
  Rumale::Ensemble::RandomForestRegressor.new(
    n_estimators: 10, criterion: 'mse', max_depth: 3, max_leaf_nodes: 10, min_samples_leaf: 5, random_seed: 1)
estimator.fit(training_samples, training_values)
results = estimator.predict(testing_samples)

Direct Known Subclasses

ExtraTreesRegressor

Instance Attribute Summary

Attributes inherited from Base::Estimator

#params

Instance Method Summary

Methods included from Base::Regressor

#score

Constructor Details

#initialize(n_estimators: 10, criterion: 'mse', max_depth: nil, max_leaf_nodes: nil, min_samples_leaf: 1, max_features: nil, n_jobs: nil, random_seed: nil) ⇒ RandomForestRegressor

Create a new regressor with random forest.

Parameters:

  • n_estimators (Integer) (defaults to: 10)

    The number of decision trees for constructing the random forest.

  • criterion (String) (defaults to: 'mse')

    The function to evaluate the splitting point. Supported criteria are ‘mse’ and ‘mae’.

  • max_depth (Integer) (defaults to: nil)

    The maximum depth of the tree. If nil is given, decision tree grows without concern for depth.

  • max_leaf_nodes (Integer) (defaults to: nil)

    The maximum number of leaves on decision tree. If nil is given, number of leaves is not limited.

  • min_samples_leaf (Integer) (defaults to: 1)

    The minimum number of samples at a leaf node.

  • max_features (Integer) (defaults to: nil)

    The number of features to consider when searching for the optimal split point. If nil is given, the split process considers ‘Math.sqrt(n_features)’ features.

  • n_jobs (Integer) (defaults to: nil)

    The number of jobs for running the fit and predict methods in parallel. If nil is given, the methods do not execute in parallel. If zero or less is given, it becomes equal to the number of processors. This parameter is ignored if the Parallel gem is not loaded.

  • random_seed (Integer) (defaults to: nil)

    The seed value used to initialize the random generator. It is used to randomly determine the order of features when deciding the splitting point.



# File 'rumale-ensemble/lib/rumale/ensemble/random_forest_regressor.rb', line 54

def initialize(n_estimators: 10,
               criterion: 'mse', max_depth: nil, max_leaf_nodes: nil, min_samples_leaf: 1,
               max_features: nil, n_jobs: nil, random_seed: nil)
  super()
  @params = {
    n_estimators: n_estimators,
    criterion: criterion,
    max_depth: max_depth,
    max_leaf_nodes: max_leaf_nodes,
    min_samples_leaf: min_samples_leaf,
    max_features: max_features,
    n_jobs: n_jobs,
    random_seed: random_seed || srand
  }
  @rng = Random.new(@params[:random_seed])
end
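
As a usage sketch (not taken from the library's documentation), training and prediction can be parallelized by loading the Parallel gem before constructing the estimator and passing n_jobs; the data variables below are placeholders:

require 'parallel'
require 'rumale/ensemble/random_forest_regressor'

# n_jobs of zero or less uses all available processors
# (only effective when the Parallel gem is loaded).
estimator = Rumale::Ensemble::RandomForestRegressor.new(
  n_estimators: 100, max_depth: 8, n_jobs: -1, random_seed: 1)
estimator.fit(training_samples, training_values)
results = estimator.predict(testing_samples)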

Instance Attribute Details

#estimators ⇒ Array<DecisionTreeRegressor> (readonly)

Return the set of estimators.

Returns:

  • (Array<DecisionTreeRegressor>)


# File 'rumale-ensemble/lib/rumale/ensemble/random_forest_regressor.rb', line 27

def estimators
  @estimators
end
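
For illustration, a fitted forest exposes its trees directly; the snippet below assumes estimator has already been fitted and testing_samples is a placeholder:

estimator.estimators.size              # => 10 (equals n_estimators)
single_tree = estimator.estimators.first
single_tree.predict(testing_samples)   # prediction of one decision tree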

#feature_importances ⇒ Numo::DFloat (readonly)

Return the importance for each feature.

Returns:

  • (Numo::DFloat)

    (size: n_features)



# File 'rumale-ensemble/lib/rumale/ensemble/random_forest_regressor.rb', line 31

def feature_importances
  @feature_importances
end
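
The importances are aggregated over all trees and normalized to sum to one (see #fit below), so they can be ranked directly. A small sketch, where feature_names is a hypothetical array of column names:

importances = estimator.feature_importances    # Numo::DFloat, size: n_features
ranked = importances.to_a.each_with_index.sort_by { |v, _i| -v }
ranked.first(3).each { |v, i| puts format('%s: %.3f', feature_names[i], v) }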

#rng ⇒ Random (readonly)

Return the random generator used for the random selection of feature indices.

Returns:

  • (Random)


# File 'rumale-ensemble/lib/rumale/ensemble/random_forest_regressor.rb', line 35

def rng
  @rng
end

Instance Method Details

#apply(x) ⇒ Numo::Int32

Return the index of the leaf that each sample reached.

Parameters:

  • x (Numo::DFloat)

    (shape: [n_samples, n_features]) The samples to be assigned to leaf nodes.

Returns:

  • (Numo::Int32)

    (shape: [n_samples, n_estimators]) Leaf index for each sample.



# File 'rumale-ensemble/lib/rumale/ensemble/random_forest_regressor.rb', line 129

def apply(x)
  x = ::Rumale::Validation.check_convert_sample_array(x)

  Numo::Int32[*Array.new(@params[:n_estimators]) { |n| @estimators[n].apply(x) }].transpose.dup
end
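
The leaf indices can serve, for example, as a per-tree encoding of the samples. A brief sketch with placeholder data:

leaf_ids = estimator.apply(testing_samples)   # Numo::Int32, shape: [n_samples, n_estimators]
puts leaf_ids.shape.inspect                   # => [n_samples, 10] for n_estimators: 10
puts leaf_ids[0, 0]                           # leaf reached by the first sample in the first tree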

#fit(x, y) ⇒ RandomForestRegressor

Fit the model with given training data.

Parameters:

  • x (Numo::DFloat)

    (shape: [n_samples, n_features]) The training data to be used for fitting the model.

  • y (Numo::DFloat)

    (shape: [n_samples, n_outputs]) The target values to be used for fitting the model.

Returns:

  • (RandomForestRegressor)

    The learned regressor itself.

# File 'rumale-ensemble/lib/rumale/ensemble/random_forest_regressor.rb', line 76

def fit(x, y)
  x = ::Rumale::Validation.check_convert_sample_array(x)
  y = ::Rumale::Validation.check_convert_target_value_array(y)
  ::Rumale::Validation.check_sample_size(x, y)

  # Initialize some variables.
  n_samples, n_features = x.shape
  @params[:max_features] = Math.sqrt(n_features).to_i if @params[:max_features].nil?
  @params[:max_features] = [[1, @params[:max_features]].max, n_features].min # rubocop:disable Style/ComparableClamp
  single_target = y.shape[1].nil?
  sub_rng = @rng.dup
  rngs = Array.new(@params[:n_estimators]) { Random.new(sub_rng.rand(::Rumale::Ensemble::Value::SEED_BASE)) }
  # Construct forest.
  @estimators =
    if enable_parallel?
      parallel_map(@params[:n_estimators]) do |n|
        bootstrap_ids = Array.new(n_samples) { rngs[n].rand(0...n_samples) }
        plant_tree(rngs[n].seed).fit(x[bootstrap_ids, true], single_target ? y[bootstrap_ids] : y[bootstrap_ids, true])
      end
    else
      Array.new(@params[:n_estimators]) do |n|
        bootstrap_ids = Array.new(n_samples) { rngs[n].rand(0...n_samples) }
        plant_tree(rngs[n].seed).fit(x[bootstrap_ids, true], single_target ? y[bootstrap_ids] : y[bootstrap_ids, true])
      end
    end
  @feature_importances =
    if enable_parallel?
      parallel_map(@params[:n_estimators]) { |n| @estimators[n].feature_importances }.sum
    else
      @estimators.sum(&:feature_importances)
    end
  @feature_importances /= @feature_importances.sum
  self
end
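
A minimal end-to-end sketch of fitting on synthetic data (the target below is an arbitrary function of the features, used only for illustration):

require 'numo/narray'
require 'rumale/ensemble/random_forest_regressor'

x = Numo::DFloat.new(200, 3).rand                 # 200 samples, 3 features in [0, 1)
y = x[true, 0] * 2.0 - x[true, 1] + 0.5           # single-target values, shape: [200]

estimator = Rumale::Ensemble::RandomForestRegressor.new(n_estimators: 50, random_seed: 1)
estimator.fit(x, y)                               # returns the estimator itself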

#predict(x) ⇒ Numo::DFloat

Predict values for samples.

Parameters:

  • x (Numo::DFloat)

    (shape: [n_samples, n_features]) The samples for which to predict the values.

Returns:

  • (Numo::DFloat)

    (shape: [n_samples, n_outputs]) Predicted value per sample.



# File 'rumale-ensemble/lib/rumale/ensemble/random_forest_regressor.rb', line 115

def predict(x)
  x = ::Rumale::Validation.check_convert_sample_array(x)

  if enable_parallel?
    parallel_map(@params[:n_estimators]) { |n| @estimators[n].predict(x) }.sum / @params[:n_estimators]
  else
    @estimators.sum { |tree| tree.predict(x) } / @params[:n_estimators]
  end
end
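
Predictions are the average of the individual trees' outputs. Continuing from the fit sketch above, a quick error check might look like this (testing_samples and testing_values are placeholders):

predicted = estimator.predict(testing_samples)    # Numo::DFloat, shape: [n_samples] for a single target
mse = ((predicted - testing_values)**2).mean
puts format('MSE: %.4f', mse)

# The coefficient of determination is also available via Base::Regressor#score.
puts estimator.score(testing_samples, testing_values)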