Module: Rumale::EvaluationMeasure
- Defined in:
- rumale-evaluation_measure/lib/rumale/evaluation_measure/recall.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/purity.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/f_score.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/roc_auc.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/version.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/accuracy.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/function.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/log_loss.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/r2_score.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/precision.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/precision_recall.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/silhouette_score.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/mean_squared_error.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/mutual_information.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/adjusted_rand_score.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/mean_absolute_error.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/davies_bouldin_score.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/median_absolute_error.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/mean_squared_log_error.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/calinski_harabasz_score.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/explained_variance_score.rb,
rumale-evaluation_measure/lib/rumale/evaluation_measure/normalized_mutual_information.rb
Overview
This module consists of the classes and module functions for model evaluation.
Defined Under Namespace
Classes: Accuracy, AdjustedRandScore, CalinskiHarabaszScore, DaviesBouldinScore, ExplainedVarianceScore, FScore, LogLoss, MeanAbsoluteError, MeanSquaredError, MeanSquaredLogError, MedianAbsoluteError, MutualInformation, NormalizedMutualInformation, Precision, Purity, R2Score, ROCAUC, Recall, SilhouetteScore
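These evaluator classes expose a score method (the regression and clustering measures take the arguments appropriate to their task). A minimal sketch with Accuracy, whose use is also visible in the classification_report source below; the sample label arrays are illustrative:

require 'numo/narray'
require 'rumale/evaluation_measure'

y_true = Numo::Int32[0, 0, 1, 1, 2, 2]
y_pred = Numo::Int32[0, 1, 1, 1, 2, 0]

evaluator = Rumale::EvaluationMeasure::Accuracy.new
evaluator.score(y_true, y_pred) # => 0.666... (4 of 6 samples correct)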
Class Method Summary
- .classification_report(y_true, y_pred, target_name: nil, output_hash: false) ⇒ String/Hash
  Output a summary of classification performance for each class.
- .confusion_matrix(y_true, y_pred) ⇒ Numo::Int32
  Calculate the confusion matrix for evaluating classification performance.
Class Method Details
.classification_report(y_true, y_pred, target_name: nil, output_hash: false) ⇒ String/Hash
Output a summary of classification performance for each class.
# File 'rumale-evaluation_measure/lib/rumale/evaluation_measure/function.rb', line 69

def classification_report(y_true, y_pred, target_name: nil, output_hash: false) # rubocop:disable Metrics/MethodLength, Metrics/AbcSize
  # calculate each evaluation measure.
  classes = y_true.to_a.uniq.sort
  supports = Numo::Int32.asarray(classes.map { |label| y_true.eq(label).count })
  precisions = Rumale::EvaluationMeasure::PrecisionRecall.precision_each_class(y_true, y_pred)
  recalls = Rumale::EvaluationMeasure::PrecisionRecall.recall_each_class(y_true, y_pred)
  fscores = Rumale::EvaluationMeasure::PrecisionRecall.f_score_each_class(y_true, y_pred)
  macro_precision = Rumale::EvaluationMeasure::PrecisionRecall.macro_average_precision(y_true, y_pred)
  macro_recall = Rumale::EvaluationMeasure::PrecisionRecall.macro_average_recall(y_true, y_pred)
  macro_fscore = Rumale::EvaluationMeasure::PrecisionRecall.macro_average_f_score(y_true, y_pred)
  accuracy = Rumale::EvaluationMeasure::Accuracy.new.score(y_true, y_pred)
  sum_supports = supports.sum
  weights = Numo::DFloat.cast(supports) / sum_supports
  weighted_precision = (Numo::DFloat.cast(precisions) * weights).sum
  weighted_recall = (Numo::DFloat.cast(recalls) * weights).sum
  weighted_fscore = (Numo::DFloat.cast(fscores) * weights).sum
  # output results.
  target_name ||= classes
  target_name.map!(&:to_s)
  if output_hash
    res = {}
    target_name.each_with_index do |label, n|
      res[label] = { precision: precisions[n], recall: recalls[n], fscore: fscores[n], support: supports[n] }
    end
    res[:accuracy] = accuracy
    res[:macro_avg] = { precision: macro_precision, recall: macro_recall, fscore: macro_fscore, support: sum_supports }
    res[:weighted_avg] = { precision: weighted_precision, recall: weighted_recall, fscore: weighted_fscore, support: sum_supports }
  else
    width = [12, target_name.map(&:size).max].max # 12 is 'weighted avg'.size
    res = +''
    res << "#{' ' * width}  precision    recall  f1-score   support\n"
    res << "\n"
    target_name.each_with_index do |label, n|
      label_str = format("%##{width}s", label)
      precision_str = format('%#10s', format('%.2f', precisions[n]))
      recall_str = format('%#10s', format('%.2f', recalls[n]))
      fscore_str = format('%#10s', format('%.2f', fscores[n]))
      supports_str = format('%#10s', supports[n])
      res << "#{label_str} #{precision_str}#{recall_str}#{fscore_str}#{supports_str}\n"
    end
    res << "\n"
    supports_str = format('%#10s', sum_supports)
    accuracy_str = format('%#30s', format('%.2f', accuracy))
    res << format("%##{width}s ", 'accuracy')
    res << "#{accuracy_str}#{supports_str}\n"
    precision_str = format('%#10s', format('%.2f', macro_precision))
    recall_str = format('%#10s', format('%.2f', macro_recall))
    fscore_str = format('%#10s', format('%.2f', macro_fscore))
    res << format("%##{width}s ", 'macro avg')
    res << "#{precision_str}#{recall_str}#{fscore_str}#{supports_str}\n"
    precision_str = format('%#10s', format('%.2f', weighted_precision))
    recall_str = format('%#10s', format('%.2f', weighted_recall))
    fscore_str = format('%#10s', format('%.2f', weighted_fscore))
    res << format("%##{width}s ", 'weighted avg')
    res << "#{precision_str}#{recall_str}#{fscore_str}#{supports_str}\n"
  end
  res
end
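For reference, a minimal usage sketch (the label arrays are illustrative; requiring 'rumale/evaluation_measure' assumes the rumale-evaluation_measure gem is installed):

require 'numo/narray'
require 'rumale/evaluation_measure'

y_true = Numo::Int32[0, 0, 1, 1, 2, 2]
y_pred = Numo::Int32[0, 1, 1, 1, 2, 0]

# Default: a formatted table with one row per class, followed by
# accuracy, macro average, and weighted average rows.
puts Rumale::EvaluationMeasure.classification_report(y_true, y_pred)

# With output_hash: true, the same numbers keyed by the stringified
# class labels, plus :accuracy, :macro_avg, and :weighted_avg entries.
report = Rumale::EvaluationMeasure.classification_report(y_true, y_pred, output_hash: true)
report['1'][:precision]

Note that when target_name is given, the method calls map!(&:to_s) on it, so the passed array is mutated in place.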
.confusion_matrix(y_true, y_pred) ⇒ Numo::Int32
Calculate the confusion matrix for evaluating classification performance.
# File 'rumale-evaluation_measure/lib/rumale/evaluation_measure/function.rb', line 30

def confusion_matrix(y_true, y_pred)
  labels = y_true.to_a.uniq.sort
  n_labels = labels.size

  conf_mat = Numo::Int32.zeros(n_labels, n_labels)

  labels.each_with_index do |lbl_a, i|
    y_p = y_pred[y_true.eq(lbl_a)]
    labels.each_with_index do |lbl_b, j|
      conf_mat[i, j] = y_p.eq(lbl_b).count
    end
  end

  conf_mat
end
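For reference, a minimal usage sketch (the label arrays are illustrative):

require 'numo/narray'
require 'rumale/evaluation_measure'

y_true = Numo::Int32[0, 0, 1, 1, 2, 2]
y_pred = Numo::Int32[0, 1, 1, 1, 2, 0]

cm = Rumale::EvaluationMeasure.confusion_matrix(y_true, y_pred)
# Rows and columns follow the sorted unique labels of y_true, so
# cm[i, j] counts samples whose true label is the i-th label and
# whose predicted label is the j-th label. For the arrays above:
#   [[1, 1, 0],
#    [0, 2, 0],
#    [1, 0, 1]]

Because the label set is derived from y_true alone, a label that appears only in y_pred gets no row or column of its own.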