summary | refs | log | tree | commit | diff
path: root/report/report_factory.rb
diff options
context:
space:
mode:
Diffstat (limited to 'report/report_factory.rb')
-rwxr-xr-x  report/report_factory.rb  23
1 file changed, 16 insertions, 7 deletions
diff --git a/report/report_factory.rb b/report/report_factory.rb
index f73ffd9..b67fbf1 100755
--- a/report/report_factory.rb
+++ b/report/report_factory.rb
@@ -14,7 +14,7 @@ VAL_ATTR_REGR = [ :num_instances, :num_unpredicted, :root_mean_squared_error,
#VAL_ATTR_BOX_PLOT_CLASS = [ :accuracy, :average_area_under_roc,
# :area_under_roc, :f_measure, :true_positive_rate, :true_negative_rate ]
VAL_ATTR_BOX_PLOT_CLASS = [ :accuracy, :area_under_roc, :f_measure, :true_positive_rate, :true_negative_rate, :positive_predictive_value, :negative_predictive_value ]
-VAL_ATTR_BOX_PLOT_REGR = [ :root_mean_squared_error, :mean_absolute_error, :r_square ]
+VAL_ATTR_BOX_PLOT_REGR = [ :root_mean_squared_error, :mean_absolute_error, :r_square, :concordance_correlation_coefficient ]
VAL_ATTR_TTEST_REGR = [ :r_square, :root_mean_squared_error ]
VAL_ATTR_TTEST_CLASS = [ :accuracy, :average_area_under_roc ]
@@ -61,7 +61,7 @@ module Reports::ReportFactory
i = 0
task_step = 100 / validation_set.size.to_f
validation_set.validations.each do |v|
- v.get_predictions( OpenTox::SubTask.create(task, i*task_step, (i+1)*task_step ) )
+ v.get_predictions( OpenTox::SubTask.create(task, i*task_step, [(i+1)*task_step,100].min ) )
i += 1
end
end
@@ -299,7 +299,7 @@ module Reports::ReportFactory
report.add_result(merged,result_attributes,res_titel,res_titel,res_text)
# pending: regression stats have different scales!!!
report.add_box_plot(set, :identifier, box_plot_attributes)
- report.add_paired_ttest_tables(set, :identifier, ttest_attributes, ttest_significance) if ttest_significance>0
+ report.add_ttest_tables(set, :identifier, ttest_attributes, ttest_significance) if ttest_significance>0
report.end_section
end
task.progress(100) if task
@@ -314,7 +314,7 @@ module Reports::ReportFactory
validation_set.get_values(:identifier).inspect) if validation_set.num_different_values(:identifier)<2
#validation_set.load_cv_attributes
- pre_load_predictions( validation_set, OpenTox::SubTask.create(task,0,80) )
+ #pre_load_predictions( validation_set, OpenTox::SubTask.create(task,0,80) )
report = Reports::ReportContent.new("Method comparison report")
add_filter_warning(report, validation_set.filter_params) if validation_set.filter_params!=nil
@@ -322,24 +322,33 @@ module Reports::ReportFactory
case validation_set.unique_feature_type
when "classification"
result_attributes += VAL_ATTR_CLASS
+ ttest_attributes = VAL_ATTR_TTEST_CLASS
box_plot_attributes = VAL_ATTR_BOX_PLOT_CLASS
else
result_attributes += VAL_ATTR_REGR
+ ttest_attributes = VAL_ATTR_TTEST_REGR
box_plot_attributes = VAL_ATTR_BOX_PLOT_REGR
end
merged = validation_set.merge([:identifier])
merged.sort(:identifier)
-
merged.validations.each do |v|
v.validation_uri = v.validation_uri.split(";").uniq.join(" ")
v.validation_report_uri = v.validation_report_uri.split(";").uniq.join(" ") if v.validation_report_uri
end
-
msg = merged.validations.collect{|v| v.identifier+" ("+Lib::MergeObjects.merge_count(v).to_s+"x)"}.join(", ")
report.add_result(merged,result_attributes,"Average Results","Results",msg)
-
report.add_box_plot(validation_set, :identifier, box_plot_attributes)
+ if params[:ttest_attributes] and params[:ttest_attributes].chomp.size>0
+ ttest_attributes = params[:ttest_attributes].split(",").collect{|a| a.to_sym}
+ end
+ ttest_significance = 0.9
+ if params[:ttest_significance]
+ ttest_significance = params[:ttest_significance].to_f
+ end
+ #report.add_ttest_tables(validation_set, :identifier, ttest_attributes, ttest_significance) if ttest_significance>0
+ #report.add_ttest_tables(validation_set, :identifier, ttest_attributes, 0.75) if ttest_significance>0
+ #report.add_ttest_tables(validation_set, :identifier, ttest_attributes, 0.5) if ttest_significance>0
report
end