author    mguetlein <martin.guetlein@gmail.com>  2011-05-17 10:46:45 +0200
committer mguetlein <martin.guetlein@gmail.com>  2011-05-17 10:46:45 +0200
commit    9ce03c0f50bb9129b584327d56fa4c9277849227 (patch)
tree      8c0213ec8e3e5ac2ca918ab03a78c6fa99f2fcdc /lib
parent    eb5f8b5da9b247d62abc8a7b9eb2e44fe46a1c79 (diff)
crossvalidation statistics fix: compute cv-statistics with cv-predictions instead of averaging cv-validation-statistics
Diffstat (limited to 'lib')
-rw-r--r--  lib/dataset_cache.rb    23
-rwxr-xr-x  lib/ot_predictions.rb  254
2 files changed, 172 insertions, 105 deletions
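The commit message above describes the change: crossvalidation statistics are now computed over the pooled predictions of all folds rather than by averaging the per-fold statistics. A minimal sketch of the difference, using illustrative fold data and a toy accuracy method (not the validation service's API):

# toy example: two folds of a binary classification crossvalidation
folds = [
  { :actual => [1, 1, 0],       :predicted => [1, 0, 0] },
  { :actual => [0, 1, 1, 1, 1], :predicted => [0, 1, 1, 1, 0] },
]

def accuracy(actual, predicted)
  hits = actual.zip(predicted).select { |a, p| a == p }.size
  hits.to_f / actual.size
end

# old behaviour: average the statistic computed per fold
avg_of_fold_stats = folds.map { |f| accuracy(f[:actual], f[:predicted]) }.inject(:+) / folds.size

# new behaviour (this commit): pool the predictions of all folds, then compute the statistic once
pooled_actual    = folds.map { |f| f[:actual] }.flatten
pooled_predicted = folds.map { |f| f[:predicted] }.flatten
pooled_stat      = accuracy(pooled_actual, pooled_predicted)

puts avg_of_fold_stats   # => 0.733..., each fold weighted equally regardless of its size
puts pooled_stat         # => 0.75, each prediction weighted equally

Pooling weights every prediction equally, whereas averaging fold statistics over-weights predictions from small folds; the two only coincide when all folds have the same size.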
diff --git a/lib/dataset_cache.rb b/lib/dataset_cache.rb
new file mode 100644
index 0000000..1af1d51
--- /dev/null
+++ b/lib/dataset_cache.rb
@@ -0,0 +1,23 @@
+
+module Lib
+
+ module DatasetCache
+
+ @@cache={}
+
+ # same as OpenTox::Dataset.find, with caching
+ # rationale: datasets are reused very often in crossvalidation, cache them to save computational effort
+ # PENDING: may cause memory issues, test with huge datasets
+ def self.find(dataset_uri, subjectid=nil)
+ return nil if (dataset_uri==nil)
+ d = @@cache[dataset_uri.to_s+"_"+subjectid.to_s]
+ if d==nil
+ d = OpenTox::Dataset.find(dataset_uri, subjectid)
+ @@cache[dataset_uri.to_s+"_"+subjectid.to_s] = d
+ end
+ d
+ end
+
+ end
+
+end
\ No newline at end of file
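The rationale comment in this new module explains why the cache exists: crossvalidation loads the same datasets over and over. A hedged usage sketch (the require path and dataset URI are placeholders, not taken from this repository):

require "lib/dataset_cache"            # load path is an assumption, adjust to the actual setup

uri = "http://example.org/dataset/123" # placeholder dataset URI
d1  = Lib::DatasetCache.find(uri)      # first call delegates to OpenTox::Dataset.find
d2  = Lib::DatasetCache.find(uri)      # second call is answered from @@cache
puts d1.equal?(d2)                     # => true, same object, no second service call

The cache key is dataset_uri plus subjectid, so the same dataset requested with different subjectids is fetched and cached separately.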
diff --git a/lib/ot_predictions.rb b/lib/ot_predictions.rb
index 22f9b20..644168f 100755
--- a/lib/ot_predictions.rb
+++ b/lib/ot_predictions.rb
@@ -15,127 +15,161 @@ module Lib
return @compounds[instance_index]
end
- def initialize(feature_type, test_dataset_uri, test_target_dataset_uri,
- prediction_feature, prediction_dataset_uri, predicted_variable, subjectid=nil, task=nil)
+ def initialize( feature_type, test_dataset_uris, test_target_dataset_uris,
+ prediction_feature, prediction_dataset_uris, predicted_variables, subjectid=nil, task=nil)
- LOGGER.debug("loading prediciton via test-dataset:'"+test_dataset_uri.to_s+
- "', test-target-datset:'"+test_target_dataset_uri.to_s+
- "', prediction-dataset:'"+prediction_dataset_uri.to_s+
- "', prediction_feature: '"+prediction_feature.to_s+"' "+
- "', predicted_variable: '"+predicted_variable.to_s+"'")
-
- predicted_variable=prediction_feature if predicted_variable==nil
-
- test_dataset = OpenTox::Dataset.find test_dataset_uri,subjectid
- raise "test dataset not found: '"+test_dataset_uri.to_s+"'" unless test_dataset
+ test_dataset_uris = [test_dataset_uris] unless test_dataset_uris.is_a?(Array)
+ test_target_dataset_uris = [test_target_dataset_uris] unless test_target_dataset_uris.is_a?(Array)
+ prediction_dataset_uris = [prediction_dataset_uris] unless prediction_dataset_uris.is_a?(Array)
+ predicted_variables = [predicted_variables] unless predicted_variables.is_a?(Array)
+ LOGGER.debug "loading prediciton -- test-dataset: "+test_dataset_uris.inspect
+ LOGGER.debug "loading prediciton -- test-target-datset: "+test_target_dataset_uris.inspect
+ LOGGER.debug "loading prediciton -- prediction-dataset: "+prediction_dataset_uris.inspect
+ LOGGER.debug "loading prediciton -- predicted_variable: "+predicted_variables.inspect
+ LOGGER.debug "loading prediciton -- prediction_feature: "+prediction_feature.to_s
raise "prediction_feature missing" unless prediction_feature
- if test_target_dataset_uri == nil || test_target_dataset_uri.strip.size==0 || test_target_dataset_uri==test_dataset_uri
- test_target_dataset_uri = test_dataset_uri
- test_target_dataset = test_dataset
- raise "prediction_feature not found in test_dataset, specify a test_target_dataset\n"+
- "prediction_feature: '"+prediction_feature.to_s+"'\n"+
- "test_dataset: '"+test_target_dataset_uri.to_s+"'\n"+
- "available features are: "+test_target_dataset.features.inspect if test_target_dataset.features.keys.index(prediction_feature)==nil
- else
- test_target_dataset = OpenTox::Dataset.find test_target_dataset_uri,subjectid
- raise "test target datset not found: '"+test_target_dataset_uri.to_s+"'" unless test_target_dataset
- if CHECK_VALUES
- test_dataset.compounds.each do |c|
- raise "test compound not found on test class dataset "+c.to_s unless test_target_dataset.compounds.include?(c)
- end
- end
- raise "prediction_feature not found in test_target_dataset\n"+
- "prediction_feature: '"+prediction_feature.to_s+"'\n"+
- "test_target_dataset: '"+test_target_dataset_uri.to_s+"'\n"+
- "available features are: "+test_target_dataset.features.inspect if test_target_dataset.features.keys.index(prediction_feature)==nil
- end
-
- @compounds = test_dataset.compounds
- LOGGER.debug "test dataset size: "+@compounds.size.to_s
- raise "test dataset is empty "+test_dataset_uri.to_s unless @compounds.size>0
-
- if feature_type=="classification"
- accept_values = test_target_dataset.features[prediction_feature][OT.acceptValue]
- raise "'"+OT.acceptValue.to_s+"' missing/invalid for feature '"+prediction_feature.to_s+"' in dataset '"+
- test_target_dataset_uri.to_s+"', acceptValues are: '"+accept_values.inspect+"'" if accept_values==nil or accept_values.length<2
- else
- accept_values=nil
- end
+ @compounds = []
+ all_predicted_values = []
+ all_actual_values = []
+ all_confidence_values = []
+ accept_values = nil
- actual_values = []
- @compounds.each do |c|
- case feature_type
- when "classification"
- actual_values << classification_value(test_target_dataset, c, prediction_feature, accept_values)
- when "regression"
- actual_values << regression_value(test_target_dataset, c, prediction_feature)
- end
+ if task
+ task_step = 100 / (test_dataset_uris.size*2 + 1)
+ task_status = 0
end
- task.progress(40) if task # loaded actual values
+
+ test_dataset_uris.size.times do |i|
+
+ test_dataset_uri = test_dataset_uris[i]
+ test_target_dataset_uri = test_target_dataset_uris[i]
+ prediction_dataset_uri = prediction_dataset_uris[i]
+ predicted_variable = predicted_variables[i]
+
+ predicted_variable=prediction_feature if predicted_variable==nil
- prediction_dataset = OpenTox::Dataset.find prediction_dataset_uri,subjectid
- raise "prediction dataset not found: '"+prediction_dataset_uri.to_s+"'" unless prediction_dataset
+ test_dataset = Lib::DatasetCache.find test_dataset_uri,subjectid
+ raise "test dataset not found: '"+test_dataset_uri.to_s+"'" unless test_dataset
- # TODO: remove LAZAR_PREDICTION_DATASET_HACK
- no_prediction_feature = prediction_dataset.features.keys.index(predicted_variable)==nil
- if no_prediction_feature
- one_entry_per_compound = true
- @compounds.each do |c|
- if prediction_dataset.data_entries[c] and prediction_dataset.data_entries[c].size != 1
- one_entry_per_compound = false
- break
- end
- end
- msg = "prediction-feature not found: '"+predicted_variable+"' in prediction-dataset: "+prediction_dataset_uri.to_s+", available features: "+
- prediction_dataset.features.keys.inspect
- if one_entry_per_compound
- LOGGER.warn msg
+ if test_target_dataset_uri == nil || test_target_dataset_uri.strip.size==0 || test_target_dataset_uri==test_dataset_uri
+ test_target_dataset_uri = test_dataset_uri
+ test_target_dataset = test_dataset
+ raise "prediction_feature not found in test_dataset, specify a test_target_dataset\n"+
+ "prediction_feature: '"+prediction_feature.to_s+"'\n"+
+ "test_dataset: '"+test_target_dataset_uri.to_s+"'\n"+
+ "available features are: "+test_target_dataset.features.inspect if test_target_dataset.features.keys.index(prediction_feature)==nil
else
- raise msg
+ test_target_dataset = Lib::DatasetCache.find test_target_dataset_uri,subjectid
+ raise "test target datset not found: '"+test_target_dataset_uri.to_s+"'" unless test_target_dataset
+ if CHECK_VALUES
+ test_dataset.compounds.each do |c|
+ raise "test compound not found on test class dataset "+c.to_s unless test_target_dataset.compounds.include?(c)
+ end
+ end
+ raise "prediction_feature not found in test_target_dataset\n"+
+ "prediction_feature: '"+prediction_feature.to_s+"'\n"+
+ "test_target_dataset: '"+test_target_dataset_uri.to_s+"'\n"+
+ "available features are: "+test_target_dataset.features.inspect if test_target_dataset.features.keys.index(prediction_feature)==nil
end
- end
-
- raise "more predicted than test compounds test:"+@compounds.size.to_s+" < prediction:"+
- prediction_dataset.compounds.size.to_s if @compounds.size < prediction_dataset.compounds.size
- if CHECK_VALUES
- prediction_dataset.compounds.each do |c|
- raise "predicted compound not found in test dataset:\n"+c+"\ntest-compounds:\n"+
- @compounds.collect{|c| c.to_s}.join("\n") if @compounds.index(c)==nil
+
+ compounds = test_dataset.compounds
+ LOGGER.debug "test dataset size: "+compounds.size.to_s
+ raise "test dataset is empty "+test_dataset_uri.to_s unless compounds.size>0
+
+ if feature_type=="classification"
+ av = test_target_dataset.features[prediction_feature][OT.acceptValue]
+ raise "'"+OT.acceptValue.to_s+"' missing/invalid for feature '"+prediction_feature.to_s+"' in dataset '"+
+ test_target_dataset_uri.to_s+"', acceptValues are: '"+av.inspect+"'" if av==nil or av.length<2
+ if accept_values==nil
+ accept_values=av
+ else
+ raise "accept values (in folds) differ "+av.inspect+" != "+accept_values.inspect if av!=accept_values
+ end
end
- end
-
- predicted_values = []
- confidence_values = []
- @compounds.each do |c|
- if prediction_dataset.compounds.index(c)==nil
- predicted_values << nil
- confidence_values << nil
- else
+
+ actual_values = []
+ compounds.each do |c|
case feature_type
when "classification"
- # TODO: remove LAZAR_PREDICTION_DATASET_HACK
- predicted_values << classification_value(prediction_dataset, c, no_prediction_feature ? nil : predicted_variable, accept_values)
+ actual_values << classification_value(test_target_dataset, c, prediction_feature, accept_values)
when "regression"
- predicted_values << regression_value(prediction_dataset, c, no_prediction_feature ? nil : predicted_variable)
+ actual_values << regression_value(test_target_dataset, c, prediction_feature)
+ end
+ end
+ task.progress( task_status += task_step ) if task # loaded actual values
+
+ prediction_dataset = Lib::DatasetCache.find prediction_dataset_uri,subjectid
+ raise "prediction dataset not found: '"+prediction_dataset_uri.to_s+"'" unless prediction_dataset
+
+ # TODO: remove LAZAR_PREDICTION_DATASET_HACK
+ no_prediction_feature = prediction_dataset.features.keys.index(predicted_variable)==nil
+ if no_prediction_feature
+ one_entry_per_compound = true
+ compounds.each do |c|
+ if prediction_dataset.data_entries[c] and prediction_dataset.data_entries[c].size != 1
+ one_entry_per_compound = false
+ break
+ end
+ end
+ msg = "prediction-feature not found: '"+predicted_variable+"' in prediction-dataset: "+prediction_dataset_uri.to_s+", available features: "+
+ prediction_dataset.features.keys.inspect
+ if one_entry_per_compound
+ LOGGER.warn msg
+ else
+ raise msg
end
- # TODO confidence_values << prediction_dataset.get_prediction_confidence(c, predicted_variable)
- conf = 1
- begin
- feature = prediction_dataset.data_entries[c].keys[0]
- feature_data = prediction_dataset.features[feature]
- conf = feature_data[OT.confidence] if feature_data[OT.confidence]!=nil
- rescue
- LOGGER.warn "could not get confidence"
+ end
+
+ raise "more predicted than test compounds test:"+compounds.size.to_s+" < prediction:"+
+ prediction_dataset.compounds.size.to_s if compounds.size < prediction_dataset.compounds.size
+ if CHECK_VALUES
+ prediction_dataset.compounds.each do |c|
+ raise "predicted compound not found in test dataset:\n"+c+"\ntest-compounds:\n"+
+ compounds.collect{|c| c.to_s}.join("\n") if compounds.index(c)==nil
end
- confidence_values << conf
end
+
+ predicted_values = []
+ confidence_values = []
+ count = 0
+ compounds.each do |c|
+ if prediction_dataset.compounds.index(c)==nil
+ predicted_values << nil
+ confidence_values << nil
+ else
+ case feature_type
+ when "classification"
+ # TODO: remove LAZAR_PREDICTION_DATASET_HACK
+ predicted_values << classification_value(prediction_dataset, c, no_prediction_feature ? nil : predicted_variable, accept_values)
+ when "regression"
+ predicted_values << regression_value(prediction_dataset, c, no_prediction_feature ? nil : predicted_variable)
+ end
+ # TODO confidence_values << prediction_dataset.get_prediction_confidence(c, predicted_variable)
+ conf = predicted_values[count]!=nil ? 1 : 0
+ begin
+ feature = prediction_dataset.data_entries[c].keys[0]
+ feature_data = prediction_dataset.features[feature]
+ conf = feature_data[OT.confidence] if feature_data[OT.confidence]!=nil
+ rescue
+ LOGGER.warn "could not get confidence"
+ end
+ confidence_values << conf
+ end
+ count += 1
+ end
+
+ @compounds += compounds
+ all_predicted_values += predicted_values
+ all_actual_values += actual_values
+ all_confidence_values += confidence_values
+
+ task.progress( task_status += task_step ) if task # loaded predicted values and confidence
end
- task.progress(80) if task # loaded predicted values and confidence
- super(predicted_values, actual_values, confidence_values, feature_type, accept_values)
- raise "illegal num compounds "+num_info if @compounds.size != @predicted_values.size
- task.progress(100) if task # done with the mathmatics
+ super(all_predicted_values, all_actual_values, all_confidence_values, feature_type, accept_values)
+ raise "illegal num compounds "+num_info if @compounds.size != @predicted_values.size
+ task.progress(100) if task # done with the mathematics
end
private
@@ -205,6 +239,7 @@ module Lib
def self.to_array( predictions, add_pic=false, format=false )
res = []
+ conf_column = nil
predictions.each do |p|
(0..p.num_instances-1).each do |i|
a = []
@@ -230,13 +265,22 @@ module Lib
end
end
if p.confidence_values_available?
- a << (format ? p.confidence_value(i).to_nice_s : p.confidence_value(i))
+ conf_column = a.size
+ a << p.confidence_value(i) #(format ? p.confidence_value(i).to_nice_s : p.confidence_value(i))
end
a << p.identifier(i)
res << a
end
end
-
+
+ if conf_column!=nil
+ res.sort!{ |x,y| y[conf_column] <=> x[conf_column] }
+ if format
+ res.each do |a|
+ a[conf_column] = a[conf_column].to_nice_s
+ end
+ end
+ end
header = []
header << "compound" if add_pic
header << "actual value"