From 120ae520e6a72bb9f32bd904767decb87882b260 Mon Sep 17 00:00:00 2001
From: gebele
Date: Wed, 1 Nov 2017 15:41:34 +0000
Subject: fixed cramer

---
 application.rb |  99 ++++++++++++++++++++--------------
 task.rb        | 177 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 236 insertions(+), 40 deletions(-)
 create mode 100644 task.rb

diff --git a/application.rb b/application.rb
index b46c5d2..e4b9e38 100644
--- a/application.rb
+++ b/application.rb
@@ -207,49 +207,61 @@ get '/task/?' do
   smiles = compound.smiles
   task = Task.find(params[:predictions].to_s)
   unless task.predictions[params[:model]].nil?
-    html = ""
-    html += ""
-    string = "#{image}#{smiles}"
-    prediction = task.predictions[params[:model]][pageNumber.to_i]
-    sorter = []
-    if prediction[:info]
-      sorter << {"Info" => prediction[:info]}
-      if prediction[:measurements_string].kind_of?(Array)
-        sorter << {"Measured activity" => "#{prediction[:measurements_string].join(";")}#{prediction[:converted_measurements].join(";")}"}
-      else
-        sorter << {"Measured activity" => "#{prediction[:measurements_string]}#{prediction[:converted_measurements]}"}
-      end
-    end
-    # regression
-    if prediction[:prediction_interval]
-      sorter << {"Prediction" => "#{prediction[:prediction_value]}#{prediction[:converted_prediction_value]}"}
-      sorter << {"95% Prediction interval" => "#{prediction[:interval]}#{prediction[:converted_interval]}"}
-      sorter << {"Warnings" => prediction[:warnings].join("")}
-    # classification
-    elsif prediction[:probabilities]
-      sorter << {"Consensus prediction" => prediction["Consensus prediction"]}
-      sorter << {"Consensus confidence" => prediction["Consensus confidence"]}
-      sorter << {"Structural alerts for mutagenicity" => prediction["Structural alerts for mutagenicity"]}
-      sorter << {"Lazar mutagenicity (Salmonella typhimurium)" => ""}
-      sorter << {"Prediction" => prediction[:value]}
-      sorter << {"Probability" => prediction[:probabilities].collect{|k,v| "#{k}: #{v.signif(3)}"}.join("")}
-    else
-      sorter << {"Warnings" => prediction[:warnings].join("")}
-    end
-    sorter.each_with_index do |hash,idx|
-      k = hash.keys[0]
-      v = hash.values[0]
-      string += (idx == 0 ? "" : "")+(k =~ /lazar/i ? "" : "")
-      # keyword
-      string += "#{k}:"
-      string += ""
-      # values
-      string += "#{v}"
-      string += ""
-    end
-    string += ""
-    html += "#{string}"
+    if params[:model] == "Cramer"
+      prediction = task.predictions[params[:model]]
+      html = ""
+      html += ""
+      string = ""
+      html += "#{string}#{image}#{smiles}"
+      string += ""
+      string += ""
+      string += "Cramer rules:#{prediction["Cramer rules"][pageNumber.to_i]}Cramer rules, with extensions:#{prediction["Cramer rules, with extensions"][pageNumber.to_i]}"
+    else
+      html = ""
+      html += ""
+      string = ""
+      html += "#{string}#{image}#{smiles}"
+      prediction = task.predictions[params[:model]][pageNumber.to_i]
+      sorter = []
+      $logger.debug prediction
+      if prediction[:info]
+        sorter << {"Info" => prediction[:info]}
+        if prediction[:measurements_string].kind_of?(Array)
+          sorter << {"Measured activity" => "#{prediction[:measurements_string].join(";")}#{prediction[:converted_measurements].join(";")}"}
+        else
+          sorter << {"Measured activity" => "#{prediction[:measurements_string]}#{prediction[:converted_measurements]}"}
+        end
+      end
+      # regression
+      if prediction[:prediction_interval]
+        sorter << {"Prediction" => "#{prediction[:prediction_value]}#{prediction[:converted_prediction_value]}"}
+        sorter << {"95% Prediction interval" => "#{prediction[:interval]}#{prediction[:converted_interval]}"}
+        sorter << {"Warnings" => prediction[:warnings].join("")}
+      # classification
+      elsif prediction[:probabilities]
+        sorter << {"Consensus prediction" => prediction["Consensus prediction"]}
+        sorter << {"Consensus confidence" => prediction["Consensus confidence"]}
+        sorter << {"Structural alerts for mutagenicity" => prediction["Structural alerts for mutagenicity"]}
+        sorter << {"Lazar mutagenicity (Salmonella typhimurium)" => ""}
+        sorter << {"Prediction" => prediction[:value]}
+        sorter << {"Probability" => prediction[:probabilities].collect{|k,v| "#{k}: #{v.signif(3)}"}.join("")}
+      else
+        sorter << {"Warnings" => prediction[:warnings].join("")}
+      end
+      sorter.each_with_index do |hash,idx|
+        k = hash.keys[0]
+        v = hash.values[0]
+        string += (idx == 0 ? "" : "")+(k =~ /lazar/i ? "" : "")
+        # keyword
+        string += "#{k}:"
+        string += ""
+        # values
+        string += "#{v}"
+        string += ""
+      end
+      string += ""
+    end
     return JSON.pretty_generate(:predictions => [html])
   end
@@ -409,6 +421,13 @@ post '/predict/?' do
       "#{output["cramer_rules"][idx] != "nil" ? output["cramer_rules"][idx] : "none" },"\
       "#{output["cramer_rules_extensions"][idx] != "nil" ? output["cramer_rules_extensions"][idx] : "none"}\n"
     end
+    #predictions = []
+    #predictions << {"Cramer rules" => output["cramer_rules"]}
+    #predictions << {"Cramer rules, with extensions" => output["cramer_rules_extensions"]}
+    predictions = {}
+    predictions["Cramer rules"] = output["cramer_rules"].collect{|rule| rule != "nil" ? rule : "none"}
+    predictions["Cramer rules, with extensions"] = output["cramer_rules_extensions"].collect{|rule| rule != "nil" ? rule : "none"}
+
     # write csv
     t[:csv] = csv
     # write predictions
diff --git a/task.rb b/task.rb
new file mode 100644
index 0000000..295e580
--- /dev/null
+++ b/task.rb
@@ -0,0 +1,177 @@
+DEFAULT_TASK_MAX_DURATION = 36000
+module OpenTox
+
+  # Class for handling asynchronous tasks
+  class Task
+
+    attr_accessor :pid, :observer_pid
+
+    def metadata
+      super true # always update metadata
+    end
+
+    def self.task_uri
+      Task.new.uri
+    end
+
+    def self.run(description, creator=nil, uri=nil)
+
+      task = Task.new uri
+      #task[RDF::OT.created_at] = DateTime.now
+      #task[RDF::OT.hasStatus] = "Running"
+      #task[RDF::DC.description] = description.to_s
+      #task[RDF::DC.creator] = creator.to_s
+      #task[RDF::OT.percentageCompleted] = "0"
+      #task.put
+      pid = fork do
+        begin
+          #task.completed yield
+        rescue => e
+          # wrap non-opentox-errors first
+          #e = OpenTox::Error.new(500,e.message,nil,e.backtrace) unless e.is_a?(OpenTox::Error)
+          #$logger.error "error in task #{task.uri} created by #{creator}" # creator is not logged because error is logged when thrown
+          #RestClientWrapper.put(File.join(task.uri,'Error'),{:errorReport => e.to_ntriples},{:content_type => 'text/plain'})
+          #task.kill
+        end
+      end
+      Process.detach(pid)
+      #task.pid = pid
+
+      # watch if task has been cancelled
+      #observer_pid = fork do
+      #  task.wait
+      #  begin
+      #    Process.kill(9,task.pid) if task.cancelled?
+      #  rescue
+      #    $logger.warn "Could not kill process of task #{task.uri}, pid: #{task.pid}"
+      #  end
+      #end
+      #Process.detach(observer_pid)
+      #task.observer_pid = observer_pid
+      #task
+      pid
+
+    end
+
+    def kill
+      Process.kill(9,@pid)
+      Process.kill(9,@observer_pid)
+    rescue # no need to raise an exception if processes are not running
+    end
+
+    def description
+      self.[](RDF::DC.description)
+    end
+
+    def creator
+      self.[](RDF::DC.creator)
+    end
+
+    def cancel
+      kill
+      self.[]=(RDF::OT.hasStatus, "Cancelled")
+      self.[]=(RDF::OT.finished_at, DateTime.now)
+      put
+    end
+
+    def completed(uri)
+      self.[]=(RDF::OT.resultURI, uri)
+      self.[]=(RDF::OT.hasStatus, "Completed")
+      self.[]=(RDF::OT.finished_at, DateTime.now)
+      self.[]=(RDF::OT.percentageCompleted, "100")
+      put
+    end
+
+    # waits for a task, unless time exceeds or state is no longer running
+    def wait
+      start_time = Time.new
+      due_to_time = start_time + DEFAULT_TASK_MAX_DURATION
+      dur = 0.2
+      while running?
+        sleep dur
+        dur = [[(Time.new - start_time)/20.0,0.3].max,300.0].min
+        request_timeout_error "max wait time exceeded ("+DEFAULT_TASK_MAX_DURATION.to_s+"sec), task: '"+@uri.to_s+"'" if (Time.new > due_to_time)
+      end
+    end
+
+  end
+
+  def code
+    RestClientWrapper.get(@uri).code.to_i
+  end
+
+  # get only header for status requests
+  def running?
+    code == 202
+  end
+
+  def cancelled?
+    code == 503
+  end
+
+  def completed?
+    code == 200
+  end
+
+  def error?
+    code >= 400 and code != 503
+  end
+
+  [:hasStatus, :resultURI, :created_at, :finished_at, :percentageCompleted].each do |method|
+    define_method method do
+      response = self.[](RDF::OT[method])
+      response = self.[](RDF::OT1[method]) unless response # API 1.1 compatibility
+      response
+    end
+  end
+
+  # Check status of a task
+  # @return [String] Status
+  def status
+    self[RDF::OT.hasStatus]
+  end
+
+  def error_report
+    get
+    report = {}
+    query = RDF::Query.new({
+      :report => {
+        RDF.type => RDF::OT.ErrorReport,
+        :property => :value,
+      }
+    })
+    query.execute(@rdf).each do |solution|
+      report[solution.property] = solution.value.to_s
+    end
+    report
+  end
+
+  #TODO: subtasks (only for progress in validation)
+  class SubTask
+
+    def initialize(task, min, max)
+      #TODO add subtask code
+    end
+
+    def self.create(task, min, max)
+      if task
+        SubTask.new(task, min, max)
+      else
+        nil
+      end
+    end
+
+    def waiting_for(task_uri)
+      #TODO add subtask code
+    end
+
+    def progress(pct)
+      #TODO add subtask code
+    end
+
+    def running?()
+      #TODO add subtask code
+    end
+  end
+
+end
-- 
cgit v1.2.3
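
For reference, a minimal sketch (not part of the patch) of the data flow the two application.rb hunks rely on: the /predict route stores the Cramer output as a hash of per-compound arrays, mapping the string "nil" to "none", and the /task route later indexes those arrays with the requested page number. The values in output below are invented sample data used only to show the shape.

  # Invented sample output from the batch Cramer call; only the shape matters.
  output = {
    "cramer_rules"            => ["High (Class III)", "nil"],
    "cramer_rules_extensions" => ["nil", "Low (Class I)"]
  }

  # Same transformation as in the /predict hunk above.
  predictions = {}
  predictions["Cramer rules"] = output["cramer_rules"].collect{|rule| rule != "nil" ? rule : "none"}
  predictions["Cramer rules, with extensions"] = output["cramer_rules_extensions"].collect{|rule| rule != "nil" ? rule : "none"}

  # The /task route reads one compound per page:
  pageNumber = 0
  puts predictions["Cramer rules"][pageNumber]                  # => "High (Class III)"
  puts predictions["Cramer rules, with extensions"][pageNumber] # => "none"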
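
The wait method in task.rb polls with a growing delay: each iteration sleeps, then sets the next delay to one twentieth of the elapsed time, clamped between 0.3 and 300 seconds (starting from 0.2 s), and calls request_timeout_error once DEFAULT_TASK_MAX_DURATION is exceeded. A standalone sketch of just that back-off arithmetic, with the HTTP status polling left out:

  # Back-off schedule as used in Task#wait; no HTTP involved here.
  start_time = Time.new
  dur = 0.2
  5.times do
    sleep dur
    dur = [[(Time.new - start_time)/20.0, 0.3].max, 300.0].min
    puts "next poll in #{dur.round(2)} s"
  end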