author    gebele <gebele@in-silico.ch>  2019-06-13 15:28:59 +0000
committer gebele <gebele@in-silico.ch>  2019-06-13 15:28:59 +0000
commit    741701df8ff0861b3607a30e9aaf8b8a0c303cdf (patch)
tree      40fd2c6ca4eb85f3077859d9a808d01ec5aa8ece /lib
parent    f37f0b654b36b66c133755c01f033859c35197f6 (diff)
update with API
Diffstat (limited to 'lib')
-rw-r--r--  lib/api.rb          28
-rw-r--r--  lib/compound.rb      6
-rw-r--r--  lib/dataset.rb     133
-rw-r--r--  lib/endpoint.rb     10
-rw-r--r--  lib/feature.rb       4
-rw-r--r--  lib/model.rb        16
-rw-r--r--  lib/report.rb      193
-rw-r--r--  lib/substance.rb    18
-rw-r--r--  lib/swagger.rb       6
-rw-r--r--  lib/validation.rb   24
10 files changed, 80 insertions(+), 358 deletions(-)
diff --git a/lib/api.rb b/lib/api.rb
index 28e33df..c3b27ce 100644
--- a/lib/api.rb
+++ b/lib/api.rb
@@ -1,9 +1,23 @@
-# route to swagger API file
-get "/api/api.json" do
- response['Content-Type'] = "application/json"
+get "/api" do
api_file = File.join("api", "api.json")
- bad_request_error "API Documentation in Swagger JSON is not implemented." unless File.exists?(api_file)
- api_hash = JSON.parse(File.read(api_file))
- api_hash["host"] = request.env['HTTP_HOST']
- return api_hash.to_json
+ halt 400, "API Documentation in Swagger JSON is not implemented." unless File.exists?(api_file)
+ case @accept
+ when "text/html"
+ response['Content-Type'] = "text/html"
+ index_file = File.join(ENV['HOME'],"swagger-ui/dist/index.html")
+ File.read(index_file)
+ when "application/json"
+ response['Content-Type'] = "application/json"
+ api_hash = JSON.parse(File.read(api_file))
+ api_hash["host"] = request.env['HTTP_HOST']
+ return api_hash.to_json
+ else
+ halt 400, "unknown MIME type '#{@accept}'"
+ end
+end
+
+get "/api/api.json" do
+ response['Content-Type'] = "text/html"
+ index_file = File.join(ENV['HOME'],"swagger-ui/dist/index.html")
+ File.read(index_file)
end
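
Most routes in this commit branch on an @accept instance variable that is not set anywhere in this diff. A minimal sketch of how it is presumably derived, assuming a classic Sinatra before filter elsewhere in the app (names and placement are assumptions, not the actual implementation):

    require "sinatra"

    # assumed helper, not part of this diff: derive @accept from the Accept request header
    before do
      @accept = request.env["HTTP_ACCEPT"].to_s.split(",").first
    end
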
diff --git a/lib/compound.rb b/lib/compound.rb
index 77948ab..4606aa4 100644
--- a/lib/compound.rb
+++ b/lib/compound.rb
@@ -2,7 +2,7 @@
# @param [Header] Accept one of text/plain, application/json
# @param [Path] Descriptor name or descriptor ID (e.g.: Openbabel.HBA1, 5755f8eb3cf99a00d8fedf2f)
# @return [text/plain, application/json] list of all descriptors, or a single descriptor
-get "/compound/descriptor/?:descriptor?" do
+get "/api/compound/descriptor/?:descriptor?" do
case @accept
when "application/json"
return "#{JSON.pretty_generate PhysChem::DESCRIPTORS} " unless params[:descriptor]
@@ -15,7 +15,7 @@ get "/compound/descriptor/?:descriptor?" do
end
end
-post "/compound/descriptor/?" do
+post "/api/compound/descriptor/?" do
bad_request_error "Missing Parameter " unless params[:identifier] && params[:descriptor]
descriptors = params['descriptor'].split(',')
compound = Compound.from_smiles params[:identifier]
@@ -38,7 +38,7 @@ post "/compound/descriptor/?" do
end
end
-get %r{/compound/(InChI.+)} do |input|
+get %r{/api/compound/(InChI.+)} do |input|
compound = Compound.from_inchi URI.unescape(input)
if compound
response['Content-Type'] = @accept
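
The compound routes only gain the /api prefix; behaviour is unchanged. A hedged usage sketch for the descriptor calculation route, assuming a local service on port 8080 (hypothetical host/port; identifier and descriptor are the form parameters the route checks):

    require "net/http"

    uri = URI("http://localhost:8080/api/compound/descriptor")
    # POST a SMILES string plus a comma-separated descriptor list, as the route above expects
    res = Net::HTTP.post_form(uri, "identifier" => "CC(=O)Oc1ccccc1C(=O)O",
                                   "descriptor" => "Openbabel.HBA1")
    puts res.body
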
diff --git a/lib/dataset.rb b/lib/dataset.rb
index 749167b..00685b8 100644
--- a/lib/dataset.rb
+++ b/lib/dataset.rb
@@ -1,136 +1,29 @@
# Get all datasets
-get "/dataset/?" do
- datasets = Dataset.all
+get "/api/dataset/?" do
+ datasets = Dataset.all #.limit(100)
case @accept
- when "text/uri-list"
- uri_list = datasets.collect{|dataset| uri("/dataset/#{dataset.id}")}
- return uri_list.join("\n") + "\n"
when "application/json"
- datasets = JSON.parse datasets.to_json
- list = []
- datasets.each{|d| list << uri("/dataset/#{d["_id"]["$oid"]}")}
- return list.to_json
+ list = datasets.collect{|dataset| uri("/api/dataset/#{dataset.id}")}.to_json
+ return list
else
- bad_request_error "Mime type #{@accept} is not supported."
+ halt 400, "Mime type #{@accept} is not supported."
end
end
# Get a dataset
-get "/dataset/:id/?" do
- if Task.where(id: params[:id]).exists?
- task = Task.find params[:id]
- halt 404, "Dataset with id: #{params[:id]} not found." unless task.percent == 100
- $logger.debug task.inspect
- response['Content-Type'] = "text/csv"
- m = Model::Validation.find task.model_id
- dataset = Batch.find task.dataset_id
- @ids = dataset.ids
- warnings = dataset.warnings.blank? ? nil : dataset.warnings.join("\n")
- unless warnings.nil?
- @parse = []
- warnings.split("\n").each do |warning|
- if warning =~ /^Cannot/
- smi = warning.split("SMILES compound").last.split("at").first
- line = warning.split("SMILES compound").last.split("at line").last.split("of").first.strip.to_i
- @parse << "Cannot parse SMILES compound#{smi}at line #{line} of #{dataset.source.split("/").last}\n"
- end
- end
- keys_array = []
- warnings.split("\n").each do |warning|
- if warning =~ /^Duplicate/
- text = warning.split("ID").first
- numbers = warning.split("ID").last.split("and")
- keys_array << numbers.collect{|n| n.strip.to_i}
- end
- end
- @dups = {}
- keys_array.each do |keys|
- keys.each do |key|
- @dups[key] = "Duplicate compound at ID #{keys.join(" and ")}\n"
- end
- end
- end
- $logger.debug "dups: #{@dups}"
- endpoint = "#{m.endpoint}_(#{m.species})"
- tempfile = Tempfile.new
- header = task.csv
- lines = []
- $logger.debug task.predictions
- task.predictions[m.id.to_s].each_with_index do |hash,idx|
- identifier = hash.keys[0]
- prediction_id = hash.values[0]
- # add duplicate warning at the end of a line if ID matches
- if @dups[idx+1]
- if prediction_id.is_a? BSON::ObjectId
- if @ids.blank?
- lines << "#{idx+1},#{identifier},#{Prediction.find(prediction_id).csv.tr("\n","")},#{@dups[idx+1]}"
- else
- lines << "#{idx+1},#{@ids[idx]},#{identifier},#{Prediction.find(prediction_id).csv.tr("\n","")},#{@dups[idx+1]}"
- end
- else
- if @ids.blank?
- lines << "#{idx+1},#{identifier},\n"
- else
- lines << "#{idx+1},#{@ids[idx]}#{identifier},\n"
- end
- end
- else
- if prediction_id.is_a? BSON::ObjectId
- if @ids.blank?
- lines << "#{idx+1},#{identifier},#{Prediction.find(prediction_id).csv}"
- else
- lines << "#{idx+1},#{@ids[idx]},#{identifier},#{Prediction.find(prediction_id).csv}"
- end
- else
- if @ids.blank?
- lines << "#{idx+1},#{identifier},\n"
- else
- lines << "#{idx+1},#{@ids[idx]}#{identifier},\n"
- end
- end
- end
- end
- (@parse && !@parse.blank?) ? tempfile.write(header+lines.join("")+"\n"+@parse.join("\n")) : tempfile.write(header+lines.join(""))
- #tempfile.write(header+lines.join(""))
- tempfile.rewind
- ########################
-=begin
- header = task.csv
- lines = []
- task.predictions.each_with_index do |result,idx|
- identifier = result[0]
- prediction_id = result[1]
- prediction = Prediction.find prediction_id
- lines << "#{idx+1},#{identifier},#{prediction.csv.tr("\n","")}"
- end
- return header+lines.join("\n")
-=end
- return tempfile.read
+get "/api/dataset/:id/?" do
+ dataset = Dataset.find :id => params[:id]
+ halt 400, "Dataset with id: #{params[:id]} not found." unless dataset
+ case @accept
+ when "text/csv", "application/csv"
+ return dataset.to_csv
else
- dataset = Dataset.find :id => params[:id]
- halt 400, "Dataset with id: #{params[:id]} not found." unless dataset
- case @accept
- when "application/json"
- dataset.data_entries.each do |k, v|
- dataset.data_entries[k][:URI] = uri("/substance/#{k}")
- end
- dataset[:URI] = uri("/dataset/#{dataset.id}")
- dataset[:substances] = uri("/dataset/#{dataset.id}/substances")
- dataset[:features] = uri("/dataset/#{dataset.id}/features")
- return dataset.to_json
- when "text/csv", "application/csv"
- return dataset.to_csv
- else
- bad_request_error "Mime type #{@accept} is not supported."
- end
+ bad_request_error "Mime type #{@accept} is not supported."
end
end
# Get a dataset attribute. One of compounds, nanoparticles, substances, features
-get "/dataset/:id/:attribute/?" do
- if Task.where(id: params[:id]).exists?
- halt 400, "No attributes selection available for dataset with id: #{params[:id]}.".to_json
- end
+get "/api/dataset/:id/:attribute/?" do
dataset = Dataset.find :id => params[:id]
halt 400, "Dataset with id: #{params[:id]} not found." unless dataset
attribs = ["compounds", "nanoparticles", "substances", "features"]
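
The rewritten dataset handler drops the inline Task/Batch CSV assembly and simply delegates to Dataset#to_csv after a plain lookup. A minimal client sketch, assuming a local service and a placeholder dataset id:

    require "net/http"

    uri = URI("http://localhost:8080/api/dataset/5755f8eb3cf99a00d8fedf2f")  # placeholder id
    req = Net::HTTP::Get.new(uri)
    req["Accept"] = "text/csv"                 # the route also accepts application/csv
    res = Net::HTTP.start(uri.hostname, uri.port) { |http| http.request(req) }
    File.write("dataset.csv", res.body) if res.is_a?(Net::HTTPSuccess)
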
diff --git a/lib/endpoint.rb b/lib/endpoint.rb
index ef39787..66b7ab2 100644
--- a/lib/endpoint.rb
+++ b/lib/endpoint.rb
@@ -1,7 +1,7 @@
# Get a list of all endpoints
# @param [Header] Accept one of text/uri-list,
# @return [text/uri-list] list of all endpoints
-get "/endpoint/?" do
+get "/api/endpoint/?" do
models = Model::Validation.all
endpoints = models.collect{|m| m.endpoint}.uniq
case @accept
@@ -10,14 +10,14 @@ get "/endpoint/?" do
when "application/json"
return endpoints.to_json
else
- bad_request_error "Mime type #{@accept} is not supported."
+ halt 400, "Mime type #{@accept} is not supported."
end
end
-get "/endpoint/:endpoint/?" do
+get "/api/endpoint/:endpoint/?" do
models = Model::Validation.where(endpoint: params[:endpoint])
list = []
- models.each{|m| list << {m.species => uri("/model/#{m.id}")} }
- not_found_error "Endpoint: #{params[:endpoint]} not found." if models.blank?
+ models.each{|m| list << {m.species => uri("/api/model/#{m.id}")} }
+ halt 404, "Endpoint: #{params[:endpoint]} not found." if models.blank?
return list.to_json
end
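
GET /api/endpoint/:endpoint now answers with a JSON array of one-entry hashes mapping a species to its model URI, and halts with 404 when no model matches. A client sketch with a placeholder endpoint name and host:

    require "net/http"
    require "json"

    uri = URI("http://localhost:8080/api/endpoint/Mutagenicity")  # placeholder endpoint name
    res = Net::HTTP.get_response(uri)
    if res.code == "404"
      warn res.body                            # "Endpoint: ... not found."
    else
      # each entry maps a species to a model URI, e.g. {"Rodents" => ".../api/model/<id>"}
      JSON.parse(res.body).each { |entry| puts entry }
    end
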
diff --git a/lib/feature.rb b/lib/feature.rb
index 06a5b37..3123997 100644
--- a/lib/feature.rb
+++ b/lib/feature.rb
@@ -1,5 +1,5 @@
# Get all Features
-get "/feature/?" do
+get "/api/feature/?" do
features = Feature.all
case @accept
when "text/uri-list"
@@ -16,7 +16,7 @@ get "/feature/?" do
end
# Get a feature
-get "/feature/:id/?" do
+get "/api/feature/:id/?" do
case @accept
when "application/json"
feature = Feature.find :id => params[:id]
diff --git a/lib/model.rb b/lib/model.rb
index 42f3a95..9bf4f53 100644
--- a/lib/model.rb
+++ b/lib/model.rb
@@ -1,29 +1,29 @@
# Get a list of all prediction models
# @param [Header] Accept one of text/uri-list,
# @return [text/uri-list] list of all prediction models
-get "/model/?" do
+get "/api/model/?" do
models = Model::Validation.all
case @accept
when "text/uri-list"
- uri_list = models.collect{|model| uri("/model/#{model.id}")}
+ uri_list = models.collect{|model| uri("/api/model/#{model.id}")}
return uri_list.join("\n") + "\n"
when "application/json"
models = JSON.parse models.to_json
list = []
- models.each{|m| list << uri("/model/#{m["_id"]["$oid"]}")}
+ models.each{|m| list << uri("/api/model/#{m["_id"]["$oid"]}")}
return list.to_json
else
- bad_request_error "Mime type #{@accept} is not supported."
+ halt 400, "Mime type #{@accept} is not supported."
end
end
-get "/model/:id/?" do
+get "/api/model/:id/?" do
model = Model::Validation.find params[:id]
- not_found_error "Model with id: #{params[:id]} not found." unless model
+ halt 400, "Model with id: #{params[:id]} not found." unless model
return model.to_json
end
-post "/model/:id/?" do
+post "/api/model/:id/?" do
if request.content_type == "application/x-www-form-urlencoded"
identifier = params[:identifier].strip.gsub(/\A"|"\Z/,'')
compound = Compound.from_smiles identifier
@@ -146,6 +146,6 @@ post "/model/:id/?" do
tid = @task.id.to_s
return 202, to("/task/#{tid}").to_json
else
- bad_request_error "No accepted content type"
+ halt 400, "No accepted content type"
end
end
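
POST /api/model/:id keeps its asynchronous contract: the middle of the route is elided in this hunk, but the visible tail queues a task and returns 202 plus the task URI (still the unprefixed /task/<id>), while unsupported content types halt with 400. A client sketch with placeholder model id and host:

    require "net/http"
    require "json"

    uri = URI("http://localhost:8080/api/model/5755f8eb3cf99a00d8fedf2f")  # placeholder model id
    res = Net::HTTP.post_form(uri, "identifier" => "c1ccccc1N")            # SMILES via the form-encoded branch
    case res.code
    when "202" then puts "queued, poll #{JSON.parse(res.body)}"            # body is the task URI as a JSON string
    when "400" then warn res.body                                          # e.g. "No accepted content type"
    else puts res.body
    end
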
diff --git a/lib/report.rb b/lib/report.rb
index f576106..7c06d60 100644
--- a/lib/report.rb
+++ b/lib/report.rb
@@ -1,208 +1,29 @@
# Get a list of all possible reports to prediction models
# @param [Header] Accept one of text/uri-list,
# @return [text/uri-list] list of all reports
-get "/report/?" do
+get "/api/report/?" do
models = Model::Validation.all
case @accept
when "text/uri-list"
- uri_list = models.collect{|model| uri("/report/#{model.model_id}")}
+ uri_list = models.collect{|model| uri("/api/report/#{model.model_id}")}
return uri_list.join("\n") + "\n"
when "application/json"
models = JSON.parse models.to_json
list = []
- models.each{|m| list << uri("/report/#{m["model_id"]["$oid"]}")}
+ models.each{|m| list << uri("/api/report/#{m["_id"]["$oid"]}")}
return list.to_json
else
- bad_request_error "Mime type #{@accept} is not supported."
+ halt 400, "Mime type #{@accept} is not supported."
end
end
-get "/report/:id/?" do
+get "/api/report/:id/?" do
case @accept
when "application/xml"
- model = Model::Lazar.find params[:id]
- not_found_error "Model with id: #{params[:id]} not found." unless model
- prediction_model = Model::Validation.find_by :model_id => params[:id]
- validation_template = File.join(File.dirname(__FILE__),"../views/model_details.haml")
-
- if File.directory?("#{File.dirname(__FILE__)}/../../lazar")
- lazar_commit = `cd #{File.dirname(__FILE__)}/../../lazar; git rev-parse HEAD`.strip
- lazar_commit = "https://github.com/opentox/lazar/tree/#{lazar_commit}"
- else
- lazar_commit = "https://github.com/opentox/lazar/releases/tag/v#{Gem.loaded_specs["lazar"].version}"
- end
-
- report = OpenTox::QMRFReport.new
-
- # QSAR Identifier Title 1.1
- report.value "QSAR_title", "Lazar model for #{prediction_model.species} #{prediction_model.endpoint}"
-
- # Software coding the model 1.3
- report.change_catalog :software_catalog, :firstsoftware, {:name => "lazar", :description => "lazar Lazy Structure- Activity Relationships", :number => "1", :url => "https://lazar.in-silico.ch", :contact => "info@in-silico.ch"}
- report.ref_catalog :QSAR_software, :software_catalog, :firstsoftware
-
- # Date of QMRF 2.1
- report.value "qmrf_date", "#{Time.now.strftime('%d %B %Y')}"
-
- # QMRF author(s) and contact details 2.1
- report.change_catalog :authors_catalog, :firstauthor, {:name => "Christoph Helma", :affiliation => "in silico toxicology gmbh", :contact => "Rastatterstr. 41, CH-4057 Basel", :email => "info@in-silico.ch", :number => "1", :url => "www.in-silico.ch"}
- report.ref_catalog :qmrf_authors, :authors_catalog, :firstauthor
-
- # Model developer(s) and contact details 2.5
- report.change_catalog :authors_catalog, :modelauthor, {:name => "Christoph Helma", :affiliation => "in silico toxicology gmbh", :contact => "Rastatterstr. 41, CH-4057 Basel", :email => "info@in-silico.ch", :number => "1", :url => "www.in-silico.ch"}
- report.ref_catalog :model_authors, :authors_catalog, :modelauthor
-
- # Date of model development and/or publication 2.6
- report.value "model_date", "#{Time.parse(model.created_at.to_s).strftime('%Y')}"
-
- # Reference(s) to main scientific papers and/or software package 2.7
- report.change_catalog :publications_catalog, :publications_catalog_1, {:title => "Maunz, Guetlein, Rautenberg, Vorgrimmler, Gebele and Helma (2013), lazar: a modular predictive toxicology framework ", :url => "http://dx.doi.org/10.3389/fphar.2013.00038"}
- report.ref_catalog :references, :publications_catalog, :publications_catalog_1
-
- # Reference(s) to main scientific papers and/or software package 2.7
- report.change_catalog :publications_catalog, :publications_catalog_2, {:title => "Maunz A and Helma C (2008) Prediction of chemical toxicity with local support vector regression and activity-specific kernels. SAR & QSAR in Environmental Research 19 (5-6), 413-431", :url => "http://dx.doi.org/10.1080/10629360802358430"}
- report.ref_catalog :references, :publications_catalog, :publications_catalog_2
-
- # Species 3.1
- report.value "model_species", prediction_model.species
-
- # Endpoint 3.2
- report.change_catalog :endpoints_catalog, :endpoints_catalog_1, {:name => prediction_model.endpoint, :group => ""}
- report.ref_catalog :model_endpoint, :endpoints_catalog, :endpoints_catalog_1
-
- # Endpoint Units 3.4
- report.value "endpoint_units", "#{prediction_model.unit}"
-
- model_type = model.class.to_s.gsub('OpenTox::Model::Lazar','')
-
- # Type of model 4.1
- report.value "algorithm_type", "#{model_type}"
-
- # Explicit algorithm 4.2
- report.change_catalog :algorithms_catalog, :algorithms_catalog_1, {:definition => "see Helma 2016 and lazar.in-silico.ch, submitted version: #{lazar_commit}", :description => "Neighbor algorithm: #{model.algorithms["similarity"]["method"].gsub('_',' ').titleize}#{(model.algorithms["similarity"][:min] ? ' with similarity > ' + model.algorithms["similarity"][:min].to_s : '')}"}
- report.ref_catalog :algorithm_explicit, :algorithms_catalog, :algorithms_catalog_1
- report.change_catalog :algorithms_catalog, :algorithms_catalog_3, {:definition => "see Helma 2016 and lazar.in-silico.ch, submitted version: #{lazar_commit}", :description => "modified k-nearest neighbor #{model_type}"}
- report.ref_catalog :algorithm_explicit, :algorithms_catalog, :algorithms_catalog_3
- if model.algorithms["prediction"]
- pred_algorithm_params = (model.algorithms["prediction"][:method] == "rf" ? "random forest" : model.algorithms["prediction"][:method])
- end
- report.change_catalog :algorithms_catalog, :algorithms_catalog_2, {:definition => "see Helma 2016 and lazar.in-silico.ch, submitted version: #{lazar_commit}", :description => "Prediction algorithm: #{model.algorithms["prediction"].to_s.gsub('OpenTox::Algorithm::','').gsub('_',' ').gsub('.', ' with ')} #{(pred_algorithm_params ? pred_algorithm_params : '')}"}
- report.ref_catalog :algorithm_explicit, :algorithms_catalog, :algorithms_catalog_2
-
- # Descriptors in the model 4.3
- if model.algorithms["descriptors"][:type]
- report.change_catalog :descriptors_catalog, :descriptors_catalog_1, {:description => "", :name => "#{model.algorithms["descriptors"][:type]}", :publication_ref => "", :units => ""}
- report.ref_catalog :algorithms_descriptors, :descriptors_catalog, :descriptors_catalog_1
- end
-
- # Descriptor selection 4.4
- report.value "descriptors_selection", "#{model.algorithms["feature_selection"].gsub('_',' ')} #{model.algorithms["feature_selection"].collect{|k,v| k.to_s + ': ' + v.to_s}.join(', ')}" if model.algorithms["feature_selection"]
-
- # Algorithm and descriptor generation 4.5
- report.value "descriptors_generation", "exhaustive breadth first search for paths in chemical graphs (simplified MolFea algorithm)"
-
- # Software name and version for descriptor generation 4.6
- report.change_catalog :software_catalog, :software_catalog_2, {:name => "lazar, submitted version: #{lazar_commit}", :description => "simplified MolFea algorithm", :number => "2", :url => "https://lazar.in-silico.ch", :contact => "info@in-silico.ch"}
- report.ref_catalog :descriptors_generation_software, :software_catalog, :software_catalog_2
-
- # Chemicals/Descriptors ratio 4.7
- report.value "descriptors_chemicals_ratio", "not applicable (classification based on activities of neighbors, descriptors are used for similarity calculation)"
-
- # Description of the applicability domain of the model 5.1
- report.value "app_domain_description", "<html><head></head><body>
- <p>
- The applicability domain (AD) of the training set is characterized by
- the confidence index of a prediction (high confidence index: close to
- the applicability domain of the training set/reliable prediction, low
- confidence: far from the applicability domain of the
- trainingset/unreliable prediction). The confidence index considers (i)
- the similarity and number of neighbors and (ii) contradictory examples
- within the neighbors. A formal definition can be found in Helma 2006.
- </p>
- <p>
- The reliability of predictions decreases gradually with increasing
- distance from the applicability domain (i.e. decreasing confidence index)
- </p>
- </body>
- </html>"
-
- # Method used to assess the applicability domain 5.2
- report.value "app_domain_method", "see Helma 2006 and Maunz 2008"
-
- # Software name and version for applicability domain assessment 5.3
- report.change_catalog :software_catalog, :software_catalog_3, {:name => "lazar, submitted version: #{lazar_commit}", :description => "integrated into main lazar algorithm", :number => "3", :url => "https://lazar.in-silico.ch", :contact => "info@in-silico.ch"}
- report.ref_catalog :app_domain_software, :software_catalog, :software_catalog_3
-
- # Limits of applicability 5.4
- report.value "applicability_limits", "Predictions with low confidence index, unknown substructures and neighbors that might act by different mechanisms"
-
- # Availability of the training set 6.1
- report.change_attributes "training_set_availability", {:answer => "Yes"}
-
- # Available information for the training set 6.2
- report.change_attributes "training_set_data", {:cas => "Yes", :chemname => "Yes", :formula => "Yes", :inchi => "Yes", :mol => "Yes", :smiles => "Yes"}
-
- # Data for each descriptor variable for the training set 6.3
- report.change_attributes "training_set_descriptors", {:answer => "No"}
-
- # Data for the dependent variable for the training set 6.4
- report.change_attributes "dependent_var_availability", {:answer => "All"}
-
- # Other information about the training set 6.5
- report.value "other_info", "#{prediction_model.source}"
-
- # Pre-processing of data before modelling 6.6
- report.value "preprocessing", (model.class == OpenTox::Model::LazarRegression ? "-log10 transformation" : "none")
-
- # Robustness - Statistics obtained by leave-many-out cross-validation 6.9
- if prediction_model.repeated_crossvalidation
- crossvalidations = prediction_model.crossvalidations
- out = haml File.read(validation_template), :layout=> false, :locals => {:model => prediction_model}
- report.value "lmo", out
- end
-
- # Mechanistic basis of the model 8.1
- report.value "mechanistic_basis","<html><head></head><body>
- <p>
- Compounds with similar structures (neighbors) are assumed to have
- similar activities as the query compound. For the determination of
- activity specific similarities only statistically relevant subtructures
- (paths) are used. For this reason there is a priori no bias towards
- specific mechanistic hypothesis.
- </p>
- </body>
- </html>"
-
- # A priori or a posteriori mechanistic interpretation 8.2
- report.value "mechanistic_basis_comments","a posteriori for individual predictions"
-
- # Other information about the mechanistic interpretation 8.3
- report.value "mechanistic_basis_info","<html><head></head><body><p>Hypothesis about biochemical mechanisms can be derived from individual
- predictions by inspecting neighbors and relevant fragments.</p>
- <p>Neighbors are compounds that are similar in respect to a certain
- endpoint and it is likely that compounds with high similarity act by
- similar mechanisms as the query compound. Links at the webinterface
- prove an easy access to additional experimental data and literature
- citations for the neighbors and the query structure.</p>
- <p>Activating and deactivating parts of the query compound are highlighted
- in red and green on the webinterface. Fragments that are unknown (or too
- infrequent for statistical evaluation are marked in yellow and
- additional statistical information about the individual fragments can be
- retrieved. Please note that lazar predictions are based on neighbors and
- not on fragments. Fragments and their statistical significance are used
- for the calculation of activity specific similarities.</p>"
-
- # Bibliography 9.2
- report.ref_catalog :bibliography, :publications_catalog, :publications_catalog_1
- report.ref_catalog :bibliography, :publications_catalog, :publications_catalog_2
- report.change_catalog :publications_catalog, :publications_catalog_3, {:title => "Helma (2006), Lazy structure-activity relationships (lazar) for the prediction of rodent carcinogenicity and Salmonella mutagenicity.", :url => "http://dx.doi.org/10.1007/s11030-005-9001-5"}
- report.ref_catalog :bibliography, :publications_catalog, :publications_catalog_3
-
- # output
- response['Content-Type'] = "application/xml"
+ report = qmrf_report params[:id]
return report.to_xml
else
- bad_request_error "Mime type #{@accept} is not supported."
+ halt 400, "Mime type #{@accept} is not supported."
end
end
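
The report route now delegates the entire QMRF assembly to a qmrf_report helper that is not part of this diff. A sketch of the shape such a helper could take, built only from calls visible in the removed code; names and structure are assumptions, not the actual implementation:

    # sketch only: the real qmrf_report helper is defined elsewhere in the code base
    def qmrf_report id
      model = Model::Lazar.find id
      halt 400, "Model with id: #{id} not found." unless model
      prediction_model = Model::Validation.find_by :model_id => id
      report = OpenTox::QMRFReport.new
      # QSAR Identifier Title 1.1, as in the removed inline version
      report.value "QSAR_title", "Lazar model for #{prediction_model.species} #{prediction_model.endpoint}"
      # ... remaining QMRF sections (software, authors, endpoint, algorithms, validation) as removed above ...
      report
    end
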
diff --git a/lib/substance.rb b/lib/substance.rb
index f493714..5d57505 100644
--- a/lib/substance.rb
+++ b/lib/substance.rb
@@ -1,5 +1,5 @@
# Get all substances
-get "/substance/?" do
+get "/api/substance/?" do
substances = Substance.all
case @accept
when "text/uri-list"
@@ -10,25 +10,25 @@ get "/substance/?" do
substances = JSON.parse list.to_json
return JSON.pretty_generate substances
else
- bad_request_error "Mime type #{@accept} is not supported."
+ halt 400, "Mime type #{@accept} is not supported."
end
end
# Get a substance by ID
-get "/substance/:id/?" do
+get "/api/substance/:id/?" do
case @accept
when "application/json"
- mongoid = /^[a-f\d]{24}$/i
- halt 400, "Input #{params[:id]} is no valid ID.".to_json unless params[:id].match(mongoid)
substance = Substance.find params[:id]
if substance
- out = {"compound": {"id": substance.id, "inchi": substance.inchi, "smiles": substance.smiles, "warnings": substance.warnings}}
- response['Content-Type'] = @accept
+ out = {"compound": {"id": substance.id,
+ "inchi": substance.inchi,
+ "smiles": substance.smiles
+ }}
return JSON.pretty_generate JSON.parse(out.to_json)
else
- halt 400, "Substance with ID #{input} not found."
+ halt 400, "Substance with ID #{params[:id]} not found."
end
else
- bad_request_error "Mime type #{@accept} is not supported."
+ halt 400, "Mime type #{@accept} is not supported."
end
end
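
The BSON ObjectId pre-check (and the warnings field) were dropped from the substance route. Callers that still want to validate ids before issuing the request can reuse the pattern from the removed line; a client-side sketch with a placeholder id:

    # same 24-hex-digit pattern as the removed server-side check
    MONGOID = /^[a-f\d]{24}$/i
    id = "5755f8eb3cf99a00d8fedf2f"            # placeholder substance id
    raise ArgumentError, "#{id} is no valid ID" unless id.match?(MONGOID)
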
diff --git a/lib/swagger.rb b/lib/swagger.rb
index acb2ad0..2c3ea87 100644
--- a/lib/swagger.rb
+++ b/lib/swagger.rb
@@ -1,5 +1,3 @@
-get "/" do
- response['Content-Type'] = "text/html"
- index_file = File.join(ENV['HOME'],"swagger-ui/dist/index.html")
- File.read(index_file)
+get "/swagger" do
+ redirect("/api")
end
diff --git a/lib/validation.rb b/lib/validation.rb
index fad8a44..031b9e1 100644
--- a/lib/validation.rb
+++ b/lib/validation.rb
@@ -4,7 +4,7 @@ VALIDATION_TYPES = ["repeatedcrossvalidation", "leaveoneout", "crossvalidation",
# Get a list of all possible validation types
# @param [Header] Accept one of text/uri-list, application/json
# @return [text/uri-list] URI list of all validation types
-get "/validation/?" do
+get "/api/validation/?" do
uri_list = VALIDATION_TYPES.collect{|validationtype| uri("/validation/#{validationtype}")}
case @accept
when "text/uri-list"
@@ -12,7 +12,7 @@ get "/validation/?" do
when "application/json"
return uri_list.to_json
else
- bad_request_error "Mime type #{@accept} is not supported."
+ halt 400, "Mime type #{@accept} is not supported."
end
end
@@ -20,8 +20,8 @@ end
# @param [Header] Accept one of text/uri-list, application/json
# @param [Path] Validationtype One of "repeatedcrossvalidation", "leaveoneout", "crossvalidation", "regressioncrossvalidation"
# @return [text/uri-list] list of all validations of a validation type
-get "/validation/:validationtype/?" do
- bad_request_error "There is no such validation type as: #{params[:validationtype]}" unless VALIDATION_TYPES.include? params[:validationtype]
+get "/api/validation/:validationtype/?" do
+ halt 400, "There is no such validation type as: #{params[:validationtype]}" unless VALIDATION_TYPES.include? params[:validationtype]
case params[:validationtype]
when "repeatedcrossvalidation"
validations = Validation::RepeatedCrossValidation.all
@@ -35,22 +35,22 @@ get "/validation/:validationtype/?" do
case @accept
when "text/uri-list"
- uri_list = validations.collect{|validation| uri("/validation/#{params[:validationtype]}/#{validation.id}")}
+ uri_list = validations.collect{|validation| uri("/api/validation/#{params[:validationtype]}/#{validation.id}")}
return uri_list.join("\n") + "\n"
when "application/json"
validations = JSON.parse validations.to_json
validations.each_index do |idx|
- validations[idx][:URI] = uri("/validation/#{params[:validationtype]}/#{validations[idx]["_id"]["$oid"]}")
+ validations[idx][:URI] = uri("/api/validation/#{params[:validationtype]}/#{validations[idx]["_id"]["$oid"]}")
end
return validations.to_json
else
- bad_request_error "Mime type #{@accept} is not supported."
+ halt 400, "Mime type #{@accept} is not supported."
end
end
# Get validation representation
-get "/validation/:validationtype/:id/?" do
- bad_request_error "There is no such validation type as: #{params[:validationtype]}" unless VALIDATION_TYPES.include? params[:validationtype]
+get "/api/validation/:validationtype/:id/?" do
+ halt 400, "There is no such validation type as: #{params[:validationtype]}" unless VALIDATION_TYPES.include? params[:validationtype]
case params[:validationtype]
when "repeatedcrossvalidation"
validation = Validation::RepeatedCrossValidation.find params[:id]
@@ -62,10 +62,6 @@ get "/validation/:validationtype/:id/?" do
validation = Validation::RegressionCrossValidation.find params[:id]
end
- not_found_error "#{params[:validationtype]} with id: #{params[:id]} not found." unless validation
- #model[:URI] = uri("/model/#{model.id}")
- #model[:neighbor_algorithm_parameters][:feature_dataset_uri] = uri("/dataset/#{model[:neighbor_algorithm_parameters][:feature_dataset_id]}") if model[:neighbor_algorithm_parameters][:feature_dataset_id]
- #model[:training_dataset_uri] = uri("/dataset/#{model.training_dataset_id}") if model.training_dataset_id
- #model[:prediction_feature_uri] = uri("/dataset/#{model.prediction_feature_id}") if model.prediction_feature_id
+ halt 404, "#{params[:validationtype]} with id: #{params[:id]} not found." unless validation
return validation.to_json
end