author    Christoph Helma <helma@in-silico.ch>    2012-07-12 16:35:46 +0200
committer Christoph Helma <helma@in-silico.ch>    2012-07-12 16:35:46 +0200
commit    66b581a79bd9fdfb923a6b56d84ed95170a77e61 (patch)
tree      e56bee430b618237f46ec5ca3bf0f47dc95614ea
parent    00d70503797a4a9aa64a2c92643c07f261e3a5b0 (diff)
all opentox-client tests pass
-rw-r--r--    application.rb    608
1 file changed, 143 insertions(+), 465 deletions(-)
diff --git a/application.rb b/application.rb
index e6d89b3..8b2fadf 100644
--- a/application.rb
+++ b/application.rb
@@ -1,19 +1,19 @@
-#require "./parser.rb"
module OpenTox
class Application < Service
@warnings = []
helpers do
- def parse_csv(csv)
- parse_table CSV.parse(csv)
+ def from_csv(csv)
+ from_table CSV.parse(csv)
end
+=begin
def parse_sdf(sdf)
- obconversion = OpenBabel::OBConversion.new
- obmol = OpenBabel::OBMol.new
- obconversion.set_in_and_out_formats "sdf", "inchi"
+ #obconversion = OpenBabel::OBConversion.new
+ #obmol = OpenBabel::OBMol.new
+ #obconversion.set_in_and_out_formats "sdf", "inchi"
table = []
@@ -31,7 +31,8 @@ module OpenTox
table << []
begin
# TODO: use compound service
- obconversion.read_string obmol, s
+ compound = OpenTox::Compound.from_sdf sdf
+ #obconversion.read_string obmol, s
table.last << obconversion.write_string(obmol).gsub(/\s/,'').chomp
rescue
# TODO: Fix, will lead to follow up errors
@@ -39,15 +40,15 @@ module OpenTox
end
obmol.get_data.each { |d| table.last[table.first.index(d.get_attribute)] = d.get_value }
end
- parse_table table
+ from_table table
end
+=end
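+    # Sketch of a possible re-implementation via the compound service
+    # (illustrative only; assumes OpenTox::Compound.from_sdf accepts a
+    # single SDF record and that $$$$ separates records):
+    #   sdf.split(/\$\$\$\$\r?\n/).each do |record|
+    #     compound = OpenTox::Compound.from_sdf record
+    #     # collect compound.uri and the record's data items into `table`,
+    #     # then hand the result to from_table
+    #   end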
- def parse_table table
+ def from_table table
@warnings = []
- dataset_uri = File.join(uri("/dataset"), SecureRandom.uuid)
- #ntriples = []
- ntriples = ["<#{dataset_uri}> <#{RDF.type}> <#{RDF::OT.Dataset}>."]
+ ntriples = ["<#{@uri}> <#{RDF.type}> <#{RDF::OT.Dataset}>."]
+ ntriples << "<#{@uri}> <#{RDF.type}> <#{RDF::OT.OrderedDataset}>."
# features
feature_names = table.shift.collect{|f| f.strip}
@@ -57,42 +58,27 @@ module OpenTox
features = []
ignored_feature_indices = []
feature_names.each_with_index do |f,i|
- # TODO search for existing features
feature = OpenTox::Feature.new File.join($feature[:uri], SecureRandom.uuid)
- feature[RDF.type] = RDF::OT.Feature
feature[RDF::DC.title] = f
features << feature
values = table.collect{|row| row[i+1].strip unless row[i+1].nil?}.uniq # skip compound column
if values.size <= 3 # max classes
- feature[RDF.type] = RDF::OT.NominalFeature
- feature[RDF.type] = RDF::OT.StringFeature
+ feature.append RDF.type, RDF::OT.NominalFeature
+ feature.append RDF.type, RDF::OT.StringFeature
feature[RDF::OT.acceptValue] = values
else
types = values.collect{|v| feature_type(v)}
if types.include?(RDF::OT.NominalFeature)
@warnings << "Feature '#{f}' contains nominal and numeric values."
- #ignored_feature_indices << i
- #next
else
- feature[RDF.type] = RDF::OT.NumericFeature
+ feature.append RDF.type, RDF::OT.NumericFeature
end
end
- feature.save
- case feature[RDF.type].class.to_s
- when "Array"
- feature[RDF.type].each{ |t| ntriples << "<#{feature.uri}> <#{RDF.type}> <#{t}>." }
- when "String"
- ntriples << "<#{feature.uri}> <#{RDF.type}> <#{feature[RDF.type]}>."
- end
+ feature.put
+ ntriples << "<#{feature.uri}> <#{RDF.type}> <#{RDF::OT.Feature}>."
+ ntriples << "<#{feature.uri}> <#{RDF::OLO.index}> #{i} ."
end
- # remove invalid features from table
-# puts ignored_feature_indices.inspect
-# ignored_feature_indices.each do |i|
-# features.delete_at(i)
-# table.each{|row| row.delete_at(i)}
-# end
-
# compounds and values
compound_uris = []
data_entry_idx = 0
@@ -118,29 +104,84 @@ module OpenTox
next
end
ntriples << "<#{compound_uri}> <#{RDF.type}> <#{RDF::OT.Compound}>."
+ ntriples << "<#{compound_uri}> <#{RDF::OLO.index}> #{j} ."
values.each_with_index do |v,i|
@warnings << "Empty value for compound '#{compound}' (row #{j+2}) and feature '#{feature_names[i]}' (column #{i+2})." if v.blank?
- # TODO multiple values, use data_entry/value uris for sorted datasets
- # data_entry_uri = File.join dataset_uri, "dataentry", data_entry_idx
- ntriples << "<#{dataset_uri}> <#{RDF::OT.dataEntry}> _:dataentry#{data_entry_idx} ."
- ntriples << "_:dataentry#{data_entry_idx} <#{RDF.type}> <#{RDF::OT.DataEntry}> ."
- ntriples << "_:dataentry#{data_entry_idx} <#{RDF::OT.compound}> <#{compound_uri}> ."
- ntriples << "_:dataentry#{data_entry_idx} <#{RDF::OT.values}> _:values#{data_entry_idx} ."
- ntriples << "_:values#{data_entry_idx} <#{RDF::OT.feature}> <#{features[i].uri}> ."
- ntriples << "_:values#{data_entry_idx} <#{RDF::OT.value}> \"#{v}\" ."
-
- data_entry_idx += 1
+ data_entry_node = "_:dataentry" + j.to_s
+ value_node = data_entry_node + "_value" + i.to_s
+ ntriples << "<#{@uri}> <#{RDF::OT.dataEntry}> #{data_entry_node} ."
+ ntriples << "#{data_entry_node} <#{RDF.type}> <#{RDF::OT.DataEntry}> ."
+ ntriples << "#{data_entry_node} <#{RDF::OLO.index}> #{j} ."
+ ntriples << "#{data_entry_node} <#{RDF::OT.compound}> <#{compound_uri}> ."
+ ntriples << "#{data_entry_node} <#{RDF::OT.values}> #{value_node} ."
+ ntriples << "#{value_node} <#{RDF::OT.feature}> <#{features[i].uri}> ."
+ ntriples << "#{value_node} <#{RDF::OT.value}> \"#{v}\" ."
end
end
- ntriples << "<#{dataset_uri}> <#{RDF::OT.Warnings}> \"#{@warnings.join('\n')}\" ."
+ ntriples << "<#{@uri}> <#{RDF::OT.Warnings}> \"#{@warnings.join('\n')}\" ."
ntriples.join("\n")
end
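# Illustrative output of from_table for a two-column CSV ("SMILES,LC50");
# URIs are shortened and values invented for the example:
#   <.../dataset/42> <rdf:type> <ot:Dataset>.
#   <.../dataset/42> <rdf:type> <ot:OrderedDataset>.
#   <.../feature/f0> <rdf:type> <ot:Feature>.
#   <.../feature/f0> <olo:index> 0 .
#   <.../dataset/42> <ot:dataEntry> _:dataentry0 .
#   _:dataentry0 <ot:compound> <.../compound/c0> .
#   _:dataentry0 <ot:values> _:dataentry0_value0 .
#   _:dataentry0_value0 <ot:feature> <.../feature/f0> .
#   _:dataentry0_value0 <ot:value> "3.2" .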
+ def ordered?
+ sparql = "SELECT DISTINCT ?s FROM <#{@uri}> WHERE {?s <#{RDF.type}> <#{RDF::OT.OrderedDataset}>}"
+ !FourStore.query(sparql, "text/uri-list").split("\n").empty?
+ end
+
+ def to_csv
+ accept = "text/uri-list"
+ csv_string = CSV.generate do |csv|
+ if ordered?
+ sparql = "SELECT DISTINCT ?s FROM <#{@uri}> WHERE {?s <#{RDF.type}> <#{RDF::OT.Feature}> . ?s <#{RDF::OLO.index}> ?i} ORDER BY ?i"
+ features = FourStore.query(sparql, accept).split("\n").collect{|uri| OpenTox::Feature.new uri}
+ csv << ["SMILES"] + features.collect{ |f| f.get; f[RDF::DC.title] }
+ sparql = "SELECT DISTINCT ?i FROM <#{@uri}> WHERE {?s <#{RDF.type}> <#{RDF::OT.DataEntry}> . ?s <#{RDF::OLO.index}> ?i} ORDER BY ?i"
+ FourStore.query(sparql, accept).split("\n").each do |data_entry_idx|
+ sparql = "SELECT DISTINCT ?compound FROM <#{@uri}> WHERE {
+ ?data_entry <#{RDF::OLO.index}> #{data_entry_idx} ;
+ <#{RDF::OT.compound}> ?compound. }"
+ compound = OpenTox::Compound.new FourStore.query(sparql, accept).strip
+ sparql = "SELECT ?value FROM <#{@uri}> WHERE {
+ ?data_entry <#{RDF::OLO.index}> #{data_entry_idx} ;
+ <#{RDF::OT.values}> ?v .
+ ?v <#{RDF::OT.feature}> ?f;
+ <#{RDF::OT.value}> ?value .
+ ?f <#{RDF::OLO.index}> ?i.
+
+ } ORDER BY ?i"
+ csv << [compound.to_smiles] + FourStore.query(sparql,accept).split("\n")
+ end
+ else
+ sparql = "SELECT DISTINCT ?s FROM <#{@uri}> WHERE {?s <#{RDF.type}> <#{RDF::OT.Feature}>}"
+ features = FourStore.query(sparql, accept).split("\n").collect{|uri| OpenTox::Feature.new uri}
+ csv << ["SMILES"] + features.collect{ |f| f.get; f[RDF::DC.title] }
+ sparql = "SELECT ?s FROM <#{@uri}> WHERE {?s <#{RDF.type}> <#{RDF::OT.Compound}>. }"
+ compounds = FourStore.query(sparql, accept).split("\n").collect{|uri| OpenTox::Compound.new uri}
+ compounds.each do |compound|
+ data_entries = []
+ features.each do |feature|
+ sparql = "SELECT ?value FROM <#{@uri}> WHERE {
+ ?data_entry <#{RDF::OT.compound}> <#{compound.uri}>;
+ <#{RDF::OT.values}> ?v .
+ ?v <#{RDF::OT.feature}> <#{feature.uri}>;
+ <#{RDF::OT.value}> ?value.
+ } ORDER BY ?data_entry"
+ FourStore.query(sparql, accept).split("\n").each_with_index do |value,i|
+ data_entries[i] = [] unless data_entries[i]
+ data_entries[i] << value
+ end
+ end
+ data_entries.each{|data_entry| csv << [compound.to_smiles] + data_entry}
+ end
+ end
+ end
+ csv_string
+ end
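+
+ # Illustrative round trip (placeholder values): a dataset created from
+ # "SMILES,LC50\nCCO,3.2" and fetched with Accept: text/csv comes back as
+ #   SMILES,LC50
+ #   CCO,3.2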
+
def feature_type(value)
if value.blank?
nil
@@ -151,91 +192,115 @@ module OpenTox
end
end
- end
-
- # Create a new resource
- post "/dataset/?" do
- #begin
+ def parse_put
case @content_type
when "text/plain", "text/turtle", "application/rdf+xml" # no conversion needed
when "text/csv"
- @body = parse_csv @body
+ @body = from_csv @body
@content_type = "text/plain"
when "application/vnd.ms-excel"
xls = params[:file][:tempfile].path + ".xls"
File.rename params[:file][:tempfile].path, xls # roo needs these endings
- @body = parse_csv Excel.new(xls).to_csv
+ @body = from_csv Excel.new(xls).to_csv
@content_type = "text/plain"
when "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
xlsx = params[:file][:tempfile].path + ".xlsx"
File.rename params[:file][:tempfile].path, xlsx # roo needs these endings
- @body = parse_csv Excelx.new(xlsx).to_csv
+ @body = from_csv Excelx.new(xlsx).to_csv
@content_type = "text/plain"
when "application/vnd.oasis.opendocument.spreadsheet"
ods = params[:file][:tempfile].path + ".ods"
File.rename params[:file][:tempfile].path, ods # roo needs these endings
- @body = parse_csv Excelx.new(ods).to_csv
- @content_type = "text/plain"
- when "chemical/x-mdl-sdfile"
- @body = parse_sdf @body
+ @body = from_csv Excelx.new(ods).to_csv # FIXME: .ods is opened with Excelx; roo's OpenOffice reader is probably intended here
@content_type = "text/plain"
+ # when "chemical/x-mdl-sdfile"
+ # @body = parse_sdf @body
+ # @content_type = "text/plain"
else
bad_request_error "#{@content_type} is not a supported content type."
end
- uri = uri("/#{SERVICE}/#{SecureRandom.uuid}")
- FourStore.put(uri, @body, @content_type)
+ FourStore.put @uri, @body, @content_type
if params[:file]
- nt = "<#{uri}> <#{RDF::DC.title}> \"#{params[:file][:filename]}\".\n<#{uri}> <#{RDF::OT.hasSource}> \"#{params[:file][:filename]}\"."
+ nt = "<#{@uri}> <#{RDF::DC.title}> \"#{params[:file][:filename]}\".\n<#{@uri}> <#{RDF::OT.hasSource}> \"#{params[:file][:filename]}\"."
FourStore.post(@uri, nt, "text/plain")
end
- #rescue
- #bad_request_error $!.message
- #end
+ end
- #dataset.add_metadata({
- #DC.title => File.basename(params[:file][:filename],".csv"),
- #OT.hasSource => File.basename(params[:file][:filename])
- #})
+ end
+
+ before "/#{SERVICE}/:id/:property" do
+ @uri = uri("/#{SERVICE}/#{params[:id]}")
+ end
+
+ # Create a new resource
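+ # @example A hypothetical upload (placeholder host):
+ #   curl -X POST -F "file=@training.csv;type=text/csv" http://localhost/dataset
+ # @return [text/uri-list] Dataset URI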
+ post "/dataset/?" do
+ # TODO: task
+ @uri = uri("/#{SERVICE}/#{SecureRandom.uuid}")
+ parse_put
response['Content-Type'] = "text/uri-list"
- uri
+ @uri
+ end
+
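+ # Get a dataset representation
+ # @param [Header] Accept one of `application/rdf+xml, text/turtle, text/plain, text/csv, text/html`
+ # @example A hypothetical request (placeholder host):
+ #   curl -H "Accept:text/csv" http://localhost/dataset/{id}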
+ get "/dataset/:id/?" do
+ case @accept
+ when "application/rdf+xml", "text/turtle", "text/plain", /html/
+ FourStore.get(@uri, @accept)
+ else
+ case @accept
+ when "text/csv"
+ to_csv
+ #when "application/vnd.ms-excel"
+ #when "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
+ #when "application/vnd.oasis.opendocument.spreadsheet"
+ #when "chemical/x-mdl-sdfile"
+ else
+ bad_request_error "'#{@accept}' is not a supported content type."
+ end
+ end
end
# Create or update a resource
put "/dataset/:id/?" do
- FourStore.put uri("/#{SERVICE}/#{params[:id]}"), @body, @content_type
+ # TODO: task
+ parse_put
end
+
# Get metadata of the dataset
# @return [application/rdf+xml] Metadata OWL-DL
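# @example A hypothetical request (placeholder host):
#   curl -H "Accept:text/turtle" http://localhost/dataset/{id}/metadata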
get '/dataset/:id/metadata' do
+ case @accept
+ when "application/rdf+xml", "text/turtle", "text/plain"
+ sparql = "CONSTRUCT {?s ?p ?o.} FROM <#{@uri}> WHERE {<#{@uri}> ?p ?o. }"
+ FourStore.query sparql, @accept
+ else
+ bad_request_error "'#{@accept}' is not a supported content type."
+ end
end
# Get a list of all features
# @param [Header] Accept one of `application/rdf+xml, text/turtle, text/plain, text/uri-list` (default application/rdf+xml)
# @return [application/rdf+xml, text/turtle, text/plain, text/uri-list] Feature list
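# @example A hypothetical request (placeholder host):
#   curl -H "Accept:text/uri-list" http://localhost/dataset/{id}/features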
get '/dataset/:id/features' do
- accept = request.env['HTTP_ACCEPT']
- uri = uri "/dataset/#{params[:id]}"
- case accept
+ case @accept
when "application/rdf+xml", "text/turtle", "text/plain"
- sparql = "CONSTRUCT {?s ?p ?o.} FROM <#{uri}> WHERE {?s <#{RDF.type}> <#{RDF::OT.Feature}>; ?p ?o. }"
+ sparql = "CONSTRUCT {?s ?p ?o.} FROM <#{@uri}> WHERE {?s <#{RDF.type}> <#{RDF::OT.Feature}>; ?p ?o. }"
when "text/uri-list"
- sparql = "SELECT DISTINCT ?s FROM <#{uri}> WHERE {?s <#{RDF.type}> <#{RDF::OT.Feature}>. }"
+ sparql = "SELECT DISTINCT ?s FROM <#{@uri}> WHERE {?s <#{RDF.type}> <#{RDF::OT.Feature}>. }"
else
- bad_request_error "'#{accept}' is not a supported content type."
+ bad_request_error "'#{@accept}' is not a supported content type."
end
- FourStore.query sparql, accept
+ FourStore.query sparql, @accept
end
# Get a list of all compounds
# @return [text/uri-list] Compound list
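# @example A hypothetical request (placeholder host):
#   curl -H "Accept:text/uri-list" http://localhost/dataset/{id}/compounds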
get '/dataset/:id/compounds' do
accept = request.env['HTTP_ACCEPT']
- uri = uri "/dataset/#{params[:id]}"
case accept
when "application/rdf+xml", "text/turtle", "text/plain"
- sparql = "CONSTRUCT {?s ?p ?o.} FROM <#{uri}> WHERE {?s <#{RDF.type}> <#{RDF::OT.Compound}>; ?p ?o. }"
+ sparql = "CONSTRUCT {?s ?p ?o.} FROM <#{@uri}> WHERE {?s <#{RDF.type}> <#{RDF::OT.Compound}>; ?p ?o. }"
when "text/uri-list"
- sparql = "SELECT DISTINCT ?s FROM <#{uri}> WHERE {?s <#{RDF.type}> <#{RDF::OT.Compound}>. }"
+ sparql = "SELECT DISTINCT ?s FROM <#{@uri}> WHERE {?s <#{RDF.type}> <#{RDF::OT.Compound}>. }"
else
bad_request_error "'#{accept}' is not a supported content type."
end
@@ -244,390 +309,3 @@ module OpenTox
end
end
-=begin
-require 'rubygems'
-gem "opentox-ruby", "~> 3"
-require 'opentox-ruby'
-require 'profiler'
-require 'rjb'
-
-set :lock, true
-
-@@datadir = "data"
-
-@@idfile_path = @@datadir+"/id"
-unless File.exist?(@@idfile_path)
- id = Dir["./#{@@datadir}/*json"].collect{|f| File.basename(f.sub(/.json/,'')).to_i}.sort.last
- id = 0 if id.nil?
- open(@@idfile_path,"w") do |f|
- f.puts(id)
- end
-end
-
-helpers do
- def next_id
- open(@@idfile_path, "r+") do |f|
- f.flock(File::LOCK_EX)
- @id = f.gets.to_i + 1
- f.rewind
- f.print @id
- end
- return @id
- end
-
- def uri(id)
- url_for "/#{id}", :full
- end
-
- # subjectid is stored as member variable, not in params
- def load_dataset(id, params,content_type,input_data)
-
- @uri = uri id
- raise "store subject-id in dataset-object, not in params" if params.has_key?(:subjectid) and @subjectid==nil
-
- content_type = "application/rdf+xml" if content_type.nil?
- dataset = OpenTox::Dataset.new(@uri, @subjectid)
-
- case content_type
-
- when /yaml/
- dataset.load_yaml(input_data)
-
- when /json/
- dataset.load_json(input_data)
-
- when "text/csv"
- dataset.load_csv(input_data, @subjectid)
-
- when /application\/rdf\+xml/
- dataset.load_rdfxml(input_data, @subjectid)
-
- when "chemical/x-mdl-sdfile"
- dataset.load_sdf(input_data, @subjectid)
-
- when /multipart\/form-data/ , "application/x-www-form-urlencoded" # file uploads
-
- case params[:file][:type]
-
- when "chemical/x-mdl-sdfile"
- dataset.load_sdf(input_data, @subjectid)
-
- when /json/
- dataset.load_json(params[:file][:tempfile].read)
-
- when /yaml/
- dataset.load_yaml(params[:file][:tempfile].read)
-
- when "application/rdf+xml"
- dataset.load_rdfxml_file(params[:file][:tempfile], @subjectid)
-
- else
- raise "MIME type \"#{params[:file][:type]}\" not supported."
- end
-
- else
- raise "MIME type \"#{content_type}\" not supported."
- end
-
- dataset.uri = @uri # update uri (also in metadata)
- dataset.features.keys.each { |f| dataset.features[f][OT.hasSource] = dataset.metadata[OT.hasSource] unless dataset.features[f][OT.hasSource]}
- File.open("#{@@datadir}/#{@id}.json","w+"){|f| f.puts dataset.to_json}
- end
-end
-
-before do
-
- @accept = request.env['HTTP_ACCEPT']
- @accept = 'application/rdf+xml' if @accept == '*/*' or @accept == '' or @accept.nil?
- @id = request.path_info.match(/^\/\d+/)
- unless @id.nil?
- @id = @id.to_s.sub(/\//,'').to_i
-
- @uri = uri @id
- @json_file = "#{@@datadir}/#{@id}.json"
- raise OpenTox::NotFoundError.new "Dataset #{@id} not found." unless File.exists? @json_file
-
- extension = File.extname(request.path_info)
- unless extension.empty?
- case extension
- when ".html"
- @accept = 'text/html'
- when ".json"
- @accept = 'application/json'
- when ".yaml"
- @accept = 'application/x-yaml'
- when ".csv"
- @accept = 'text/csv'
- when ".rdfxml"
- @accept = 'application/rdf+xml'
- when ".xls"
- @accept = 'application/ms-excel'
- when ".sdf"
- @accept = 'chemical/x-mdl-sdfile'
- else
- raise OpenTox::NotFoundError.new "File format #{extension} not supported."
- end
- end
- end
-
- # make sure subjectid is not included in params, subjectid is set as member variable
- params.delete(:subjectid)
-end
-
-## REST API
-
-# Get a list of available datasets
-# @return [text/uri-list] List of available datasets
-get '/?' do
- uri_list = Dir["./#{@@datadir}/*json"].collect{|f| File.basename(f.sub(/.json/,'')).to_i}.sort.collect{|n| uri n}.join("\n") + "\n"
- case @accept
- when /html/
- response['Content-Type'] = 'text/html'
- OpenTox.text_to_html uri_list
- else
- response['Content-Type'] = 'text/uri-list'
- uri_list
- end
-end
-
-# Get a dataset representation
-# @param [Header] Accept one of `application/rdf+xml, application/x-yaml, text/csv, application/ms-excel` (default application/rdf+xml)
-# @return [application/rdf+xml, application/x-yaml, text/csv, application/ms-excel] Dataset representation
-get '/:id' do
- case @accept
-
- when /rdf/ # redland sends text/rdf instead of application/rdf+xml
- file = "#{@@datadir}/#{params[:id]}.rdfxml"
- unless File.exists? file # lazy rdfxml generation
- dataset = OpenTox::Dataset.from_json File.read(@json_file)
- File.open(file,"w+") { |f| f.puts dataset.to_rdfxml }
- end
- send_file file, :type => 'application/rdf+xml'
-
- when /json/
- send_file @json_file, :type => 'application/json'
-
- when /yaml/
- file = "#{@@datadir}/#{params[:id]}.yaml"
- unless File.exists? file # lazy yaml generation
- dataset = OpenTox::Dataset.from_json File.read(@json_file)
- File.open(file,"w+") { |f| f.puts dataset.to_yaml }
- end
- send_file file, :type => 'application/x-yaml'
-
- when /html/
- response['Content-Type'] = 'text/html'
- OpenTox.text_to_html JSON.pretty_generate(JSON.parse(File.read(@json_file)))
-
- when "text/csv"
- response['Content-Type'] = 'text/csv'
- OpenTox::Dataset.from_json(File.read(@json_file)).to_csv
-
- when /ms-excel/
- file = "#{@@datadir}/#{params[:id]}.xls"
- OpenTox::Dataset.from_json(File.read(@json_file)).to_xls.write(file) unless File.exists? file # lazy xls generation
- send_file file, :type => 'application/ms-excel'
-
- when /sdfile/
- response['Content-Type'] = 'chemical/x-mdl-sdfile'
- OpenTox::Dataset.from_json(File.read(@json_file)).to_sdf
-
-# when /uri-list/
-# response['Content-Type'] = 'text/uri-list'
-# Yajl::Parser.parse(File.read(@json_file)).to_urilist
-
- else
- raise OpenTox::NotFoundError.new "Content-type #{@accept} not supported."
- end
-end
-
-# Get metadata of the dataset
-# @return [application/rdf+xml] Metadata OWL-DL
-get '/:id/metadata' do
- metadata = OpenTox::Dataset.from_json(File.read(@json_file)).metadata
-
- case @accept
- when /rdf/ # redland sends text/rdf instead of application/rdf+xml
- response['Content-Type'] = 'application/rdf+xml'
- serializer = OpenTox::Serializer::Owl.new
- serializer.add_metadata url_for("/#{params[:id]}",:full), metadata
- serializer.to_rdfxml
- when /yaml/
- response['Content-Type'] = 'application/x-yaml'
- metadata.to_yaml
- end
-
-end
-
-# Get a dataset feature
-# @param [Header] Accept one of `application/rdf+xml or application/x-yaml` (default application/rdf+xml)
-# @return [application/rdf+xml,application/x-yaml] Feature metadata
-get %r{/(\d+)/feature/(.*)$} do |id,feature|
-
- @id = id
- @uri = uri @id
- @json_file = "#{@@datadir}/#{@id}.json"
- feature_uri = url_for("/#{@id}/feature/#{URI.encode(feature)}",:full) # work around racks internal uri decoding
- metadata = OpenTox::Dataset.from_json(File.read(@json_file)).features[feature_uri]
-
- case @accept
- when /rdf/ # redland sends text/rdf instead of application/rdf+xml
- response['Content-Type'] = 'application/rdf+xml'
- serializer = OpenTox::Serializer::Owl.new
- serializer.add_feature feature_uri, metadata
- serializer.to_rdfxml
- when /yaml/
- response['Content-Type'] = 'application/x-yaml'
- metadata.to_yaml
- when /json/
- response['Content-Type'] = 'application/json'
- Yajl::Encoder.encode(metadata)
- end
-
-end
-
-# Get a list of all features
-# @param [Header] Accept one of `application/rdf+xml, application/x-yaml, text/uri-list` (default application/rdf+xml)
-# @return [application/rdf+xml, application/x-yaml, text/uri-list] Feature list
-get '/:id/features' do
-
- features = OpenTox::Dataset.from_json(File.read(@json_file)).features
-
- case @accept
- when /rdf/ # redland sends text/rdf instead of application/rdf+xml
- response['Content-Type'] = 'application/rdf+xml'
- serializer = OpenTox::Serializer::Owl.new
- features.each { |feature,metadata| serializer.add_feature feature, metadata }
- serializer.to_rdfxml
- when /yaml/
- response['Content-Type'] = 'application/x-yaml'
- features.to_yaml
- when /json/
- response['Content-Type'] = 'application/json'
- Yajl::Encoder.encode(features)
- when "text/uri-list"
- response['Content-Type'] = 'text/uri-list'
- features.keys.join("\n") + "\n"
- end
-end
-
-# Get a list of all compounds
-# @return [text/uri-list] Compound list
-get '/:id/compounds' do
- response['Content-Type'] = 'text/uri-list'
- OpenTox::Dataset.from_json(File.read(@json_file)).compounds.join("\n") + "\n"
-end
-
-# Create a new dataset.
-#
-# Posting without parameters creates and saves an empty dataset (with assigned URI).
-# Posting with parameters creates and saves a new dataset.
-# Data can be submitted either
-# - in the message body with the appropriate Content-type header or
-# - as file uploads with Content-type:multipart/form-data and a specified file type
-# @example
-# curl -X POST -F "file=@training.csv;type=text/csv" http://webservices.in-silico.ch/dataset
-# @param [Header] Content-type one of `application/x-yaml, application/rdf+xml, multipart/form-data`
-# @param [BODY] - string with data in selected Content-type
-# @param [optional] file, for file uploads; Content-type should be multipart/form-data, please specify the file type `application/rdf+xml, application/x-yaml, text/csv, application/ms-excel`
-# @return [text/uri-list] Task URI or Dataset URI (empty datasets)
-post '/?' do
-
- response['Content-Type'] = 'text/uri-list'
-
- # the read function may work only once, so store the input in a variable
- input_data = request.env["rack.input"].read
- @id = next_id
- @uri = uri @id
- @json_file = "#{@@datadir}/#{@id}.json"
- if params.size == 0 and input_data.size==0
- File.open(@json_file,"w+"){|f| f.puts OpenTox::Dataset.new(@uri).to_json}
- OpenTox::Authorization.check_policy(@uri, @subjectid) if File.exists? @json_file
- @uri
- else
- task = OpenTox::Task.create("Converting and saving dataset ", @uri) do
- load_dataset @id, params, request.content_type, input_data
- OpenTox::Authorization.check_policy(@uri, @subjectid) if File.exists? @json_file
- @uri
- end
- raise OpenTox::ServiceUnavailableError.new task.uri+"\n" if task.status == "Cancelled"
- halt 202,task.uri+"\n"
- end
-end
-
-# Save a dataset, will overwrite all existing data
-#
-# Data can be submitted either
-# - in the message body with the appropriate Content-type header or
-# - as file uploads with Content-type:multipart/form-data and a specified file type
-# @example
-# curl -X POST -F "file=@training.csv;type=text/csv" http://webservices.in-silico.ch/dataset/1
-# @param [Header] Content-type one of `application/x-yaml, application/rdf+xml, multipart/form-data`
-# @param [BODY] - string with data in selected Content-type
-# @param [optional] file, for file uploads; Content-type should be multipart/form-data, please specify the file type `application/rdf+xml, application/x-yaml, text/csv, application/ms-excel`
-# @return [text/uri-list] Task ID
-post '/:id' do
- response['Content-Type'] = 'text/uri-list'
- task = OpenTox::Task.create("Converting and saving dataset ", @uri) do
- FileUtils.rm Dir["#{@@datadir}/#{@id}.*"]
- load_dataset @id, params, request.content_type, request.env["rack.input"].read
- @uri
- end
- raise OpenTox::ServiceUnavailableError.new task.uri+"\n" if task.status == "Cancelled"
- halt 202,task.uri.to_s+"\n"
-end
-
-
-# Deletes datasets that have been created by a crossvalidation that no longer exists
-# (This can happen if a crossvalidation fails unexpectedly)
-delete '/cleanup' do
- Dir["./#{@@datadir}/*json"].each do |file|
- dataset = OpenTox::Dataset.from_json File.read(file)
- if dataset.metadata[DC.creator] && dataset.metadata[DC.creator] =~ /crossvalidation\/[0-9]/
- begin
- cv = OpenTox::Crossvalidation.find(dataset.metadata[DC.creator],@subjectid)
- raise unless cv
- rescue
- LOGGER.debug "deleting #{dataset.uri}, crossvalidation missing: #{dataset.metadata[DC.creator]}"
- begin
- dataset.delete @subjectid
- rescue
- end
- end
- end
- end
- "cleanup done"
-end
-
-# Delete a dataset
-# @return [text/plain] Status message
-delete '/:id' do
- LOGGER.debug "deleting dataset with id #{@id}"
- begin
- FileUtils.rm Dir["#{@@datadir}/#{@id}.*"]
- if @subjectid and !File.exists? @json_file and @uri
- begin
- res = OpenTox::Authorization.delete_policies_from_uri(@uri, @subjectid)
- LOGGER.debug "Policy deleted for Dataset URI: #{@uri} with result: #{res}"
- rescue
- LOGGER.warn "Policy delete error for Dataset URI: #{@uri}"
- end
- end
- response['Content-Type'] = 'text/plain'
- "Dataset #{@id} deleted."
- rescue
- raise OpenTox::NotFoundError.new "Dataset #{@id} does not exist."
- end
-end
-
-# Delete all datasets
-# @return [text/plain] Status message
-delete '/?' do
- FileUtils.rm Dir["#{@@datadir}/*.rdfxml"]
- FileUtils.rm Dir["#{@@datadir}/*.xls"]
- FileUtils.rm Dir["#{@@datadir}/*.yaml"]
- FileUtils.rm Dir["#{@@datadir}/*.json"]
- response['Content-Type'] = 'text/plain'
- "All datasets deleted."
-end
-=end