author     Christoph Helma <helma@in-silico.de>   2009-11-23 18:11:03 +0100
committer  Christoph Helma <helma@in-silico.de>   2009-11-23 18:11:03 +0100
commit     1034d4f25cdffab093b1e9d479785fb0183867dc (patch)
tree       eb63f89e35b1d3dcd8f5d7d211a2027093a453e6
parent     c9c1cccb1dcc97d8e8dddcd017faddeb166ba92f (diff)
RDF support added
-rw-r--r--  .gitignore        |   1
-rw-r--r--  application.rb    | 118
-rw-r--r--  config.ru         |   1
-rw-r--r--  import.rb         |  27
-rw-r--r--  redis/dataset.rb  |  75
5 files changed, 38 insertions(+), 184 deletions(-)
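This commit drops the Redis-backed dataset store (redis/dataset.rb, import.rb) and instead serves datasets as plain RDF/XML files under datasets/. The sketch below is not part of the commit; it only illustrates how the new routes in application.rb could be exercised from Ruby's standard library, assuming the service runs locally on Sinatra's default port 4567 and that dataset.rdf is an existing RDF/XML file.

# Minimal usage sketch (illustration only, not part of the commit).
# Assumptions: service at http://localhost:4567/, local file dataset.rdf.
require 'net/http'
require 'uri'

service = URI.parse("http://localhost:4567/")

# POST an RDF/XML document; the service stores it as datasets/<id>.rdf
# and returns the URI of the new dataset.
rdf  = File.read("dataset.rdf")
post = Net::HTTP::Post.new(service.path)
post.body = rdf
post.content_type = "application/rdf+xml"
dataset_uri = Net::HTTP.start(service.host, service.port) { |http| http.request(post) }.body.strip
puts dataset_uri

# GET the list of all dataset URIs (one per line), then a single dataset.
puts Net::HTTP.get(service)
puts Net::HTTP.get(URI.parse(dataset_uri))

# PUT replaces the stored RDF/XML; DELETE removes the file on disk.
id_path = URI.parse(dataset_uri).path
put = Net::HTTP::Put.new(id_path)
put.body = rdf
put.content_type = "application/rdf+xml"
Net::HTTP.start(service.host, service.port) { |http| http.request(put) }

delete = Net::HTTP::Delete.new(id_path)
puts Net::HTTP.start(service.host, service.port) { |http| http.request(delete) }.body

Note that the PUT handler deletes and rewrites datasets/<id>.rdf rather than updating it in place, so it expects the dataset to exist already.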
diff --git a/.gitignore b/.gitignore
index 297fc1c..1441bd2 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,3 +2,4 @@ api_key.rb
*.sqlite3
tmp/*
log/*
+datasets/*
diff --git a/application.rb b/application.rb
index f94ebbe..f360432 100644
--- a/application.rb
+++ b/application.rb
@@ -1,102 +1,56 @@
require 'rubygems'
require 'opentox-ruby-api-wrapper'
-require File.join File.dirname(__FILE__), 'redis', 'dataset.rb'
-set :default_content, :yaml
-
-helpers do
-
- def find
- uri = uri(params[:splat].first)
- halt 404, "Dataset \"#{uri}\" not found." unless @set = Dataset.find(uri)
- end
-
- def uri(name)
- name = URI.encode(name)
- uri = url_for("/", :full) + name
- end
-
-end
+mime :rdf, "application/rdf+xml"
+set :default_content, :rdf
## REST API
get '/?' do
- Dataset.find_all.join("\n")
-end
-
-get '/*/name/?' do
- find
- URI.decode(URI.split(@set.uri)[5].split(/\//)[1])
-end
-
-get '/*/features/?' do
- find
- @set.features.join("\n")
+ Dir["datasets/*"].collect{|dataset| url_for("/", :full) + File.basename(dataset,".rdf")}.sort.join("\n")
end
-get '/*/compounds/?' do
- find
- @set.compounds.join("\n")
+get '/:id/?' do
+ send_file File.join("datasets",params[:id] + ".rdf")
end
-get '/*/compound/*/?' do
- find
- inchi = params[:splat][1]#.gsub(/(InChI.*) (.*)/,'\1+\2')) # reinsert dropped '+' signs in InChIs
- @set.compound_features(inchi).join("\n")
-end
-
-# catch the rest
-get '/*/?' do
- find
- dataset = {}
- @set.compounds.each do |c|
- dataset[c] = @set.compound_features(c)
- end
- dataset.to_yaml
-end
-
-# create a dataset
post '/?' do
- dataset_uri = uri(params[:name])
- halt 403, "Dataset \"#{dataset_uri}\" exists." if Dataset.find(dataset_uri)
- Dataset.create(dataset_uri).uri
-end
-
-put '/*/import/?' do
- find
- halt 404, "Compound format #{params[:compound_format]} not (yet) supported" unless params[:compound_format] =~ /smiles|inchi|name/
- #task = OpenTox::Task.create(@set.uri)
- data = {}
- case params[:file][:type]
- when "text/csv"
- File.open(params[:file][:tempfile].path).each_line do |line|
- record = line.chomp.split(/,\s*/)
- compound_uri = OpenTox::Compound.new(:smiles => record[0]).uri
-# begin
- feature_uri = OpenTox::Feature.new(:name => @set.name, :classification => record[1]).uri
-# rescue
-# puts "Error: " + line
-# puts record.join("\t")
-# puts @set.name.to_s
-# #puts [record[0] , @set.name , record[1]].to_yaml
-# end
- data[compound_uri] = [] unless data[compound_uri]
- data[compound_uri] << feature_uri
+ case request.content_type
+ when"application/rdf+xml"
+ input = request.env["rack.input"].read
+ id = Dir["datasets/*"].collect{|dataset| File.basename(dataset,".rdf").to_i}.sort.last
+ if id.nil?
+ id = 1
+ else
+ id += 1
end
+ File.open(File.join("datasets",id.to_s + ".rdf"),"w+") { |f| f.write input }
+ url_for("/#{id}", :full)
else
- halt 404, "File format #{request.content_type} not (yet) supported"
+ "MIME type \"#{request.content_type}\" not supported."
end
- @set.add(data.to_yaml)
- @set.uri
end
-# import yaml
-put '/*/?' do
- find
- @set.add(params[:features])
+put '/:id/?' do
+ case request.content_type
+ when"application/rdf+xml"
+ input = request.env["rack.input"].read
+ id = params[:id]
+ File.delete(File.join("datasets",id.to_s + ".rdf"))
+ File.open(File.join("datasets",id.to_s + ".rdf"),"w+") { |f| f.write input }
+ url_for("/#{id}", :full)
+ else
+ "MIME type \"#{request.content_type}\" not supported."
+ end
end
-delete '/*/?' do
- find
- @set.delete
+delete '/:id/?' do
+ path = File.join("datasets",params[:id] + ".rdf")
+ if File.exists? path
+ File.delete path
+ "Dataset #{params[:id]} deleted."
+ else
+ status 404
+ "Dataset #{params[:id]} does not exist."
+ end
end
diff --git a/config.ru b/config.ru
index 63dd2ce..c3e280a 100644
--- a/config.ru
+++ b/config.ru
@@ -5,6 +5,7 @@ require 'rack'
require 'rack/contrib'
FileUtils.mkdir_p 'log' unless File.exists?('log')
+FileUtils.mkdir_p 'datasets' unless File.exists?('datasets')
log = File.new("log/#{ENV["RACK_ENV"]}.log", "a")
$stdout.reopen(log)
$stderr.reopen(log)
diff --git a/import.rb b/import.rb
deleted file mode 100644
index 81b9dcb..0000000
--- a/import.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-post '/*/import/?' do
- find
- halt 404, "Compound format #{params[:compound_format]} not (yet) supported" unless params[:compound_format] =~ /smiles|inchi|name/
- task = OpenTox::Task.create(@set.uri)
- Spork.spork do
- @compounds_set = Dataset.find File.join(@set.uri, "compounds")
- @features_set = Dataset.find File.join(@set.uri, "features")
- case params[:file][:type]
- when "text/csv"
- task.start
- File.open(params[:file][:tempfile].path).each_line do |line|
- record = line.chomp.split(/,\s*/)
- compound_uri = OpenTox::Compound.new(:smiles => record[0]).uri
- feature_uri = OpenTox::Feature.new(:name => @set.name, :values => {:classification => record[1]}).uri
- @compounds_set.add compound_uri #unless @compounds_set.member? compound_uri
- @features_set.add feature_uri #unless @features_set.member? feature_uri
- # key: /dataset/:dataset/compound/:inchi
- @compound_features = Dataset.find_or_create File.join(@set.uri,'compound',OpenTox::Compound.new(:uri => compound_uri).inchi)
- @compound_features.add feature_uri
- end
- task.completed
- else
- halt 404, "File format #{request.content_type} not (yet) supported"
- end
- end
- task.uri
-end
diff --git a/redis/dataset.rb b/redis/dataset.rb
deleted file mode 100644
index f7fafa0..0000000
--- a/redis/dataset.rb
+++ /dev/null
@@ -1,75 +0,0 @@
-class Dataset
-
- attr_reader :uri, :members
-
- # key: /datasets
- # set: dataset uris
- # key: :dataset_uri/compounds
- # set: compound uris
- # key: :dataset_uri/features
- # set: feature uris
- # key: :dataset_uri/compound/:inchi
- # set: feature uris
-
- def initialize(uri)
- @uri = uri
- end
-
- def name
- URI.unescape File.basename(uri)
- end
-
- def self.create(uri)
- @@redis.set_add "datasets", uri
- Dataset.new(uri)
- end
-
- def self.find(uri)
- Dataset.new(uri) if @@redis.set_member? "datasets", uri
- end
-
- def self.find_or_create(uri)
- Dataset.find(uri) or Dataset.create(uri)
- end
-
- def self.find_all
- @@redis.set_members "datasets"
- end
-
- def compounds
- @@redis.set_members(File.join(@uri,'compounds'))
- end
-
- def features
- @@redis.set_members(File.join(@uri,'features'))
- end
-
- def compound_features(compound_uri)
- @@redis.set_members(File.join(@uri,'compound',inchi(compound_uri)))
- end
-
- def add(yaml)
- YAML.load(yaml).each do |compound_uri,feature_uris|
- @@redis.set_add File.join(@uri,'compounds'), compound_uri
- feature_uris.each do |feature_uri|
- @@redis.set_add File.join(@uri,'features'), feature_uri
- @@redis.set_add File.join(@uri,'compound',inchi(compound_uri)), feature_uri
- end
- end
- end
-
- def delete
- @@redis.set_members(File.join(@uri,'compounds')).each do |compound_uri|
- @@redis.delete File.join(@uri,'compound',inchi(compound_uri))
- end
- @@redis.delete(File.join(@uri,'compounds'))
- @@redis.delete(File.join(@uri,'features'))
- @@redis.delete @uri
- @@redis.set_delete "datasets", @uri
- end
-
- def inchi(compound_uri)
- inchi = compound_uri.sub(/^.*\/InChI/,'InChI')
- end
-
-end