author     root <root@ot-dev.in-silico.ch>  2011-02-24 10:45:33 +0000
committer  root <root@ot-dev.in-silico.ch>  2011-02-24 10:45:33 +0000
commit     b4513bfbf3a2d3e0c34de0765d4ea604e2f1500c
tree       ce65809e58e2df8ddb91b3a9967672c2cd54d53c
parent     e9918ecda2a658855d483ca953b1348d1803d22f

file storage implemented for models
-rw-r--r--  .gitignore        2
-rw-r--r--  application.rb  126
-rw-r--r--  lazar.rb        118
3 files changed, 143 insertions(+), 103 deletions(-)
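This commit replaces the DataMapper-backed ModelStore with plain YAML files under public/. As a rough, self-contained sketch of that storage scheme (STORE_DIR, save_model and load_model are illustrative names; only next_id mirrors the helper added in the diff below):

require 'yaml'
require 'fileutils'

STORE_DIR = "public"  # storage location used in the diff

# next free id = highest existing id + 1, mirroring the next_id helper
def next_id
  ids = Dir["#{STORE_DIR}/*.yaml"].map { |f| File.basename(f, ".yaml").to_i }
  (ids.max || 0) + 1
end

# serialize a model to public/<id>.yaml and return its id
def save_model(model)
  FileUtils.mkdir_p STORE_DIR
  id = next_id
  File.open("#{STORE_DIR}/#{id}.yaml", "w") { |f| f.puts model.to_yaml }
  id
end

# load a stored model, or nil if the file does not exist
def load_model(id)
  path = "#{STORE_DIR}/#{id}.yaml"
  File.exist?(path) ? YAML.load_file(path) : nil
end

id = save_model("endpoint" => "demo", "created" => Time.now.to_s)
puts load_model(id).inspect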
diff --git a/.gitignore b/.gitignore
index b5729fb..4638e4f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,4 +2,4 @@ tmp/*
*.sqlite3
log/*
db/*
-models/*
+public/*
diff --git a/application.rb b/application.rb
index ae4d064..eb6d53a 100644
--- a/application.rb
+++ b/application.rb
@@ -2,14 +2,16 @@ require 'rubygems'
gem "opentox-ruby", "~> 0"
require 'opentox-ruby'
+set :lock, true
+=begin
class ModelStore
- include DataMapper::Resource
- attr_accessor :prediction_dataset, :subjectid
- property :id, Serial
- property :uri, String, :length => 255
- property :yaml, Text, :length => 2**32-1
- property :created_at, DateTime
-
+ include DataMapper::Resource
+ attr_accessor :prediction_dataset, :subjectid
+ property :id, Serial
+ property :uri, String, :length => 255
+ property :yaml, Text, :length => 2**32-1
+ property :created_at, DateTime
+
@subjectid = nil
after :save, :check_policy
@@ -18,45 +20,100 @@ class ModelStore
def check_policy
OpenTox::Authorization.check_policy(uri, subjectid)
end
-
+
end
+=end
class PredictionCache
# cache predictions
- include DataMapper::Resource
- property :id, Serial
- property :compound_uri, String, :length => 255
- property :model_uri, String, :length => 255
- property :dataset_uri, String, :length => 255
+ include DataMapper::Resource
+ property :id, Serial
+ property :compound_uri, String, :length => 255
+ property :model_uri, String, :length => 255
+ property :dataset_uri, String, :length => 255
end
DataMapper.auto_upgrade!
-require 'lazar.rb'
-#require 'property_lazar.rb'
+before do
+ @accept = request.env['HTTP_ACCEPT']
+ @accept = 'application/rdf+xml' if @accept == '*/*' or @accept == '' or @accept.nil?
+ @id = request.path_info.match(/^\/\d+/)
+ unless @id.nil?
+ @id = @id.to_s.sub(/\//,'').to_i
+
+ @uri = uri @id
+ @yaml_file = "public/#{@id}.yaml"
+ halt 404, "Dataset #{@id} not found." unless File.exists? @yaml_file
+ end
+
+ # make sure subjectid is not included in params, subjectid is set as member variable
+ params.delete(:subjectid)
+end
+require 'lazar.rb'
helpers do
- def activity(a)
- case a.to_s
- when "true"
- act = "active"
- when "false"
- act = "inactive"
- else
- act = "not available"
- end
- act
- end
+
+ def next_id
+ id = Dir["./public/*yaml"].collect{|f| File.basename(f.sub(/.yaml/,'')).to_i}.sort.last
+ id = 0 if id.nil?
+ id + 1
+ end
+
+ def uri(id)
+ url_for "/#{id}", :full
+ end
+
+ def uri_available?(urlStr)
+ url = URI.parse(urlStr)
+ unless @subjectid
+ Net::HTTP.start(url.host, url.port) do |http|
+ return http.head(url.request_uri).code == "200"
+ end
+ else
+ Net::HTTP.start(url.host, url.port) do |http|
+ return http.post(url.request_uri, "subjectid=#{@subjectid}").code == "202"
+ end
+ end
+ end
+
+ def activity(a)
+ case a.to_s
+ when "true"
+ act = "active"
+ when "false"
+ act = "inactive"
+ else
+ act = "not available"
+ end
+ act
+ end
end
get '/?' do # get index of models
- response['Content-Type'] = 'text/uri-list'
- params.delete_if{|k,v| k=="subjectid"}
- ModelStore.all(params).collect{|m| m.uri}.join("\n") + "\n"
+ response['Content-Type'] = 'text/uri-list'
+ Dir["./public/*yaml"].collect{|f| File.basename(f.sub(/.yaml/,'')).to_i}.sort.collect{|n| uri n}.join("\n") + "\n"
end
delete '/:id/?' do
+ LOGGER.debug "Deleting model with id "+@id.to_s
+ begin
+ FileUtils.rm @yaml_file
+ if @subjectid and !File.exists? @yaml_file and @uri
+ begin
+ res = OpenTox::Authorization.delete_policies_from_uri(@uri, @subjectid)
+ LOGGER.debug "Policy deleted for Dataset URI: #{@uri} with result: #{res}"
+ rescue
+ LOGGER.warn "Policy delete error for Dataset URI: #{@uri}"
+ end
+ end
+ response['Content-Type'] = 'text/plain'
+ "Model #{@id} deleted."
+ rescue
+ halt 404, "Model #{@id} does not exist."
+ end
+=begin
begin
uri = ModelStore.get(params[:id]).uri
ModelStore.get(params[:id]).destroy!
@@ -75,13 +132,14 @@ delete '/:id/?' do
rescue
halt 404, "Model #{params[:id]} does not exist."
end
+=end
end
delete '/?' do
- # TODO delete datasets
- ModelStore.auto_migrate!
- #Prediction.auto_migrate!
- response['Content-Type'] = 'text/plain'
- "All models and cached predictions deleted."
+ # TODO delete datasets
+ FileUtils.rm Dir["public/*.yaml"]
+ PredictionCache.auto_migrate!
+ response['Content-Type'] = 'text/plain'
+ "All models and cached predictions deleted."
end
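For reference, a self-contained sketch of the uri_available? helper that this diff moves from lazar.rb into application.rb: without a subjectid it expects HEAD to return 200, with a subjectid it polls the A&A-protected URI via POST and expects 202 (the example URL is hypothetical):

require 'net/http'
require 'uri'

def uri_available?(url_str, subjectid = nil)
  url = URI.parse(url_str)
  Net::HTTP.start(url.host, url.port) do |http|
    if subjectid
      # protected services answer 202 to a POST carrying the subjectid
      http.post(url.request_uri, "subjectid=#{subjectid}").code == "202"
    else
      # unprotected resources simply answer 200 to HEAD
      http.head(url.request_uri).code == "200"
    end
  end
end

puts uri_available?("http://example.org/dataset/1")  # hypothetical URI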
diff --git a/lazar.rb b/lazar.rb
index 30c0be4..2f3b126 100644
--- a/lazar.rb
+++ b/lazar.rb
@@ -1,72 +1,52 @@
require "haml"
-helpers do
- def uri_available?(urlStr)
- url = URI.parse(urlStr)
- unless @subjectid
- Net::HTTP.start(url.host, url.port) do |http|
- return http.head(url.request_uri).code == "200"
- end
- else
- Net::HTTP.start(url.host, url.port) do |http|
- return http.post(url.request_uri, "subjectid=#{@subjectid}").code == "202"
- end
- end
- end
-end
-
# Get model representation
# @return [application/rdf+xml,application/x-yaml] Model representation
get '/:id/?' do
- accept = request.env['HTTP_ACCEPT']
- accept = "application/rdf+xml" if accept == '*/*' or accept == '' or accept.nil?
- # workaround for browser links
- case params[:id]
- when /.yaml$/
- params[:id].sub!(/.yaml$/,'')
- accept = 'application/x-yaml'
- when /.rdf$/
- params[:id].sub!(/.rdf$/,'')
- accept = 'application/rdf+xml'
- end
- halt 404, "Model #{params[:id]} not found." unless model = ModelStore.get(params[:id])
- lazar = YAML.load model.yaml
- case accept
- when /application\/rdf\+xml/
+=begin
+ accept = request.env['HTTP_ACCEPT']
+ accept = "application/rdf+xml" if accept == '*/*' or accept == '' or accept.nil?
+ # workaround for browser links
+ case params[:id]
+ when /.yaml$/
+ params[:id].sub!(/.yaml$/,'')
+ accept = 'application/x-yaml'
+ when /.rdf$/
+ params[:id].sub!(/.rdf$/,'')
+ accept = 'application/rdf+xml'
+ end
+=end
+ halt 404, "Model #{params[:id]} not found." unless File.exists? @yaml_file
+ response['Content-Type'] = @accept
+ case @accept
+ when /application\/rdf\+xml/
s = OpenTox::Serializer::Owl.new
- s.add_model(model.uri,lazar.metadata)
+ s.add_model(@uri,YAML.load_file(@yaml_file).metadata)
response['Content-Type'] = 'application/rdf+xml'
s.to_rdfxml
- when /yaml/
- response['Content-Type'] = 'application/x-yaml'
- model.yaml
- else
- halt 400, "Unsupported MIME type '#{accept}'"
- end
+ when /yaml/
+ response['Content-Type'] = 'application/x-yaml'
+ File.read @yaml_file
+ when /html/
+ response['Content-Type'] = 'text/html'
+ OpenTox.text_to_html File.read(@yaml_file)
+ else
+ halt 400, "Unsupported MIME type '#{@accept}'"
+ end
end
get '/:id/metadata.?:ext?' do
- metadata = YAML.load(ModelStore.get(params[:id]).yaml).metadata
+ metadata = YAML.load_file(@yaml_file).metadata
- accept = request.env['HTTP_ACCEPT']
- accept = "application/rdf+xml" if accept == '*/*' or accept == '' or accept.nil?
- if params[:ext]
- case params[:ext]
- when "yaml"
- accept = 'application/x-yaml'
- when "rdf", "rdfxml"
- accept = 'application/rdf+xml'
- end
- end
- response['Content-Type'] = accept
- case accept
+ response['Content-Type'] = @accept
+ case @accept
when /yaml/
metadata.to_yaml
else #when /rdf/ and anything else
serializer = OpenTox::Serializer::Owl.new
- serializer.add_metadata url_for("/#{params[:id]}",:full), metadata
+ serializer.add_metadata @uri, metadata
serializer.to_rdfxml
end
@@ -77,14 +57,17 @@ end
# @return [String] Model URI
post '/?' do # create model
halt 400, "MIME type \"#{request.content_type}\" not supported." unless request.content_type.match(/yaml/)
- model = ModelStore.create
- model.subjectid = @subjectid
- model.uri = url_for("/#{model.id}", :full)
- lazar = YAML.load request.env["rack.input"].read
- lazar.uri = model.uri
- model.yaml = lazar.to_yaml
- model.save
- model.uri
+ #model = ModelStore.create
+ #model.subjectid = @subjectid
+ #model.uri = url_for("/#{model.id}", :full)
+ @id = next_id
+ @uri = uri @id
+ @yaml_file = "public/#{@id}.yaml"
+ lazar = YAML.load request.env["rack.input"].read
+ lazar.uri = @uri
+ File.open(@yaml_file,"w+"){|f| f.puts lazar.to_yaml}
+ response['Content-Type'] = 'text/uri-list'
+ @uri
end
# Make a lazar prediction. Predicts either a single compound or all compounds from a dataset
@@ -94,15 +77,14 @@ end
# @return [text/uri-list] URI of prediction task (dataset prediction) or prediction dataset (compound prediction)
post '/:id/?' do
- puts params.to_yaml
- @lazar = YAML.load ModelStore.get(params[:id]).yaml
+ halt 404, "Model #{params[:id]} does not exist." unless File.exists? @yaml_file
- halt 404, "Model #{params[:id]} does not exist." unless @lazar
- halt 404, "No compound_uri or dataset_uri parameter." unless compound_uri = params[:compound_uri] or dataset_uri = params[:dataset_uri]
+ halt 404, "No compound_uri or dataset_uri parameter." unless compound_uri = params[:compound_uri] or dataset_uri = params[:dataset_uri]
+ @lazar = YAML.load_file @yaml_file
response['Content-Type'] = 'text/uri-list'
- if compound_uri
+ if compound_uri
cache = PredictionCache.first(:model_uri => @lazar.uri, :compound_uri => compound_uri)
return cache.dataset_uri if cache and uri_available?(cache.dataset_uri)
begin
@@ -113,12 +95,12 @@ post '/:id/?' do
LOGGER.error "Lazar prediction failed for #{compound_uri} with #{$!} "
halt 500, "Prediction of #{compound_uri} with #{@lazar.uri} failed."
end
- elsif dataset_uri
- task = OpenTox::Task.create("Predict dataset",url_for("/#{@lazar.id}", :full)) do |task|
+ elsif dataset_uri
+ task = OpenTox::Task.create("Predict dataset",url_for("/#{@lazar.id}", :full)) do |task|
@lazar.predict_dataset(dataset_uri, @subjectid, task).uri
- end
+ end
halt 503,task.uri+"\n" if task.status == "Cancelled"
halt 202,task.uri
- end
+ end
end
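The POST /:id route above consults the PredictionCache before running a new prediction. A simplified, self-contained sketch of that control flow, with an in-memory Hash standing in for the DataMapper-backed PredictionCache and stubbed predict/available? calls (all names besides the flow itself are illustrative):

CACHE = {}  # {[model_uri, compound_uri] => dataset_uri}

def available?(dataset_uri)
  true  # stub for the uri_available? HEAD/POST check
end

def predict(model_uri, compound_uri)
  "http://example.org/dataset/42"  # hypothetical prediction dataset URI
end

def cached_prediction(model_uri, compound_uri)
  key = [model_uri, compound_uri]
  hit = CACHE[key]
  return hit if hit && available?(hit)           # reuse cached dataset if still resolvable
  CACHE[key] = predict(model_uri, compound_uri)  # otherwise predict and cache the new URI
end

puts cached_prediction("http://example.org/model/1", "http://example.org/compound/7")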