1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
|
require 'rubygems'
gem "opentox-ruby-api-wrapper", "= 1.6.1"
require 'opentox-ruby-api-wrapper'
# DataMapper model for a dataset resource.
# The canonical dataset content is stored as YAML in the `yaml` column;
# the OWL (RDF/XML) serialization is derived from it on demand and cached
# in the `owl` column (see the rdf branch of GET /:id).
class Dataset
include DataMapper::Resource
property :id, Serial
property :uri, String, :length => 255
#property :file, String, :length => 255
# LONGTEXT-sized columns: dataset payloads can be large
property :yaml, Text, :length => 2**32-1
property :owl, Text, :length => 2**32-1
property :created_at, DateTime
# Serialize the YAML payload as OWL (RDF/XML) via the opentox wrapper.
# Returns the RDF/XML string. Assumes the deserialized YAML object
# responds to #title, #creator, #compounds and #data -- TODO confirm
# against the opentox-ruby-api-wrapper Dataset class.
def to_owl
data = YAML.load(yaml)
owl = OpenTox::Owl.create 'Dataset', uri
owl.set "title", data.title
owl.set "creator", data.creator
if data.compounds
data.compounds.each do |compound|
owl.add_data_entries compound,data.data[compound]
end
end
owl.rdf
end
end
# Create/extend the datasets table in place (non-destructive migration).
DataMapper.auto_upgrade!
## REST API
# List all dataset URIs as text/uri-list, one per line.
# Query parameters are passed straight through as DataMapper filters.
get '/?' do
  response['Content-Type'] = 'text/uri-list'
  uris = Dataset.all(params).map { |dataset| dataset.uri }
  "#{uris.join("\n")}\n"
end
# GET /:id -- return one dataset with content negotiation via the Accept
# header. A trailing .yaml/.rdf/.xls extension on the id overrides the
# header (workaround for browser links).
get '/:id' do
  accept = request.env['HTTP_ACCEPT']
  accept = 'application/rdf+xml' if accept == '*/*' or accept == '' or accept.nil?
  # workaround for browser links: strip a format extension and map it to a
  # MIME type. Dots are escaped -- the original /.yaml$/ etc. matched ANY
  # character before the extension, so an id like "1xyaml" was mangled.
  case params[:id]
  when /\.yaml$/
    params[:id].sub!(/\.yaml$/,'')
    accept = 'application/x-yaml'
  when /\.rdf$/
    params[:id].sub!(/\.rdf$/,'')
    accept = 'application/rdf+xml'
  when /\.xls$/
    params[:id].sub!(/\.xls$/,'')
    accept = 'application/vnd.ms-excel'
  end
  begin
    dataset = Dataset.get(params[:id])
  rescue => e
    # Log and fall through to the 404 below. (The original raised
    # e.message + e.backtrace -- a TypeError, String + Array -- which also
    # made its own halt unreachable.)
    LOGGER.error e.message
    LOGGER.info e.backtrace.join("\n")
    dataset = nil
  end
  # covers both "not found" and the stale-record case the original warned about
  halt 404, "Dataset #{params[:id]} not found." if dataset.nil?
  case accept
  when /rdf/ # redland sends text/rdf instead of application/rdf+xml
    response['Content-Type'] = 'application/rdf+xml'
    unless dataset.owl # lazy owl creation
      dataset.owl = dataset.to_owl
      dataset.save
    end
    dataset.owl
  when /yaml/
    response['Content-Type'] = 'application/x-yaml'
    dataset.yaml
  when /ms-excel/
    require 'spreadsheet' # deliberate lazy load: only needed for XLS export
    response['Content-Type'] = 'application/vnd.ms-excel'
    Spreadsheet.client_encoding = 'UTF-8'
    book = Spreadsheet::Workbook.new
    tmp = Tempfile.new('opentox-feature-xls')
    sheet = book.create_worksheet :name => 'Training Data'
    sheet.column(0).width = 100
    i = 0
    YAML.load(dataset.yaml).data.each do |line|
      begin
        # line[0] is a compound URI; fetch its SMILES representation
        smilestring = RestClient.get(line[0], :accept => 'chemical/x-daylight-smiles').to_s
        if line[1]
          val = line[1][0].first[1]
          LOGGER.debug val
          sheet.update_row(i, smilestring, val)
        end
        i += 1
      rescue => e
        # skip unexportable rows, but say so (the original swallowed silently)
        LOGGER.warn "XLS export: skipping row #{i}: #{e.message}"
      end
    end
    begin
      book.write tmp.path
      return tmp
    rescue => e
      # the original's tmp.close! was dead code after `return tmp`;
      # clean up and report instead of failing silently
      tmp.close!
      LOGGER.error "XLS export failed: #{e.message}"
      halt 500, "Could not create XLS representation of dataset #{params[:id]}."
    end
  else
    halt 400, "Unsupported MIME type '#{accept}'"
  end
end
# Return a single feature of a dataset, resolved through the OpenTox wrapper.
get '/:id/features/:feature_id/?' do
  dataset_uri = url_for("/#{params[:id]}", :full)
  OpenTox::Dataset.find(dataset_uri).feature(params[:feature_id])
end
# List the dataset's feature URIs, one per line.
get '/:id/features/?' do
  stored = Dataset.get(params[:id])
  feature_list = YAML.load(stored.yaml).features
  "#{feature_list.join("\n")}\n"
end
# List the dataset's compound URIs, one per line.
get '/:id/compounds/?' do
  stored = Dataset.get(params[:id])
  compound_list = YAML.load(stored.yaml).compounds
  "#{compound_list.join("\n")}\n"
end
# Create a dataset from a YAML or RDF/XML request body.
# Returns the new dataset's URI as text/uri-list.
post '/?' do
  dataset = Dataset.new
  dataset.save # initial save to obtain the serial id for the URI
  dataset.uri = url_for("/#{dataset.id}", :full)
  content_type = request.content_type
  # default when the client sends no Content-Type header
  content_type = "application/rdf+xml" if content_type.nil?
  # Dispatch on the defaulted variable. The original switched on
  # request.content_type, so the rdf+xml default above never took effect
  # and a missing Content-Type produced a 404 instead of RDF parsing.
  case content_type
  when /yaml/
    dataset.yaml = request.env["rack.input"].read
  # when /csv/
  #   dataset.yaml = csv2yaml request.env["rack.input"].read
  when "application/rdf+xml"
    dataset.yaml = OpenTox::Dataset.owl_to_yaml(request.env["rack.input"].read,dataset.uri)
  else
    halt 404, "MIME type \"#{content_type}\" not supported."
  end
  begin
    raise "saving failed: "+dataset.errors.inspect unless dataset.save
  rescue => e
    LOGGER.error e.message
    LOGGER.info e.backtrace
    halt 500, "Could not save dataset #{dataset.uri}."
  end
  LOGGER.debug "#{dataset.uri} saved."
  response['Content-Type'] = 'text/uri-list'
  dataset.uri + "\n"
end
# Delete a single dataset. Responds 404 when the id is unknown.
delete '/:id/?' do
  dataset = Dataset.get(params[:id])
  # Explicit existence check -- the original relied on a NoMethodError
  # from nil.destroy! being caught by the bare rescue below.
  halt 404, "Dataset #{params[:id]} does not exist." if dataset.nil?
  begin
    dataset.destroy!
    response['Content-Type'] = 'text/plain'
    "Dataset #{params[:id]} deleted."
  rescue => e
    # keep the original's 404 contract for callers, but log the real cause
    LOGGER.error e.message
    halt 404, "Dataset #{params[:id]} does not exist."
  end
end
# Delete every dataset in the store.
delete '/?' do
  response['Content-Type'] = 'text/plain'
  # auto_migrate! drops and recreates the table, removing all records
  Dataset.auto_migrate!
  "All datasets deleted."
end
|