1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
|
require 'csv'
module OpenTox
# Ruby wrapper for OpenTox Dataset Webservices (http://opentox.org/dev/apis/api-1.2/dataset).
class Dataset
# Initialises the dataset's backing hash: makes sure the three
# array-valued entries exist (empty when nothing is loaded yet).
def initialize uri=nil
  super uri
  %w[features compounds data_entries].each do |key|
    @data[key] ||= []
  end
end
# NOTE(review): dead code — this definition is shadowed by the
# data_entries(force_update=false) method defined further down in this
# class (in Ruby the later definition wins), so this body is never reached.
def data_entries
@data["data_entries"]
end
# Metaprogrammed writer/append helpers for the uri-backed arrays.
# send is used because Module#define_method is private.
# NOTE(review): "#{method}<<" defines methods literally named e.g. "features<<".
# The << operator dispatches to a method named :<<, so these can only be
# invoked via send("features<<", ...) — verify whether any caller actually does.
# NOTE(review): the writers store value.uri for each element, which fits
# features/compounds, but data_entries rows are plain Arrays that do not
# respond to #uri — presumably "data_entries=" would fail if ever called.
[:features, :compounds, :data_entries].each do |method|
send :define_method, "#{method}=" do |value|
@data[method.to_s] = value.collect{|v| v.uri}
end
send :define_method, "#{method}<<" do |value|
@data[method.to_s] << value.uri
end
end
# Get dataset metadata (lazy loading from the dataset service).
# Overrides {OpenTox#metadata} to fetch only the /metadata document
# instead of the whole dataset.
# NOTE: earlier ntriples/rdfxml fallbacks are no longer attempted; only
# the JSON representation is parsed.
# @param force_update [TrueClass,FalseClass] re-fetch even if already loaded
# @return [Hash] the metadata
def metadata force_update=false
  if @data.empty? or force_update
    metadata_uri = File.join(@data["uri"], "metadata")
    @data = JSON.parse RestClientWrapper.get(metadata_uri, {}, {:accept => "application/json"})
  end
  @data
end
# Feature objects of this dataset (uri list is lazily fetched from the
# dataset service and cached in @data["features"]).
# @param force_update [TrueClass,FalseClass] re-fetch the feature uri list
# @return [Array] feature objects (NOT uris)
def features force_update=false
  if @data["features"].empty? or force_update
    list_uri = File.join(@data["uri"], "features")
    @data["features"] = begin
      # ordered datasets return ordered features
      JSON.parse RestClientWrapper.get(list_uri, {}, {:accept => "application/json"})
    rescue
      [] # service unreachable or non-JSON answer
    end
  end
  @data["features"].collect{|uri| Feature.new uri}
end
# Compound objects of this dataset (uri list is lazily fetched from the
# dataset service and cached in @data["compounds"]).
# @param force_update [TrueClass,FalseClass] re-fetch the compound uri list
# @return [Array] compound objects (NOT uris)
def compounds force_update=false
  if @data["compounds"].empty? or force_update
    list_uri = File.join(@data["uri"], "compounds")
    @data["compounds"] = begin
      # ordered datasets return ordered compounds
      JSON.parse RestClientWrapper.get(list_uri, {}, {:accept => "application/json"})
    rescue
      [] # service unreachable or non-JSON answer
    end
  end
  @data["compounds"].collect{|uri| Compound.new(uri)}
end
# Data entry matrix, lazily loaded via SPARQL from the dataset service.
# @param force_update [TrueClass,FalseClass] re-fetch even if already loaded
# @return [Array] with two dimensions,
#   first index: compounds, second index: features, values: compound feature values
def data_entries force_update=false
  if @data["data_entries"].empty? or force_update
    sparql = "SELECT ?cidx ?fidx ?value FROM <#{uri}> WHERE {
      ?data_entry <#{RDF::OLO.index}> ?cidx ;
                  <#{RDF::OT.values}> ?v .
      ?v <#{RDF::OT.feature}> ?f;
         <#{RDF::OT.value}> ?value .
      ?f <#{RDF::OLO.index}> ?fidx.
    } ORDER BY ?fidx ?cidx"
    # fetch the feature objects ONCE: the original called features twice per
    # result row, rebuilding the Feature array (and its lazy lookups) each time
    feature_objects = features
    RestClientWrapper.get(service_uri,{:query => sparql},{:accept => "text/uri-list"}).split("\n").each do |row|
      r,c,v = row.split("\t") # r: compound index, c: feature index
      @data["data_entries"][r.to_i] ||= []
      # adjust value class depending on feature type, StringFeature takes precedence over NumericFeature
      feature_types = feature_objects[c.to_i][RDF.type]
      if feature_types.include? RDF::OT.NumericFeature and ! feature_types.include? RDF::OT.StringFeature
        v = v.to_f if v
      end
      @data["data_entries"][r.to_i][c.to_i] = v if v
    end
    # TODO: fallbacks for external and unordered datasets
  end
  @data["data_entries"]
end
# Find data entry values for a given compound and feature
# @param compound [OpenTox::Compound] OpenTox Compound object
# @param feature [OpenTox::Feature] OpenTox Feature object
# @return [Array] Data entry values, one per occurrence of the compound
#   (nil for rows that have no stored entries)
def values(compound, feature)
  cpds = compounds # load once instead of on every loop iteration
  rows = (0 ... cpds.length).select { |r| cpds[r].uri == compound.uri }
  col = features.collect{|f| f.uri}.index feature.uri
  entries = data_entries
  # a compound row without any values is stored as nil (see to_ntriples);
  # the previous `data_entries[row][col]` raised NoMethodError on such rows
  rows.collect{|row| entries[row] ? entries[row][col] : nil}
end
# Convenience methods to search by compound/feature URIs
# Search a dataset for a feature given its URI
# @param uri [String] Feature URI
# @return [OpenTox::Feature] Feature object, or nil if not present
def find_feature_uri(uri)
  features.find { |feature| feature.uri == uri }
end
# Search a dataset for a compound given its URI
# @param uri [String] Compound URI
# @return [OpenTox::Compound] Compound object, or nil if not present
def find_compound_uri(uri)
  compounds.find { |candidate| candidate.uri == uri }
end
# for prediction result datasets
# assumes that there are features with title prediction and confidence
# @return [Array] of Hashes with keys { :compound, :value ,:confidence } (compound value is object not uri)
def predictions
predictions = []
prediction_feature = nil
confidence_feature = nil
# pick the predicted-variable features by their title suffix
metadata[RDF::OT.predictedVariables].each do |uri|
feature = OpenTox::Feature.new uri
case feature.title
when /prediction$/
prediction_feature = feature
when /confidence$/
confidence_feature = feature
end
end
# only emit predictions when both features were found
if prediction_feature and confidence_feature
compounds.each do |compound|
value = values(compound,prediction_feature).first
# numeric predictions are converted to Float; StringFeature takes precedence
value = value.to_f if prediction_feature[RDF.type].include? RDF::OT.NumericFeature and ! prediction_feature[RDF.type].include? RDF::OT.StringFeature
# NOTE(review): nil.to_f is 0.0, so a missing confidence becomes 0.0 and
# still passes the "if value and confidence" guard below — verify intended.
confidence = values(compound,confidence_feature).first.to_f
predictions << {:compound => compound, :value => value, :confidence => confidence} if value and confidence
end
end
predictions
end
# Adding data methods
# (Alternatively, you can directly change @data["features"] and @data["compounds"])
# Create a dataset from file (csv,sdf,...)
# @param filename [String] path of the file to upload
# @param wait [TrueClass,FalseClass] if the service answers with a task uri, wait for the task to finish
# @return [String] dataset uri
def upload filename, wait=true
# NOTE(review): File.new leaks the file handle (closed only at GC time); the
# handle is handed to the rest client, so File.open with a block may close it
# too early — verify the client's read behaviour before changing this.
uri = RestClientWrapper.put(@data["uri"], {:file => File.new(filename)})
wait_for_task uri if URI.task?(uri) and wait
# refresh the cached compounds, features and metadata from the service
compounds true
features true
metadata true
@data["uri"]
end
# Adds a single value for a compound/feature pair, appending the compound
# and/or feature to the dataset on demand. If the compound already has a
# value for this feature, the compound is appended again and the value is
# stored in the new (duplicate) row.
# @param compound [OpenTox::Compound]
# @param feature [OpenTox::Feature]
# @param value [Object] (will be converted to String)
# @return [Object] the stored value (nil when value was falsy)
def add_data_entry compound, feature, value
  # cache the uri lists so each array is scanned only once per call
  # (addresses the "4 times searching in arrays" complaint below)
  compound_uris = @data["compounds"].collect{|c| c.uri}
  unless compound_uris.include?(compound.uri)
    @data["compounds"] << compound
    compound_uris << compound.uri
  end
  row = compound_uris.index(compound.uri)
  feature_uris = @data["features"].collect{|f| f.uri}
  unless feature_uris.include?(feature.uri)
    @data["features"] << feature
    feature_uris << feature.uri
  end
  col = feature_uris.index(feature.uri)
  if @data["data_entries"][row] and @data["data_entries"][row][col] # duplicated values
    @data["compounds"] << compound
    compound_uris << compound.uri
    row = compound_uris.rindex(compound.uri)
  end
  if value
    @data["data_entries"][row] ||= []
    @data["data_entries"][row][col] = value
  end
end
# TODO: remove? might be dangerous if feature ordering is incorrect
# MG: I would not remove this because add_data_entry is very slow (4 times searching in arrays)
# CH: do you have measurements? compound and feature arrays are not that big, I suspect that feature search/creation is the time critical step
# Appends one row of feature values; the first element of the row is the compound.
# @param row [Array]
# @example
#   d = Dataset.new
#   d.features << Feature.new(a)
#   d.features << Feature.new(b)
#   d << [ Compound.new("c1ccccc1"), feature-value-a, feature-value-b ]
def << row
  cpd = row.shift # first element is the compound, the remainder are the feature values
  bad_request_error "Dataset features are empty." if !@data["features"]
  expected_size = @data["features"].size
  bad_request_error "Row size '#{row.size}' does not match features size '#{expected_size}'." if row.size != expected_size
  bad_request_error "First column is not a OpenTox::Compound" if cpd.class != OpenTox::Compound
  @data["compounds"] << cpd.uri
  @data["data_entries"] << row
end
# Serialisation
# converts dataset to csv format including compound smiles as first column, other column headers are feature titles
# @param inchi [TrueClass,FalseClass] use InChI instead of SMILES for the compound column
# @return [String]
def to_csv(inchi=false)
  CSV.generate do |csv| #{:force_quotes=>true}
    header = [inchi ? "InChI" : "SMILES"]
    features.each { |feature| header << feature.title }
    csv << header
    compounds.zip(data_entries).each do |cpd, row|
      csv << [inchi ? cpd.inchi : cpd.smiles] + row
    end
  end
end
=begin
RDF_FORMATS.each do |format|
# redefine rdf parse methods for all formats e.g. parse_rdfxml
send :define_method, "parse_#{format}".to_sym do |rdf|
# TODO: parse ordered dataset
# TODO: parse data entries
# TODO: parse metadata
@rdf = RDF::Graph.new
RDF::Reader.for(format).new(rdf) do |reader|
reader.each_statement{ |statement| @rdf << statement }
end
query = RDF::Query.new({ :uri => { RDF.type => RDF::OT.Compound } })
@data["compounds"] = query.execute(@rdf).collect { |solution| OpenTox::Compound.new solution.uri }
query = RDF::Query.new({ :uri => { RDF.type => RDF::OT.Feature } })
@data["features"] = query.execute(@rdf).collect { |solution| OpenTox::Feature.new solution.uri }
@data["compounds"].each_with_index do |c,i|
@data["features"].each_with_index do |f,j|
end
end
end
# redefine rdf serialization methods
send :define_method, "to_#{format}".to_sym do
@data[RDF.type] = [RDF::OT.Dataset, RDF::OT.OrderedDataset]
create_rdf
@data["features"].each_with_index do |feature,i|
@rdf << [RDF::URI.new(feature.uri), RDF::URI.new(RDF.type), RDF::URI.new(RDF::OT.Feature)]
@rdf << [RDF::URI.new(feature.uri), RDF::URI.new(RDF::OLO.index), RDF::Literal.new(i)]
end
@data["compounds"].each_with_index do |compound,i|
@rdf << [RDF::URI.new(compound.uri), RDF::URI.new(RDF.type), RDF::URI.new(RDF::OT.Compound)]
if defined? @neighbors and neighbors.include? compound
@rdf << [RDF::URI.new(compound.uri), RDF::URI.new(RDF.type), RDF::URI.new(RDF::OT.Neighbor)]
end
@rdf << [RDF::URI.new(compound.uri), RDF::URI.new(RDF::OLO.index), RDF::Literal.new(i)]
data_entry_node = RDF::Node.new
@rdf << [RDF::URI.new(@data["uri"]), RDF::URI.new(RDF::OT.dataEntry), data_entry_node]
@rdf << [data_entry_node, RDF::URI.new(RDF.type), RDF::URI.new(RDF::OT.DataEntry)]
@rdf << [data_entry_node, RDF::URI.new(RDF::OLO.index), RDF::Literal.new(i)]
@rdf << [data_entry_node, RDF::URI.new(RDF::OT.compound), RDF::URI.new(compound.uri)]
@data["data_entries"][i].each_with_index do |value,j|
value_node = RDF::Node.new
@rdf << [data_entry_node, RDF::URI.new(RDF::OT.values), value_node]
@rdf << [value_node, RDF::URI.new(RDF::OT.feature), RDF::URI.new(@data["features"][j].uri)]
@rdf << [value_node, RDF::URI.new(RDF::OT.value), RDF::Literal.new(value)]
end
end
RDF::Writer.for(format).buffer do |writer|
writer << @rdf
end
end
end
=end
# TODO: fix bug that affects data_entry positions # DG: who wrotes this comment ?
# Serialises the dataset as an N-Triples string, typed as both OT.Dataset and
# OT.OrderedDataset. Hand-rolled string building instead of the generic RDF
# writer, kept for performance. OLO.index triples carry explicit xsd:integer
# typing because sorting at the dataset service does not work without it.
# NOTE(review): the feature/compound loops call .uri on @data["features"] /
# @data["compounds"], i.e. they expect objects, while the lazy loaders store
# plain uri strings — presumably only datasets built via add_data_entry
# serialise correctly here; verify against callers.
# @return [String] N-Triples serialisation
def to_ntriples # redefined string version for better performance
ntriples = ""
@data[RDF.type] = [ RDF::OT.Dataset, RDF::OT.OrderedDataset ]
# plain metadata: uri-valued objects become resources, everything else literals
@data.each do |predicate,values|
[values].flatten.each do |value|
URI.valid?(value) ? value = "<#{value}>" : value = "\"#{value}\""
ntriples << "<#{@data["uri"]}> <#{predicate}> #{value} .\n" #\n"
end
end
# parameters as blank nodes; double quotes inside values become single quotes
@parameters.each_with_index do |parameter,i|
p_node = "_:parameter"+ i.to_s
ntriples << "<#{@data["uri"]}> <#{RDF::OT.parameters}> #{p_node} .\n"
ntriples << "#{p_node} <#{RDF.type}> <#{RDF::OT.Parameter}> .\n"
parameter.each { |k,v| ntriples << "#{p_node} <#{k}> \"#{v.to_s.tr('"', '\'')}\" .\n" }
end
@data["features"].each_with_index do |feature,i|
ntriples << "<#{feature.uri}> <#{RDF.type}> <#{RDF::OT.Feature}> .\n"
ntriples << "<#{feature.uri}> <#{RDF::OLO.index}> \"#{i}\"^^<http://www.w3.org/2001/XMLSchema#integer> .\n" # sorting at dataset service does not work without type information
end
@data["compounds"].each_with_index do |compound,i|
ntriples << "<#{compound.uri}> <#{RDF.type}> <#{RDF::OT.Compound}> .\n"
if defined? @neighbors and neighbors.include? compound
ntriples << "<#{compound.uri}> <#{RDF.type}> <#{RDF::OT.Neighbor}> .\n"
end
ntriples << "<#{compound.uri}> <#{RDF::OLO.index}> \"#{i}\"^^<http://www.w3.org/2001/XMLSchema#integer> .\n" # sorting at dataset service does not work without type information
# one blank node per compound row, linked to the dataset
data_entry_node = "_:dataentry"+ i.to_s
ntriples << "<#{@data["uri"]}> <#{RDF::OT.dataEntry}> #{data_entry_node} .\n"
ntriples << "#{data_entry_node} <#{RDF.type}> <#{RDF::OT.DataEntry}> .\n"
ntriples << "#{data_entry_node} <#{RDF::OLO.index}> \"#{i}\"^^<http://www.w3.org/2001/XMLSchema#integer> .\n" # sorting at dataset service does not work without type information
ntriples << "#{data_entry_node} <#{RDF::OT.compound}> <#{compound.uri}> .\n"
# rows that were never filled are stored as nil and skipped entirely
@data["data_entries"][i].each_with_index do |value,j|
value_node = data_entry_node+ "_value"+ j.to_s
ntriples << "#{data_entry_node} <#{RDF::OT.values}> #{value_node} .\n"
ntriples << "#{value_node} <#{RDF::OT.feature}> <#{@data["features"][j].uri}> .\n"
ntriples << "#{value_node} <#{RDF::OT.value}> \"#{value}\" .\n"
end unless @data["data_entries"][i].nil?
end
ntriples
end
# Methods for validation service
# create a new dataset with the specified compounds and features
# @param compound_indices [Array] compound indices (integers)
# @param feats [Array] feature objects, or nil to reuse this dataset's features
# @param metadata [Hash] metadata for the new dataset
# @return [OpenTox::Dataset] the new dataset (saved at the dataset service)
def split( compound_indices, feats, metadata)
  # Integer instead of the deprecated Fixnum (removed in Ruby 2.4+);
  # every Fixnum is an Integer, so this check stays backward compatible
  bad_request_error "Dataset.split : Please give compounds as indices" if compound_indices.size==0 or !compound_indices[0].is_a?(Integer)
  bad_request_error "Dataset.split : Please give features as feature objects (given: #{feats})" if feats!=nil and feats.size>0 and !feats[0].is_a?(OpenTox::Feature)
  dataset = OpenTox::Dataset.new
  dataset.metadata = metadata
  dataset.features = (feats ? feats : self.features)
  compound_indices.each do |c_idx|
    row = [ self.compounds[c_idx] ]
    # missing data entry rows yield nil values
    dataset.features.each_with_index.each do |f,f_idx|
      row << (self.data_entries[c_idx] ? self.data_entries[c_idx][f_idx] : nil)
    end
    dataset << row
  end
  dataset.put
  dataset
end
# maps a compound-index from another dataset to a compound-index from this dataset
# mapping works as follows:
# (compound c is the compound identified by the compound-index of the other dataset)
# * c occurs only once in this dataset? map compound-index of other dataset to index in this dataset
# * c occurs >1 in this dataset?
# ** number of occurences is equal in both datasets? assume order is preserved(!) and map accordingly
# ** number of occurences is not equal in both datasets? cannot map, raise error
# @param dataset [OpenTox::Dataset] dataset that should be mapped to this dataset (fully loaded)
# @param compound_index [Fixnum], corresponding to dataset
def compound_index( dataset, compound_index )
  compound_uri = dataset.compounds[compound_index]
  self_indices = compound_indices(compound_uri)
  return nil if self_indices.nil?
  dataset_indices = dataset.compound_indices(compound_uri)
  if self_indices.size == 1
    # unambiguous: the compound occurs exactly once in this dataset
    self_indices.first
  elsif self_indices.size == dataset_indices.size
    # we do assume that the order is preserved (i.e., the nth occurrences in both datasets are mapped to each other)!
    self_indices[dataset_indices.index(compound_index)]
  else
    raise "cannot map compound #{compound_uri} from dataset #{dataset.uri} to dataset #{uri}, "+
      "compound occurs #{dataset_indices.size} times and #{self_indices.size} times"
  end
end
# returns the indices of the compound in the dataset
# @param compound_uri [String]
# @return [Array] compound index (position) of the compound in the dataset, array-size is 1 unless multiple occurences
def compound_indices( compound_uri )
  unless defined?(@cmp_indices) and @cmp_indices.has_key?(compound_uri)
    # (re)build the uri -> [indices] lookup; calling compounds triggers lazy loading
    @cmp_indices = {}
    compounds.size.times do |i|
      c_uri = @data["compounds"][i]
      (@cmp_indices[c_uri] ||= []) << i
    end
  end
  @cmp_indices[compound_uri]
end
# returns compound feature value using the compound-index and the feature_uri
# @param compound_index [Fixnum] row index into data_entries
# @param feature_uri [String]
# @return [Object] the stored value, or nil if the row is missing
def data_entry_value(compound_index, feature_uri)
data_entries(true) if @data["data_entries"].empty?
# NOTE(review): @data["features"] normally holds plain uri strings (see
# #features), which do not respond to #uri — this collect presumably only
# works when features were stored as objects (e.g. via add_data_entry);
# verify against callers.
col = @data["features"].collect{|f| f.uri}.index feature_uri
@data["data_entries"][compound_index] ? @data["data_entries"][compound_index][col] : nil
end
end
end
|