1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
|
module OpenTox
module Model
# Base class for lazar models, persisted in the "models" Mongo collection.
# Links a training dataset with a prediction algorithm and the feature
# that is being predicted.
class Model
include OpenTox
include Mongoid::Document
include Mongoid::Timestamps
store_in collection: "models"
field :name, type: String
# provenance: the file that created this model
field :creator, type: String, default: __FILE__
# datasets
field :training_dataset_id, type: BSON::ObjectId
# algorithms
field :prediction_algorithm, type: String
# prediction feature
field :prediction_feature_id, type: BSON::ObjectId
# @return [OpenTox::Dataset] the dataset this model was trained on
def training_dataset
Dataset.find(training_dataset_id)
end
end
# Lazar (lazy structure-activity relationships) model: predicts the value of
# a feature for query compounds from the experimentally measured activities
# of structurally similar ("neighbor") compounds in the training dataset.
class Lazar < Model
  # algorithms
  field :neighbor_algorithm, type: String
  field :neighbor_algorithm_parameters, type: Hash, default: {}

  # Create a lazar model from a training_dataset.
  # @param [OpenTox::Dataset] training_dataset
  # @param [Hash] params persisted model attributes
  # @return [OpenTox::Model::Lazar] Regression or classification model
  def initialize training_dataset, params={}
    super params
    # TODO document convention: the first feature of the training dataset
    # is used as the prediction feature
    prediction_feature = training_dataset.features.first
    # set defaults for empty parameters
    self.prediction_feature_id ||= prediction_feature.id
    self.training_dataset_id ||= training_dataset.id
    self.name ||= "#{training_dataset.name} #{prediction_feature.name}"
    self.neighbor_algorithm_parameters ||= {}
    self.neighbor_algorithm_parameters[:training_dataset_id] = training_dataset.id
    save
    self
  end

  # Predict a single compound.
  # @param [OpenTox::Compound] compound
  # @return [Hash] prediction with :value, :confidence, :neighbors and,
  #   when the query compound occurs in the training data, the keys
  #   :database_activities and :warning
  def predict_compound compound
    prediction_feature = Feature.find prediction_feature_id
    neighbors = compound.send(neighbor_algorithm, neighbor_algorithm_parameters)
    # check for database activities (neighbors may include the query compound itself)
    database_activities = nil
    prediction = {}
    if neighbors.collect{|n| n["_id"]}.include? compound.id
      database_activities = neighbors.select{|n| n["_id"] == compound.id}.first["features"][prediction_feature.id.to_s].uniq
      prediction[:database_activities] = database_activities
      prediction[:warning] = "#{database_activities.size} compounds have been removed from neighbors, because they have the same structure as the query compound."
      neighbors.delete_if{|n| n["_id"] == compound.id}
    end
    # remove neighbors without values for the prediction feature
    neighbors.delete_if{|n| n['features'].empty? or n['features'][prediction_feature.id.to_s] == [nil] }
    if neighbors.empty?
      prediction.merge!({:value => nil,:confidence => nil,:warning => "Could not find similar compounds with experimental data in the training dataset.",:neighbors => []})
    else
      prediction.merge!(Algorithm.run(prediction_algorithm, compound, {:neighbors => neighbors,:training_dataset_id=> training_dataset_id,:prediction_feature_id => prediction_feature.id}))
      # dead `prediction[:neighbors] ||= []` removed: this assignment is unconditional
      prediction[:neighbors] = neighbors
    end
    prediction
  end

  # Predict a compound, an array of compounds or a whole dataset.
  # @param [OpenTox::Compound, Array<OpenTox::Compound>, OpenTox::Dataset] object
  # @return [Hash] a single prediction for a compound
  # @return [Hash] predictions keyed by compound id (String) for an array
  # @return [OpenTox::LazarPrediction] a prediction dataset for a dataset
  def predict object
    training_dataset = Dataset.find training_dataset_id
    # parse data
    compounds = []
    case object.class.to_s
    when "OpenTox::Compound"
      compounds = [object]
    when "Array"
      compounds = object
    when "OpenTox::Dataset"
      compounds = object.compounds
    else
      bad_request_error "Please provide a OpenTox::Compound an Array of OpenTox::Compounds or an OpenTox::Dataset as parameter."
    end
    # make predictions
    predictions = {}
    compounds.each do |c|
      predictions[c.id.to_s] = predict_compound c
      predictions[c.id.to_s][:prediction_feature_id] = prediction_feature_id
    end
    # serialize result
    case object.class.to_s
    when "OpenTox::Compound"
      prediction = predictions[compounds.first.id.to_s]
      prediction[:neighbors].sort!{|a,b| b[1] <=> a[1]} # sort according to similarity
      # BUGFIX: return the single prediction; previously the local above was
      # unused and the whole predictions hash was returned instead
      return prediction
    when "Array"
      return predictions
    when "OpenTox::Dataset"
      # neighbors are not serialized into prediction datasets
      predictions.each{|cid,p| p.delete(:neighbors)}
      # prepare prediction dataset
      measurement_feature = Feature.find prediction_feature_id
      prediction_feature = NumericFeature.find_or_create_by( "name" => measurement_feature.name + " (Prediction)" )
      prediction_dataset = LazarPrediction.new(
        :name => "Lazar prediction for #{prediction_feature.name}",
        :creator => __FILE__,
        :prediction_feature_id => prediction_feature.id
      )
      # BUGFIX: predictions is keyed by compound id string, not by integer
      # position — predictions[i] always returned nil
      compounds.each do |c|
        prediction_dataset.predictions[c.id.to_s] = predictions[c.id.to_s]
      end
      prediction_dataset.save
      return prediction_dataset
    end
  end

  # @return [Array] measured activities of the training dataset for the
  #   prediction feature (one entry per data row)
  def training_activities
    i = training_dataset.feature_ids.index prediction_feature_id
    training_dataset.data_entries.collect{|de| de[i]}
  end
end
# Factory for lazar classification models: weighted majority vote over
# fingerprint neighbors by default.
class LazarClassification < Lazar
  # Create and persist a classification model.
  # @param [OpenTox::Dataset] training_dataset
  # @param [Hash] params optional model parameters
  # @return [OpenTox::Model::LazarClassification]
  def self.create training_dataset, params={}
    model = self.new training_dataset, params
    # defaults — idiom made consistent with LazarRegression (||=)
    model.prediction_algorithm ||= "OpenTox::Algorithm::Classification.weighted_majority_vote"
    model.neighbor_algorithm ||= "fingerprint_neighbors"
    model.neighbor_algorithm_parameters ||= {}
    # fill in missing neighbor parameters without overwriting supplied values
    {
      :type => "MP2D",
      :training_dataset_id => training_dataset.id,
      :min_sim => 0.1
    }.each do |key,value|
      model.neighbor_algorithm_parameters[key] ||= value
    end
    model.save
    model
  end
end
# Factory for lazar regression models: local fingerprint regression over
# fingerprint neighbors by default.
class LazarRegression < Lazar
  # Create and persist a regression model.
  # @param [OpenTox::Dataset] training_dataset
  # @param [Hash] params optional model parameters
  # @return [OpenTox::Model::LazarRegression]
  def self.create training_dataset, params={}
    model = new(training_dataset, params)
    model.neighbor_algorithm ||= "fingerprint_neighbors"
    model.prediction_algorithm ||= "OpenTox::Algorithm::Regression.local_fingerprint_regression"
    model.neighbor_algorithm_parameters ||= {}
    # default neighbor parameters; user-supplied values take precedence
    default_params = {
      :type => "MP2D",
      :training_dataset_id => training_dataset.id,
      :min_sim => 0.1
    }
    default_params.each do |param, default|
      model.neighbor_algorithm_parameters[param] ||= default
    end
    model.save
    model
  end
end
# Ready-made prediction model with metadata (endpoint, species, source,
# unit), linking a lazar model with its repeated crossvalidation.
class Prediction
  include OpenTox
  include Mongoid::Document
  include Mongoid::Timestamps
  # TODO field Validations
  field :endpoint, type: String
  field :species, type: String
  field :source, type: String
  field :unit, type: String
  field :model_id, type: BSON::ObjectId
  field :repeated_crossvalidation_id, type: BSON::ObjectId

  # Delegate prediction to the underlying lazar model.
  # @param [OpenTox::Compound, Array<OpenTox::Compound>, OpenTox::Dataset] object
  def predict object
    Lazar.find(model_id).predict object
  end

  # @return [OpenTox::Dataset] the model's training dataset
  def training_dataset
    model.training_dataset
  end

  # @return [OpenTox::Model::Lazar]
  def model
    Lazar.find model_id
  end

  def repeated_crossvalidation
    RepeatedCrossValidation.find repeated_crossvalidation_id
  end

  def crossvalidations
    repeated_crossvalidation.crossvalidations
  end

  # @return [TrueClass,FalseClass] true if the prediction feature is numeric
  def regression?
    training_dataset.features.first.numeric?
  end

  # @return [TrueClass,FalseClass] true if the prediction feature is nominal
  def classification?
    training_dataset.features.first.nominal?
  end

  # Create a prediction model from a csv file with training data and a
  # json metadata file with the same basename.
  # @param [String] file path to the csv training data
  # @return [OpenTox::Model::Prediction]
  def self.from_csv_file file
    metadata_file = file.sub(/csv$/,"json")
    bad_request_error "No metadata file #{metadata_file}" unless File.exist? metadata_file
    prediction_model = self.new JSON.parse(File.read(metadata_file))
    training_dataset = Dataset.from_csv_file file
    model = nil
    if training_dataset.features.first.nominal?
      model = LazarClassification.create training_dataset
    elsif training_dataset.features.first.numeric?
      model = LazarRegression.create training_dataset
    else
      # ROBUSTNESS: fail with a clear message instead of a NoMethodError on
      # nil when the feature type cannot be determined
      bad_request_error "Prediction feature of #{file} is neither nominal nor numeric."
    end
    prediction_model[:model_id] = model.id
    prediction_model[:repeated_crossvalidation_id] = RepeatedCrossValidation.create(model).id
    prediction_model.save
    prediction_model
  end
end
# Experimental lazar variant for nanoparticles, intended to predict
# toxicity features from physicochemical descriptors.
# NOTE(review): `predict` looks unfinished — it only determines the
# descriptors shared between training particles and the query particle and
# prints them (debug output); no prediction value is calculated yet.
class NanoLazar
include OpenTox
include Mongoid::Document
include Mongoid::Timestamps
store_in collection: "models"
field :name, type: String
# provenance: the file that created this model
field :creator, type: String, default: __FILE__
# datasets
field :training_dataset_id, type: BSON::ObjectId
# algorithms
field :prediction_algorithm, type: String
# prediction feature
field :prediction_feature_id, type: BSON::ObjectId
field :training_particle_ids, type: Array
# Create one NanoLazar model for every toxicity feature that has
# measurements for more than 50 nanoparticles.
# @return [Array] the created (or found) NanoLazar models
def self.create_all
nanoparticles = Nanoparticle.all
toxfeatures = Nanoparticle.all.collect{|np| np.toxicities.keys}.flatten.uniq.collect{|id| Feature.find id}
tox = {}
toxfeatures.each do |t|
tox[t] = nanoparticles.select{|np| np.toxicities.keys.include? t.id.to_s}
end
# keep only features with a sufficiently large training set
tox.select!{|t,nps| nps.size > 50}
tox.collect do |t,nps|
find_or_create_by(:prediction_feature_id => t.id, :training_particle_ids => nps.collect{|np| np.id})
end
end
# Determine the physchem descriptors shared by the training particles and
# the query nanoparticle.
# @param [OpenTox::Nanoparticle] nanoparticle — presumably; verify against callers
def predict nanoparticle
training = training_particle_ids.collect{|id| Nanoparticle.find id}
training_features = training.collect{|t| t.physchem_descriptors.keys}.flatten.uniq
query_features = nanoparticle.physchem_descriptors.keys
common_features = (training_features & query_features)
p common_features # leftover debug print; also the method's return value
end
end
end
end
|