diff options
author    | mr <mr@mrautenberg.de> | 2010-12-14 12:45:03 +0100
----------|------------------------|---------------------------
committer | mr <mr@mrautenberg.de> | 2010-12-14 12:45:03 +0100
commit    | 9487e82a1bd6b6cf42512d6bfed43194c109b114 (patch)
tree      | 02afc904147511353b8d72626371be2e6ee793c1
parent    | 0ff6a589e691b88a46566fafadee5e4ff4c53bb6 (diff)
remove token_id from saving in table
-rw-r--r-- | fminer.rb | 16
-rw-r--r-- | lazar.rb  |  8
2 files changed, 12 insertions, 12 deletions
@@ -95,9 +95,9 @@ post '/fminer/bbrc/?' do
       { DC.title => "prediction_feature", OT.paramValue => params[:prediction_feature] }
     ]
   })
-  feature_dataset.token_id = params[:token_id] if params[:token_id]
-  feature_dataset.token_id = CGI.unescape(request.env["HTTP_TOKEN_ID"]) if !feature_dataset.token_id and request.env["HTTP_TOKEN_ID"]
-  feature_dataset.save
+  token_id = params[:token_id] if params[:token_id]
+  token_id = CGI.unescape(request.env["HTTP_TOKEN_ID"]) if !token_id and request.env["HTTP_TOKEN_ID"]
+  feature_dataset.save(token_id)
   id = 1 # fminer start id is not 0
   compounds = []
@@ -204,7 +204,7 @@ post '/fminer/bbrc/?' do
       ids.each { |id| feature_dataset.add(compounds[id], feature_uri, true)}
     end
   end
-  feature_dataset.save
+  feature_dataset.save(token_id)
   feature_dataset.uri
 end
 response['Content-Type'] = 'text/uri-list'
@@ -252,9 +252,9 @@ post '/fminer/last/?' do
       { DC.title => "prediction_feature", OT.paramValue => params[:prediction_feature] }
     ]
   })
-  feature_dataset.token_id = params[:token_id] if params[:token_id]
-  feature_dataset.token_id = CGI.unescape(request.env["HTTP_TOKEN_ID"]) if !feature_dataset.token_id and request.env["HTTP_TOKEN_ID"]
-  feature_dataset.save
+  token_id = params[:token_id] if params[:token_id]
+  token_id = CGI.unescape(request.env["HTTP_TOKEN_ID"]) if !token_id and request.env["HTTP_TOKEN_ID"]
+  feature_dataset.save(token_id)
   id = 1 # fminer start id is not 0
   compounds = []
@@ -350,7 +350,7 @@ post '/fminer/last/?' do
   end
   ids.each { |id| feature_dataset.add(compounds[id], feature_uri, true)}
 end
-feature_dataset.save
+feature_dataset.save(token_id)
 feature_dataset.uri
 end
 response['Content-Type'] = 'text/uri-list'
@@ -50,8 +50,8 @@ post '/lazar/?' do
 task = OpenTox::Task.create("Create lazar model",url_for('/lazar',:full)) do |task|
   lazar = OpenTox::Model::Lazar.new
-  lazar.token_id = params[:token_id] if params[:token_id]
-  lazar.token_id = request.env["HTTP_TOKEN_ID"] if !lazar.token_id and request.env["HTTP_TOKEN_ID"]
+  token_id = params[:token_id] if params[:token_id]
+  token_id = request.env["HTTP_TOKEN_ID"] if !token_id and request.env["HTTP_TOKEN_ID"]
   lazar.min_sim = params[:min_sim] if params[:min_sim]
   if params[:feature_dataset_uri]
@@ -71,7 +71,7 @@ post '/lazar/?' do
     halt 404, "External feature generation services not yet supported"
   end
   feature_dataset_uri = OpenTox::Algorithm::Generic.new(feature_generation_uri).run(params).to_s
-  training_features = OpenTox::Dataset.new(feature_dataset_uri, lazar.token_id)
+  training_features = OpenTox::Dataset.new(feature_dataset_uri)
 end
 training_features.load_all
@@ -140,7 +140,7 @@ post '/lazar/?' do
   {DC.title => "feature_generation_uri", OT.paramValue => feature_generation_uri}
 ]
-model_uri = lazar.save
+model_uri = lazar.save(token_id)
 LOGGER.info model_uri + " created #{Time.now}"
 model_uri
 end