author    Martin Gütlein <martin.guetlein@gmail.com>  2009-11-24 10:26:17 +0100
committer Martin Gütlein <martin.guetlein@gmail.com>  2009-11-24 10:26:17 +0100
commit    db65b03dc761f7788dad5bdbfd9e0d7e8376f6cb (patch)
tree      108a823cdd26b0f47360d4a3f45502e52735150c
parent    199a350dfda66a918bd52b59e7f511a9cc19fd63 (diff)

add prediction_feature param
-rw-r--r--  EXAMPLES                              43
-rw-r--r--  application.rb                        31
-rw-r--r--  lib/prediction_util.rb                 9
-rw-r--r--  lib/validation_db.rb                   1
-rw-r--r--  report/environment.rb                  6
-rw-r--r--  report/report_application.rb           2
-rw-r--r--  report/report_format.rb                2
-rw-r--r--  report/report_service.rb               5
-rw-r--r--  report/report_test.rb                  5
-rw-r--r--  report/validation_access.rb           22
-rw-r--r--  report/validation_data.rb              2
-rw-r--r--  validation/validation_application.rb  39
-rw-r--r--  validation/validation_service.rb      24
-rw-r--r--  validation/validation_test.rb         57
14 files changed, 176 insertions, 72 deletions
diff --git a/EXAMPLES b/EXAMPLES
new file mode 100644
index 0000000..8a9f262
--- /dev/null
+++ b/EXAMPLES
@@ -0,0 +1,43 @@
+API-Definition
+====================================
+
+see http://www.opentox.org/dev/apis
+
+
+API-Examples
+====================================
+
+get list of available validations
+------------------------------------
+
+>>> curl <validation_service>/validations
+
+result (accept-header: application/rdf+xml)
+<<< TODO
+
+result (accept-header: text/uri-list)
+<<< <validation_service>/validation/<validation_id>
+<<< <validation_service>/validation/validation_id_i
+<<< <validation_service>/validation/validation_id_j
+
+validate model on test-dataset
+------------------------------------
+
+>>> curl -X POST -d model_uri="<model_service>/model/<model_id>" \
+ -d dataset_uri="<dataset_service>/dataset/<dataset_id>" \
+ <validation_service>/validation
+
+result (accept-header: application/rdf+xml)
+<<< TODO
+
+result (accept-header: text/uri-list)
+<<< <validation_service>/validation/<id>
+
+
+validate an algorithm on a training- and test-dataset
+------------------------------------
+
+
+
+
+
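The last EXAMPLES section above is still empty. As a placeholder, here is a minimal Ruby sketch of that call, based on the POST /validation route changed further down in validation/validation_application.rb; the <...> URIs are placeholders to be substituted like in the other examples:

    require 'rest_client'

    # validate an algorithm on a training- and test-dataset;
    # prediction_feature is the parameter added by this commit
    validation_uri = RestClient.post "<validation_service>/validation",
      :algorithm_uri        => "<algorithm_service>/algorithm/<algorithm_id>",
      :training_dataset_uri => "<dataset_service>/dataset/<training_dataset_id>",
      :test_dataset_uri     => "<dataset_service>/dataset/<test_dataset_id>",
      :prediction_feature   => "classification"
    puts validation_uri   # the service answers with the URI of the new validation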
diff --git a/application.rb b/application.rb
index df65cfe..5c1d027 100644
--- a/application.rb
+++ b/application.rb
@@ -3,4 +3,35 @@ load "validation/validation_application.rb"
load "report/report_application.rb"
+[ 'rubygems', 'sinatra', 'sinatra/url_for' ].each do |lib|
+ require lib
+end
+get '/?' do
+
+ transform_example
+end
+
+private
+def transform_example
+
+ file = File.new("EXAMPLES", "r")
+ res = ""
+ while (line = file.gets)
+ res += line
+ end
+ file.close
+
+ sub = { "validation_service" => url_for("", :full),
+ #"validation_id" => "??",
+ #"model_service" => "??",
+ #"model_id" => "??",
+ #"dataset_service" => "??",
+ "dataset_id" => "Hamster Carcenogenicity"}
+
+ sub.each do |k,v|
+ res.gsub!(/<#{k}>/,v)
+ end
+
+ res
+end
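For illustration, a small sketch of the substitution that transform_example performs on the EXAMPLES text; the service URI is made up:

    text = "get the list via <validation_service>/validations"
    sub  = { "validation_service" => "http://localhost:4007" }   # illustrative value
    sub.each { |k, v| text.gsub!(/<#{k}>/, v) }
    puts text   # => "get the list via http://localhost:4007/validations"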
diff --git a/lib/prediction_util.rb b/lib/prediction_util.rb
index a01086d..cdd4eb0 100644
--- a/lib/prediction_util.rb
+++ b/lib/prediction_util.rb
@@ -7,17 +7,18 @@ module Lib
class Predictions
- attr_accessor :predicted_values, :actual_values, :confidence_values, :compounds
+ attr_accessor :prediction_feature, :predicted_values, :actual_values, :confidence_values, :compounds
# pending: only classification supported so far
- def initialize(test_dataset_uri, prediction_dataset_uri)
+ def initialize(prediction_feature, test_dataset_uri, prediction_dataset_uri)
- LOGGER.debug("loading prediciton via test-dateset:'"+test_dataset_uri.to_s+"' and prediction-dataset:'"+prediction_dataset_uri.to_s+"'")
+ LOGGER.debug("loading prediciton via test-dateset:'"+test_dataset_uri.to_s+"' and prediction-dataset:'"+prediction_dataset_uri.to_s+"', prediction_feature: '"+prediction_feature.to_s+"'")
test_dataset = OpenTox::Dataset.find(:uri => test_dataset_uri)
prediction_dataset = OpenTox::Dataset.find(:uri => prediction_dataset_uri)
raise "test dataset not found: "+test_dataset_uri.to_s unless test_dataset
raise "prediction dataset not found: "+prediction_dataset_uri.to_s unless prediction_dataset
+ @prediction_feature = prediction_feature
@predicted_values = []
@actual_values = []
@@ -30,7 +31,7 @@ module Lib
{prediction_dataset => @predicted_values, test_dataset => @actual_values}.each do |d, v|
d.features(c).each do |a|
- case OpenTox::Feature.new(:uri => a.uri).value('classification').to_s
+ case OpenTox::Feature.new(:uri => a.uri).value(prediction_feature).to_s
when 'true'
v.push(1.0)
when 'false'
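A minimal sketch of the new constructor call, assuming the two dataset URIs point at an existing test- and prediction-dataset; "classification" is the feature name used elsewhere in this commit:

    load "lib/prediction_util.rb"

    test_dataset_uri       = "<dataset_service>/dataset/<test_dataset_id>"        # placeholder
    prediction_dataset_uri = "<dataset_service>/dataset/<prediction_dataset_id>"  # placeholder

    predictions = Lib::Predictions.new("classification", test_dataset_uri, prediction_dataset_uri)
    stats = predictions.compute_prediction_stats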
diff --git a/lib/validation_db.rb b/lib/validation_db.rb
index 05e41a7..bfebb40 100644
--- a/lib/validation_db.rb
+++ b/lib/validation_db.rb
@@ -32,6 +32,7 @@ module Lib
property :algorithm_uri, String, :length => 255
property :dataset_uri, String, :length => 255
property :num_folds, Integer, :default => 10
+ property :prediction_feature, String, :length => 255
property :stratified, Boolean, :default => false
property :random_seed, Integer, :default => 1
property :finished, Boolean, :default => false
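With the new property, the feature to predict is stored alongside the other settings; a sketch, assuming the surrounding class is the DataMapper Crossvalidation model:

    cv = Lib::Crossvalidation.new(
      :dataset_uri        => "<dataset_service>/dataset/<dataset_id>",       # placeholder
      :algorithm_uri      => "<algorithm_service>/algorithm/<algorithm_id>", # placeholder
      :prediction_feature => "classification",
      :num_folds          => 10)
    cv.save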
diff --git a/report/environment.rb b/report/environment.rb
index 6a449b9..cbed3de 100644
--- a/report/environment.rb
+++ b/report/environment.rb
@@ -27,7 +27,9 @@ load "report/external/mimeparse.rb"
load "lib/prediction_util.rb"
-LOGGER = Logger.new(STDOUT)
-LOGGER.datetime_format = "%Y-%m-%d %H:%M:%S "
+unless(defined? LOGGER)
+ LOGGER = Logger.new(STDOUT)
+ LOGGER.datetime_format = "%Y-%m-%d %H:%M:%S "
+end
diff --git a/report/report_application.rb b/report/report_application.rb
index 4cba0ae..83250e8 100644
--- a/report/report_application.rb
+++ b/report/report_application.rb
@@ -2,7 +2,7 @@ load "report/environment.rb"
def perform
begin
- $rep = Reports::ReportService.new(url_for("", :full)) unless $rep
+ $rep = Reports::ReportService.new(url_for("/report", :full)) unless $rep
yield( $rep )
rescue Reports::NotFound => ex
halt 404, ex.message
diff --git a/report/report_format.rb b/report/report_format.rb
index 6d3adcd..d58a1f4 100644
--- a/report/report_format.rb
+++ b/report/report_format.rb
@@ -14,7 +14,7 @@ module Reports::ReportFormat
RF_HTML = "html"
RF_PDF = "pdf"
REPORT_FORMATS = [RF_XML, RF_HTML, RF_PDF]
- CONTENT_TYPES = {"text/xml"=>RF_XML,"text/html"=>RF_HTML,"application/pdf"=>RF_PDF}
+ CONTENT_TYPES = {"*/xml"=>RF_XML,"*/html"=>RF_HTML,"*/pdf"=>RF_PDF}
# returns report-format, according to header value
def self.get_format(accept_header_value)
diff --git a/report/report_service.rb b/report/report_service.rb
index 2ff5415..364ac85 100644
--- a/report/report_service.rb
+++ b/report/report_service.rb
@@ -48,12 +48,13 @@ module Reports
check_report_type(type)
#step 0.5: replace cv-uris with list of validation-uris
- LOGGER.debug "validation uri list '"+uri_list.inspect+"'"
+ raise Reports::BadRequest.new("validation uri_list missing") unless uri_list
+ LOGGER.debug "validation uri_list: '"+uri_list.inspect+"'"
uri_list = Reports.validation_access.resolve_cv_uris(uri_list)
# step1: load validations
validation_set = Reports::ValidationSet.new(uri_list)
- raise Reports::BadRequest.new("no validations found") unless validation_set and validation_set.size > 0
+ raise Reports::BadRequest.new("cannot get validations from uri_list '"+uri_list.inspect+"'") unless validation_set and validation_set.size > 0
LOGGER.debug "loaded "+validation_set.size.to_s+" validation/s"
#step 2: create report of type
diff --git a/report/report_test.rb b/report/report_test.rb
index 8822568..9ee9ce5 100644
--- a/report/report_test.rb
+++ b/report/report_test.rb
@@ -148,14 +148,15 @@ class Reports::ReportServiceTest < Test::Unit::TestCase
val_params = {
:dataset_uri => data_uri,
:algorithm_uri => ws_class_alg,
- :split_ratio=>0.9 }
+ :split_ratio=>0.9,
+ :prediction_feature => "classification",}
val_params[:feature_service_uri] = ws_feat_alg if ws_feat_alg
RestClient.post WS_VAL+"/validation/training_test_split", val_params
end
def create_cross_validation(data_uri, ws_class_alg=WS_CLASS_ALG, ws_feat_alg=WS_FEATURE_ALG)
puts "cross-validating"
- ext("curl -X POST -d dataset_uri="+data_uri+" -d algorithm_uri="+ws_class_alg+
+ ext("curl -X POST -d dataset_uri="+data_uri+" -d algorithm_uri="+ws_class_alg+" -d prediction_feature=classification"+
(ws_feat_alg ? " -d feature_service_uri="+ws_feat_alg : "")+
" "+WS_VAL+"/crossvalidation",nil)
end
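For comparison, the same crossvalidation request as the curl call above, sketched with RestClient in the style of the training/test-split helper of this test file:

    def create_cross_validation_via_restclient(data_uri, ws_class_alg=WS_CLASS_ALG, ws_feat_alg=WS_FEATURE_ALG)
      cv_params = { :dataset_uri        => data_uri,
                    :algorithm_uri      => ws_class_alg,
                    :prediction_feature => "classification" }
      cv_params[:feature_service_uri] = ws_feat_alg if ws_feat_alg
      RestClient.post WS_VAL+"/crossvalidation", cv_params
    end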
diff --git a/report/validation_access.rb b/report/validation_access.rb
index 5f04472..ed19c14 100644
--- a/report/validation_access.rb
+++ b/report/validation_access.rb
@@ -16,7 +16,7 @@ class Reports::ValidationAccess
raise "not implemented"
end
- def get_predictions( test_dataset_uri, prediction_dataset_uri)
+ def get_predictions( prediction_feature, test_dataset_uri, prediction_dataset_uri)
raise "not implemented"
end
@@ -52,6 +52,10 @@ class Reports::ValidationDB < Reports::ValidationAccess
validation.send("#{p.to_s}=".to_sym, v[p])
end
+ model = OpenTox::Model::LazarClassificationModel.new(v[:model_uri])
+ raise "cannot access model '"+v[:model_uri].to_s+"'" unless model
+ validation.prediction_feature = model.get_prediction_feature
+
{OpenTox::Validation::VAL_CLASS_PROP => OpenTox::Validation::VAL_CLASS_PROPS}.each do |subset_name,subset_props|
subset = v[subset_name]
subset_props.each{ |prop| validation.send("#{prop.to_s}=".to_sym, subset[prop]) } if subset
@@ -68,8 +72,8 @@ class Reports::ValidationDB < Reports::ValidationAccess
end
end
- def get_predictions( test_dataset_uri, prediction_dataset_uri)
- Lib::Predictions.new( test_dataset_uri, prediction_dataset_uri)
+ def get_predictions( prediction_feature, test_dataset_uri, prediction_dataset_uri)
+ Lib::Predictions.new( prediction_feature, test_dataset_uri, prediction_dataset_uri)
end
end
@@ -107,6 +111,10 @@ class Reports::ValidationWebservice < Reports::ValidationAccess
validation.send("#{p}=".to_sym, data[p])
end
+ model = OpenTox::Model::LazarClassificationModel.new(v[:model_uri])
+ raise "cannot access model '"+v[:model_uri].to_s+"'" unless model
+ validation.prediction_feature = model.get_prediction_feature
+
{OpenTox::Validation::VAL_CV_PROP => OpenTox::Validation::VAL_CV_PROPS,
OpenTox::Validation::VAL_CLASS_PROP => OpenTox::Validation::VAL_CLASS_PROPS}.each do |subset_name,subset_props|
subset = data[subset_name]
@@ -130,8 +138,8 @@ class Reports::ValidationWebservice < Reports::ValidationAccess
end
end
- def get_predictions( test_dataset_uri, prediction_dataset_uri)
- Lib::Predictions.new( test_dataset_uri, prediction_dataset_uri)
+ def get_predictions( prediction_feature, test_dataset_uri, prediction_dataset_uri)
+ Lib::Predictions.new( prediction_feature, test_dataset_uri, prediction_dataset_uri)
end
end
@@ -197,6 +205,8 @@ class Reports::ValidationMockLayer < Reports::ValidationAccess
validation.training_dataset_uri = @datasets[@count]
validation.test_dataset_uri = @datasets[@count]
+ validation.prediction_feature = "classification"
+
@count += 1
end
@@ -212,7 +222,7 @@ class Reports::ValidationMockLayer < Reports::ValidationAccess
#validation.CV_dataset_name = @datasets[validation.crossvalidation_id.to_i * NUM_FOLDS]
end
- def get_predictions(test_dataset_uri, prediction_dataset_uri)
+ def get_predictions( prediction_feature, test_dataset_uri, prediction_dataset_uri)
p = Array.new
c = Array.new
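A sketch of how the report layer now obtains the prediction feature from the model before fetching predictions, mirroring the ValidationDB code above; the URIs are placeholders:

    model_uri              = "<model_service>/model/<model_id>"                   # placeholder
    test_dataset_uri       = "<dataset_service>/dataset/<test_dataset_id>"        # placeholder
    prediction_dataset_uri = "<dataset_service>/dataset/<prediction_dataset_id>"  # placeholder

    model = OpenTox::Model::LazarClassificationModel.new(model_uri)
    raise "cannot access model '"+model_uri.to_s+"'" unless model
    prediction_feature = model.get_prediction_feature

    predictions = Reports.validation_access.get_predictions(prediction_feature,
                                                            test_dataset_uri,
                                                            prediction_dataset_uri)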
diff --git a/report/validation_data.rb b/report/validation_data.rb
index 46c5e44..eb609c9 100644
--- a/report/validation_data.rb
+++ b/report/validation_data.rb
@@ -71,7 +71,7 @@ module Reports
LOGGER.info("no predictions available, prediction_dataset_uri not set")
return nil
end
- @predictions = Reports.validation_access.get_predictions( @test_dataset_uri, @prediction_dataset_uri )
+ @predictions = Reports.validation_access.get_predictions( @prediction_feature, @test_dataset_uri, @prediction_dataset_uri )
end
# loads all crossvalidation attributes, of the corresponding cv into this object
diff --git a/validation/validation_application.rb b/validation/validation_application.rb
index c5753b0..dc4a6a6 100644
--- a/validation/validation_application.rb
+++ b/validation/validation_application.rb
@@ -9,8 +9,10 @@ load 'validation/validation_service.rb'
# hack: store self in $sinatra to make url_for method accessible in validation_service
# (before is executed in every rest call, problem is that the request object is not set, until the first rest-call )
before {$sinatra = self unless $sinatra}
-LOGGER = Logger.new(STDOUT)
-LOGGER.datetime_format = "%Y-%m-%d %H:%M:%S "
+unless(defined? LOGGER)
+ LOGGER = Logger.new(STDOUT)
+ LOGGER.datetime_format = "%Y-%m-%d %H:%M:%S "
+end
class Sinatra::Base
# logging halts (!= 202)
@@ -48,13 +50,16 @@ end
post '/crossvalidation/?' do
LOGGER.info "creating crossvalidation "+params.inspect
- halt 400, "alogrithm_uri and/or dataset_uri missing: "+params.inspect unless params[:dataset_uri] and params[:algorithm_uri]
+ halt 400, "dataset_uri missing" unless params[:dataset_uri]
+ halt 400, "algorithm_uri missing" unless params[:algorithm_uri]
+ halt 400, "prediction_feature missing" unless params[:prediction_feature]
cv_params = { :dataset_uri => params[:dataset_uri],
- :algorithm_uri => params[:algorithm_uri] }
+ :algorithm_uri => params[:algorithm_uri],
+ :prediction_feature => params[:prediction_feature] }
[ :num_folds, :random_seed, :stratified ].each{ |sym| cv_params[sym] = params[sym] if params[sym] }
cv = Crossvalidation.new cv_params
cv.create_cv_datasets
- cv.perform_cv params[:feature_service_uri]
+ cv.perform_cv( params[:feature_service_uri])
cv.uri
end
@@ -76,12 +81,15 @@ post '/validation/?' do
v = Validation.new :model_uri => params[:model_uri],
:test_dataset_uri => params[:test_dataset_uri]
v.validate_model
- elsif params[:algorithm_uri] and params[:training_dataset_uri] and params[:test_dataset_uri] and !params[:model_uri]
+ elsif params[:algorithm_uri] and params[:training_dataset_uri] and params[:test_dataset_uri] and params[:prediction_feature] and !params[:model_uri]
v = Validation.new :training_dataset_uri => params[:training_dataset_uri],
- :test_dataset_uri => params[:test_dataset_uri]
- v.validate_algorithm( params[:algorithm_uri], params[:feature_service_uri])
+ :test_dataset_uri => params[:test_dataset_uri]
+ v.validate_algorithm( params[:algorithm_uri], params[:prediction_feature], params[:feature_service_uri])
else
- halt 400, "illegal param combination, use either (model_uri and test_dataset_uri) OR (algorithm_uri and training_dataset_uri and test_dataset_uri): "+params.inspect
+ halt 400, "illegal parameter combination for validation, use either\n"+
+ "* model_uri, test_dataset_uri\n"+
+ "* algorithm_uri, training_dataset_uri, test_dataset_uri, prediction_feature\n"
+ "params given: "+params.inspect
end
v.uri
@@ -90,14 +98,13 @@ end
post '/validation/training_test_split' do
LOGGER.info "creating training test split "+params.inspect
halt 400, "dataset_uri missing" unless params[:dataset_uri]
+ halt 400, "algorithm_uri missing" unless params[:algorithm_uri]
+ halt 400, "prediction_feature missing" unless params[:prediction_feature]
+
params.merge!(ValidationUtil.train_test_dataset_split(params[:dataset_uri], params[:split_ratio], params[:random_seed]))
- if (params[:algorithm_uri])
- v = Validation.new :training_dataset_uri => params[:training_dataset_uri],
- :test_dataset_uri => params[:test_dataset_uri]
- v.validate_algorithm( params[:algorithm_uri], params[:feature_service_uri])
- else
- v = Validation.new :training_dataset_uri => params[:training_dataset_uri], :test_dataset_uri => params[:test_dataset_uri]
- end
+ v = Validation.new :training_dataset_uri => params[:training_dataset_uri],
+ :test_dataset_uri => params[:test_dataset_uri]
+ v.validate_algorithm( params[:algorithm_uri], params[:prediction_feature], params[:feature_service_uri])
v.uri
end
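A sketch of a request against the training/test-split route above, which now requires algorithm_uri and prediction_feature up front; the URIs are placeholders, split_ratio and random_seed stay optional:

    require 'rest_client'

    validation_uri = RestClient.post "<validation_service>/validation/training_test_split",
      :dataset_uri        => "<dataset_service>/dataset/<dataset_id>",   # placeholder
      :algorithm_uri      => "<algorithm_service>/lazar_classification", # placeholder
      :prediction_feature => "classification",
      :split_ratio        => 0.9,
      :random_seed        => 2
    puts validation_uri   # URI of the validation created for the split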
diff --git a/validation/validation_service.rb b/validation/validation_service.rb
index 62bd7a0..c51863f 100644
--- a/validation/validation_service.rb
+++ b/validation/validation_service.rb
@@ -73,9 +73,10 @@ class Validation < Lib::Validation
# validates an algorithm by building a model and validating this model
# PENDING: so far, feature_service_uri is used to construct a second dataset (first is training-dataset)
- def validate_algorithm( algorithm_uri, feature_service_uri=nil )
+ def validate_algorithm( algorithm_uri, prediction_feature, feature_service_uri=nil )
- LOGGER.debug "building model "+algorithm_uri.to_s+" "+feature_service_uri.to_s
+ LOGGER.debug "building model "+algorithm_uri.to_s+" "+prediction_feature.to_s+" "+feature_service_uri.to_s
+    # PENDING: use prediction_feature to build model
params = {}
if feature_service_uri
params[:activity_dataset_uri] = @training_dataset_uri
@@ -100,6 +101,7 @@ class Validation < Lib::Validation
model = OpenTox::Model::LazarClassificationModel.new(@model_uri)
prediction_dataset = OpenTox::Dataset.create!
+ prediction_feature = model.get_prediction_feature
count = 1
benchmark = Benchmark.measure do
@@ -108,7 +110,7 @@ class Validation < Lib::Validation
prediction = model.predict(c)
LOGGER.debug "prediction "+count.to_s+"/"+compounds.size.to_s+" class: "+prediction.classification.to_s+", confidence: "+prediction.confidence.to_s+", compound: "+c.uri.to_s
pred_feature = OpenTox::Feature.new(:name => "prediction",
- :classification => prediction.classification,
+ prediction_feature.to_sym => prediction.classification,
:confidence => prediction.confidence)
prediction_dataset.add({c.uri => [pred_feature.uri]}.to_yaml)
count += 1
@@ -116,7 +118,7 @@ class Validation < Lib::Validation
end
LOGGER.debug "computing prediction stats"
- update :classification_info => Lib::Predictions.new( @test_dataset_uri, prediction_dataset.uri ).compute_prediction_stats
+ update :classification_info => Lib::Predictions.new( prediction_feature, @test_dataset_uri, prediction_dataset.uri ).compute_prediction_stats
update :prediction_dataset_uri => prediction_dataset.uri, :finished => true, :elapsedTimeTesting => benchmark.real
#PENDING cannot estimate cpu time as this is done on another server
end
@@ -154,7 +156,7 @@ class Crossvalidation < Lib::Crossvalidation
LOGGER.debug "perform cv validations"
Validation.all( :crossvalidation_id => id ).each do |v|
- v.validate_algorithm( @algorithm_uri, feature_service_uri )
+ v.validate_algorithm( @algorithm_uri, @prediction_feature, feature_service_uri )
#break
end
end
@@ -164,10 +166,10 @@ class Crossvalidation < Lib::Crossvalidation
# returns true if successful, false otherwise
def copy_cv_datasets()
- equal_cvs = Crossvalidation.all( :dataset_uri => @dataset_uri,
- :num_folds => @num_folds,
- :stratified => @stratified,
- :random_seed => @random_seed ).reject{ |cv| cv.id == @id }
+ equal_params = { :dataset_uri => @dataset_uri, :num_folds => @num_folds,
+ :stratified => @stratified, :random_seed => @random_seed }
+ equal_params[:prediction_feature] = @prediction_feature if @stratified
+ equal_cvs = Crossvalidation.all( equal_params ).reject{ |cv| cv.id == @id }
return false if equal_cvs.size == 0
cv = equal_cvs[0]
@@ -193,8 +195,6 @@ class Crossvalidation < Lib::Crossvalidation
# stores uris in validation objects
def create_new_cv_datasets
- class_feature = "classification"
-
LOGGER.debug "creating datasets for crossvalidation"
orig_dataset = OpenTox::Dataset.find :uri => @dataset_uri
halt 400, "Dataset not found: "+@dataset_uri.to_s unless orig_dataset
@@ -207,7 +207,7 @@ class Crossvalidation < Lib::Crossvalidation
class_compounds = {} # "inactive" => compounds[], "active" => compounds[] ..
shuffled_compounds.each do |c|
orig_dataset.features(c).each do |a|
- value = OpenTox::Feature.new(:uri => a.uri).value(class_feature).to_s
+ value = OpenTox::Feature.new(:uri => a.uri).value(@prediction_feature).to_s
class_compounds[value] = [] unless class_compounds.has_key?(value)
class_compounds[value].push(c)
end
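A sketch of the new validate_algorithm signature, as it is now called from the web routes and the crossvalidation loop; the URIs are placeholders:

    v = Validation.new :training_dataset_uri => "<dataset_service>/dataset/<training_dataset_id>",  # placeholder
                       :test_dataset_uri     => "<dataset_service>/dataset/<test_dataset_id>"       # placeholder
    v.validate_algorithm("<algorithm_service>/lazar_classification",  # algorithm_uri, placeholder
                         "classification",                            # prediction_feature (new)
                         nil)                                         # feature_service_uri, optional
    puts v.uri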
diff --git a/validation/validation_test.rb b/validation/validation_test.rb
index 8c6ae2a..bd40f3d 100644
--- a/validation/validation_test.rb
+++ b/validation/validation_test.rb
@@ -1,6 +1,7 @@
ENV['RACK_ENV'] = 'test'
-load 'validation/validation_application.rb'
+#load 'validation/validation_application.rb'
+load 'application.rb'
require 'test/unit'
require 'rack/test'
@@ -18,11 +19,11 @@ FILE_TRAIN= File.new("data/hamster_carcinogenicity_TRAIN.csv","r")
DATA_TEST="hamster_test"
FILE_TEST=File.new("data/hamster_carcinogenicity_TEST.csv","r")
-#WS_CLASS_ALG=@@config[:services]["opentox-algorithm"]+"lazar_classification" #"localhost:4003/lazar_classification"
-WS_CLASS_ALG=@@config[:services]["opentox-majority"]+"algorithm" #"localhost:4008/algorithm"
+WS_CLASS_ALG=@@config[:services]["opentox-algorithm"]+"lazar_classification" #"localhost:4003/lazar_classification"
+#WS_CLASS_ALG=@@config[:services]["opentox-majority"]+"algorithm" #"localhost:4008/algorithm"
-#WS_FEATURE_ALG=@@config[:services]["opentox-algorithm"]+"fminer" #"localhost:4003/fminer"
-WS_FEATURE_ALG=nil
+WS_FEATURE_ALG=@@config[:services]["opentox-algorithm"]+"fminer" #"localhost:4003/fminer"
+#WS_FEATURE_ALG=nil
class ValidationTest < Test::Unit::TestCase
@@ -71,8 +72,8 @@ class ValidationTest < Test::Unit::TestCase
## 2.times do
#
# num_folds = 9
-# post '/crossvalidation', { :dataset_uri => data_uri,
-# :algorithm_uri => WS_CLASS_ALG, :feature_service_uri => WS_FEATURE_ALG, :num_folds => num_folds, :random_seed => 2 }
+# post '/crossvalidation', { :dataset_uri => data_uri, :algorithm_uri => WS_CLASS_ALG, :prediction_feature => "classification",
+# :feature_service_uri => WS_FEATURE_ALG, :num_folds => num_folds, :random_seed => 2 }
#
# puts "crossvalidation: "+last_response.body
# assert last_response.ok?
@@ -130,31 +131,37 @@ class ValidationTest < Test::Unit::TestCase
# end
# end
#
-# def test_validate_algorithm
+ def test_validate_algorithm
+ begin
+ data_uri_train = upload_data(WS_DATA, DATA_TRAIN, FILE_TRAIN)
+ data_uri_test = upload_data(WS_DATA, DATA_TEST, FILE_TEST)
+ #data_uri_train = WS_DATA+"/"+DATA_TRAIN
+ #data_uri_test = WS_DATA+"/"+DATA_TEST
+ post '/validation', { :training_dataset_uri => data_uri_train, :test_dataset_uri => data_uri_test,
+ :algorithm_uri => WS_CLASS_ALG, :prediction_feature => "classification", :feature_service_uri => WS_FEATURE_ALG}
+ verify_validation
+ ensure
+ delete_resources
+ end
+ end
+
+# def test_split
# begin
-# data_uri_train = upload_data(WS_DATA, DATA_TRAIN, FILE_TRAIN)
-# data_uri_test = upload_data(WS_DATA, DATA_TEST, FILE_TEST)
-# #data_uri_train = WS_DATA+"/"+DATA_TRAIN
-# #data_uri_test = WS_DATA+"/"+DATA_TEST
-# post '/validation', { :training_dataset_uri => data_uri_train, :test_dataset_uri => data_uri_test,
-# :algorithm_uri => WS_CLASS_ALG, :feature_service_uri => WS_FEATURE_ALG}
+# data_uri = upload_data(WS_DATA, DATA, FILE)
+# #data_uri=WS_DATA+"/"+DATA
+# post '/validation/training_test_split', { :dataset_uri => data_uri, :algorithm_uri => WS_CLASS_ALG, :prediction_feature => "classification",
+# :feature_service_uri => WS_FEATURE_ALG, :split_ratio=>0.9, :random_seed=>2}
# verify_validation
# ensure
# delete_resources
# end
# end
- def test_split
- begin
- data_uri = upload_data(WS_DATA, DATA, FILE)
- #data_uri=WS_DATA+"/"+DATA
- post '/validation/training_test_split', { :dataset_uri => data_uri, :algorithm_uri => WS_CLASS_ALG,
- :feature_service_uri => WS_FEATURE_ALG, :split_ratio=>0.9, :random_seed=>2}
- verify_validation
- ensure
- delete_resources
- end
- end
+# def test_nothing
+# puts "testing nothing"
+# get ''
+# puts last_response.body
+# end
private
def verify_validation (delete=true)