 Rakefile             |   2
 VERSION              |   2
 lib/algorithm.rb     |   7
 lib/r-util.rb        | 123
 lib/stratification.R |  78
 lib/transform.rb     |   4
 opentox-ruby.gemspec | 200
 7 files changed, 268 insertions(+), 148 deletions(-)
diff --git a/Rakefile b/Rakefile
index dddea1b..4d1d0c3 100644
--- a/Rakefile
+++ b/Rakefile
@@ -42,7 +42,7 @@ begin
gem.add_dependency "dm-migrations", "=1.1.0"
gem.add_dependency "dm-validations", "=1.1.0"
gem.add_dependency "dm-sqlite-adapter", "=1.1.0"
- gem.add_dependency "ruby-plot", "=0.6.0"
+ gem.add_dependency "ruby-plot", "=0.6.1"
gem.add_dependency "gsl", "=1.14.7"
gem.add_dependency "statsample", "=1.1.0"
diff --git a/VERSION b/VERSION
index a0cd9f0..fd2a018 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-3.1.0
\ No newline at end of file
+3.1.0
diff --git a/lib/algorithm.rb b/lib/algorithm.rb
index c026c56..9dcf6a8 100644
--- a/lib/algorithm.rb
+++ b/lib/algorithm.rb
@@ -381,10 +381,10 @@ module OpenTox
else
#LOGGER.debug gram_matrix.to_yaml
@r = RinRuby.new(false,false) # global R instance leads to Socket errors after a large number of requests
- @r.eval "set.seed(1)"
@r.eval "suppressPackageStartupMessages(library('caret'))" # requires R packages "caret" and "kernlab"
@r.eval "suppressPackageStartupMessages(library('doMC'))" # requires R packages "multicore"
@r.eval "registerDoMC()" # switch on parallel processing
+ @r.eval "set.seed(1)"
begin
# set data
@@ -417,7 +417,7 @@ module OpenTox
# model + support vectors
LOGGER.debug "Creating R SVM model ..."
- @r.eval <<-EOR
+ train_success = @r.eval <<-EOR
model = train(prop_matrix,y,method="svmradial",tuneLength=8,trControl=trainControl(method="LGOCV",number=10),preProcess=c("center", "scale"))
perf = ifelse ( class(y)!='numeric', max(model$results$Accuracy), model$results[which.min(model$results$RMSE),]$Rsquared )
EOR
@@ -431,6 +431,7 @@ module OpenTox
# censoring
prediction = nil if ( @r.perf.nan? || @r.perf < min_train_performance )
+ prediction = nil unless train_success
LOGGER.debug "Performance: #{sprintf("%.2f", @r.perf)}"
rescue Exception => e
LOGGER.debug "#{e.class}: #{e.message}"
@@ -459,12 +460,12 @@ module OpenTox
# need packs 'randomForest', 'RANN'
@r.eval <<-EOR
- set.seed(1)
suppressPackageStartupMessages(library('caret'))
suppressPackageStartupMessages(library('randomForest'))
suppressPackageStartupMessages(library('RANN'))
suppressPackageStartupMessages(library('doMC'))
registerDoMC()
+ set.seed(1)
acts = read.csv(ds_csv_file, check.names=F)
feats = read.csv(fds_csv_file, check.names=F)
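Two details in the hunks above matter for reproducibility and robustness: set.seed(1) is issued only after registerDoMC() has registered the parallel backend, and the return value of @r.eval on the training block is kept so that a failed training run censors the prediction. A minimal Ruby sketch of that pattern, assuming RinRuby is installed and the R packages caret/doMC are available; the training expression and prediction value below are placeholders, not the caret call from the hunk:

    require 'rinruby'

    r = RinRuby.new(false, false)                 # fresh R instance, no echo, non-interactive
    r.eval "suppressPackageStartupMessages(library('caret'))"
    r.eval "suppressPackageStartupMessages(library('doMC'))"
    r.eval "registerDoMC()"                       # register the parallel backend first
    r.eval "set.seed(1)"                          # then fix the RNG seed for reproducible runs

    # keep eval's boolean result so a failed R block can censor the prediction
    train_success = r.eval "perf <- 0.9"          # placeholder for the real training block
    prediction = 1.0                              # placeholder prediction value
    prediction = nil unless train_success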
diff --git a/lib/r-util.rb b/lib/r-util.rb
index 7163c46..0d4e82c 100644
--- a/lib/r-util.rb
+++ b/lib/r-util.rb
@@ -8,6 +8,18 @@ PACKAGE_DIR = package_dir
require "tempfile"
+class Array
+
+ def check_uniq
+ hash = {}
+ self.each do |x|
+ raise "duplicate #{x}" if hash[x]
+ hash[x] = true
+ end
+ end
+
+end
+
module OpenTox
class RUtil
@@ -75,12 +87,10 @@ module OpenTox
end
# embedds feature values of two datasets into 2D and plots it
- # fast_plot = true -> PCA, fast_plot = false -> SMACOF (iterative optimisation method)
#
def feature_value_plot(files, dataset_uri1, dataset_uri2, dataset_name1, dataset_name2,
- features=nil, fast_plot=true, subjectid=nil, waiting_task=nil)
+ features=nil, subjectid=nil, waiting_task=nil)
- raise "r-package smacof missing" if fast_plot==false and !package_installed?("smacof")
LOGGER.debug("r-util> create feature value plot")
d1 = OpenTox::Dataset.find(dataset_uri1,subjectid)
d2 = OpenTox::Dataset.find(dataset_uri2,subjectid)
@@ -102,17 +112,13 @@ module OpenTox
@r.eval "split <- c(rep(0,nrow(#{df1})),rep(1,nrow(#{df2})))"
@r.names = [dataset_name1, dataset_name2]
LOGGER.debug("r-util> - convert data to 2d")
- @r.eval "df.2d <- plot_pre_process(df, method='#{(fast_plot ? "pca" : "smacof")}')"
+ #@r.eval "save.image(\"/tmp/image.R\")"
+ @r.eval "df.2d <- plot_pre_process(df, method='sammon')"
waiting_task.progress(75) if waiting_task
- if fast_plot
- info = "main='PCA-Embedding of #{features.size} features',xlab='PC1',ylab='PC2'"
- else
- info = "main='SMACOF-Embedding of #{features.size} features',xlab='x',ylab='y'"
- end
LOGGER.debug("r-util> - plot data")
plot_to_files(files) do |file|
- @r.eval "plot_split( df.2d, split, names, #{info})"
+ @r.eval "plot_split( df.2d, split, names, main='Sammon embedding of #{features.size} features',xlab='x',ylab='y')"
end
end
@@ -170,19 +176,68 @@ module OpenTox
end
end
- # stratified splits a dataset into two dataset the feature values
+ # stratified split of a dataset into two datasets according to the feature values
+ # all features are taken into account unless <split_features> is given
+ # returns two datasets
+ def stratified_split( dataset, metadata={}, missing_values="NA", pct=0.3, subjectid=nil, seed=42, split_features=nil )
+ stratified_split_internal( dataset, metadata, missing_values, nil, pct, subjectid, seed, split_features )
+ end
+
+ # stratified split of a dataset into k datasets according to the feature values
# all features are taken into account unless <split_features> is given
- def stratified_split( dataset, missing_values="NA", pct=0.3, subjectid=nil, seed=42, split_features=nil )
+ # returns two arrays of datasets
+ def stratified_k_fold_split( dataset, metadata={}, missing_values="NA", num_folds=10, subjectid=nil, seed=42, split_features=nil )
+ stratified_split_internal( dataset, metadata, missing_values, num_folds, nil, subjectid, seed, split_features )
+ end
+
+ private
+ def stratified_split_internal( dataset, metadata={}, missing_values="NA", num_folds=nil, pct=nil, subjectid=nil, seed=42, split_features=nil )
+ raise "internal error" if num_folds!=nil and pct!=nil
+ k_fold_split = num_folds!=nil
+ if k_fold_split
+ raise "num_folds not a fixnum: #{num_folds}" unless num_folds.is_a?(Fixnum)
+ else
+ raise "pct is not a numeric: #{pct}" unless pct.is_a?(Numeric)
+ end
raise "not a loaded ot-dataset" unless dataset.is_a?(OpenTox::Dataset) and dataset.compounds.size>0 and dataset.features.size>0
+ raise "missing_values=#{missing_values}" unless missing_values.is_a?(String) or missing_values==0
+ raise "subjectid=#{subjectid}" unless subjectid==nil or subjectid.is_a?(String)
LOGGER.debug("r-util> apply stratified split to #{dataset.uri}")
- df = dataset_to_dataframe( dataset, missing_values, subjectid, split_features )
+ df = dataset_to_dataframe( dataset, missing_values, subjectid)
@r.eval "set.seed(#{seed})"
- @r.eval "split <- stratified_split(#{df}, ratio=#{pct})"
- split = @r.pull 'split'
- split = split.collect{|s| 1-s.to_i} # reverse 1s and 0s, as 1 means selected, but 0 will be first set
- split_to_datasets( df, split, subjectid )
+ str_split_features = ""
+ if split_features
+ @r.split_features = split_features if split_features
+ str_split_features = "colnames=split_features"
+ end
+ @r.eval "save.image(\"/tmp/image.R\")"
+
+ if k_fold_split
+ @r.eval "split <- stratified_k_fold_split(#{df}, num_folds=#{num_folds}, #{str_split_features})"
+ split = @r.pull 'split'
+ train = []
+ test = []
+ num_folds.times do |f|
+ datasetname = 'dataset fold '+(f+1).to_s+' of '+num_folds.to_s
+ metadata[DC.title] = "training "+datasetname
+ train << split_to_dataset( df, split, metadata, subjectid ){ |i| i!=(f+1) }
+ metadata[DC.title] = "test "+datasetname
+ test << split_to_dataset( df, split, metadata, subjectid ){ |i| i==(f+1) }
+ end
+ return train, test
+ else
+ puts "split <- stratified_split(#{df}, ratio=#{pct}, #{str_split_features})"
+ @r.eval "split <- stratified_split(#{df}, ratio=#{pct}, #{str_split_features})"
+ split = @r.pull 'split'
+ metadata[DC.title] = "Training dataset split of "+dataset.uri
+ train = split_to_dataset( df, split, metadata, subjectid ){ |i| i==1 }
+ metadata[DC.title] = "Test dataset split of "+dataset.uri
+ test = split_to_dataset( df, split, metadata, subjectid ){ |i| i==0 }
+ return train, test
+ end
end
+ public
# dataset should be loaded completely (use Dataset.find)
# takes duplicates into account
@@ -212,9 +267,13 @@ module OpenTox
features = dataset.features.keys.sort
end
compounds = []
+ compound_names = []
dataset.compounds.each do |c|
+ count = 0
num_compounds[c].times do |i|
compounds << c
+ compound_names << "#{c}$#{count}"
+ count+=1
end
end
@@ -238,7 +297,7 @@ module OpenTox
end
end
df_name = "df_#{dataset.uri.split("/")[-1].split("?")[0]}"
- assign_dataframe(df_name,d_values,compounds,features)
+ assign_dataframe(df_name,d_values,compound_names,features)
# set dataframe column types accordingly
f_count = 1 #R starts at 1
@@ -264,16 +323,18 @@ module OpenTox
# converts a dataframe into a dataset (a new dataset is created at the dataset webservice)
# this is only possible if a superset of the dataframe was created by dataset_to_dataframe (metadata and URIs!)
- def dataframe_to_dataset( df, subjectid=nil )
- dataframe_to_dataset_indices( df, subjectid, nil)
+ def dataframe_to_dataset( df, metadata={}, subjectid=nil )
+ dataframe_to_dataset_indices( df, metadata, subjectid, nil)
end
private
- def dataframe_to_dataset_indices( df, subjectid=nil, compound_indices=nil )
+ def dataframe_to_dataset_indices( df, metadata={}, subjectid=nil, compound_indices=nil )
raise unless @@feats[df].size>0
- values, compounds, features = pull_dataframe(df)
+ values, compound_names, features = pull_dataframe(df)
+ compounds = compound_names.collect{|c| c.split("$")[0]}
features.each{|f| raise unless @@feats[df][f]}
dataset = OpenTox::Dataset.create(CONFIG[:services]["opentox-dataset"],subjectid)
+ dataset.add_metadata(metadata)
LOGGER.debug "r-util> convert dataframe to dataset #{dataset.uri}"
compounds.size.times{|i| dataset.add_compound(compounds[i]) if compound_indices==nil or compound_indices.include?(i)}
features.each{|f| dataset.add_feature(f,@@feats[df][f])}
@@ -290,16 +351,12 @@ module OpenTox
dataset
end
- def split_to_datasets( df, split, subjectid=nil )
- sets = []
- (split.min.to_i .. split.max.to_i).each do |i|
- indices = []
- split.size.times{|j| indices<<j if split[j]==i}
- dataset = dataframe_to_dataset_indices( df, subjectid, indices )
- LOGGER.debug("r-util> split into #{dataset.uri}, c:#{dataset.compounds.size}, f:#{dataset.features.size}")
- sets << dataset
- end
- sets
+ def split_to_dataset( df, split, metadata={}, subjectid=nil )
+ indices = []
+ split.size.times{|i| indices<<i if yield(split[i]) }
+ dataset = dataframe_to_dataset_indices( df, metadata, subjectid, indices )
+ LOGGER.debug("r-util> split into #{dataset.uri}, c:#{dataset.compounds.size}, f:#{dataset.features.size}")
+ dataset
end
def pull_dataframe(df)
@@ -323,6 +380,8 @@ module OpenTox
end
def assign_dataframe(df,input,rownames,colnames)
+ rownames.check_uniq if rownames
+ colnames.check_uniq if colnames
tmp = File.join(Dir.tmpdir,Time.new.to_f.to_s+"_"+rand(10000).to_s+".csv")
file = File.new(tmp, 'w')
input.each{|i| file.puts(i.collect{|e| "\"#{e}\""}.join("#")+"\n")}
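The r-util.rb changes expose two public entry points, stratified_split and stratified_k_fold_split, which both delegate to stratified_split_internal and return datasets whose DC.title metadata distinguishes the training and test parts. A hedged usage sketch, assuming an opentox-ruby environment with a reachable dataset service; the URI and the bare RUtil.new constructor are assumptions:

    require 'opentox-ruby'

    dataset = OpenTox::Dataset.find("http://example.org/dataset/1")  # placeholder URI
    rutil   = OpenTox::RUtil.new                                     # assumed no-arg constructor

    # single stratified split: 30% test, returns [train, test]
    train, test = rutil.stratified_split(dataset, {}, "NA", 0.3)

    # stratified 10-fold split: returns two arrays (training folds, test folds)
    train_folds, test_folds = rutil.stratified_k_fold_split(dataset, {}, "NA", 10)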
diff --git a/lib/stratification.R b/lib/stratification.R
index 76ff2d8..3f8698c 100644
--- a/lib/stratification.R
+++ b/lib/stratification.R
@@ -1,4 +1,13 @@
+round_it <- function( x )
+{
+ if(isTRUE((x - floor(x))>=0.5))
+ ceiling(x)
+ else
+ floor(x)
+}
+
+
nominal_to_binary <- function( data )
{
result = NULL
@@ -41,9 +50,13 @@ nominal_to_binary <- function( data )
result
}
-process_data <- function( data )
+process_data <- function( data, colnames=NULL )
{
data.num <- as.data.frame(data)
+ if (!is.null(colnames))
+ {
+ data.num = subset(data.num, select = colnames)
+ }
if (!is.numeric(data.num))
{
data.num = nominal_to_binary(data.num)
@@ -72,14 +85,15 @@ cluster <- function( data, min=10, max=15 )
cbind(s$partition[,m])
}
-stratified_split <- function( data, ratio=0.3, method="cluster" )
+stratified_split <- function( data, ratio=0.3, method="cluster", colnames=NULL )
{
- data.processed = as.matrix(process_data( data ))
+ data.processed = as.matrix(process_data( data, colnames ))
+ print(paste("split using #features: ",ncol(data.processed)))
if (method == "samplecube")
{
require("sampling")
# adjust ratio to make samplecube return exact number of samples
- ratio = round(nrow(data.processed)*ratio)/nrow(data.processed)
+ ratio = round_it(nrow(data.processed)*ratio)/nrow(data.processed)
pik = rep(ratio,times=nrow(data.processed))
data.strat = cbind(pik,data.processed)
samplecube(data.strat,pik,order=2,comment=F)
@@ -101,10 +115,11 @@ stratified_split <- function( data, ratio=0.3, method="cluster" )
stop("unknown method")
}
-stratified_k_fold_split <- function( data, num_folds=10, method="cluster" )
+stratified_k_fold_split <- function( data, num_folds=10, method="cluster", colnames=NULL )
{
print(paste(num_folds,"-fold-split, data-size",nrow(data)))
- data.processed = as.matrix(process_data( data ))
+ data.processed = as.matrix(process_data( data, colnames ))
+ print(paste("split using #features: ",ncol(data.processed)))
if (method == "samplecube")
{
folds = rep(0, times=nrow(data))
@@ -133,7 +148,7 @@ stratified_k_fold_split <- function( data, num_folds=10, method="cluster" )
{
require("TunePareto")
cl = cluster(data.processed)
- res = generateCVRuns(cl,ntimes=1,nfold=3)
+ res = generateCVRuns(cl,ntimes=1,nfold=num_folds)
folds = rep(0, times=nrow(data))
for (i in 1:num_folds)
for(j in 1:length(res[[1]][[i]]))
@@ -144,6 +159,50 @@ stratified_k_fold_split <- function( data, num_folds=10, method="cluster" )
stop("unknown method")
}
+duplicate_indices <- function( data ) {
+ indices = 1:nrow(data)
+ z = data
+ duplicate_index = anyDuplicated(z)
+ while(duplicate_index) {
+ duplicate_to_index = anyDuplicated(z[1:duplicate_index,],fromLast=T)
+ #print(paste(duplicate_index,'is dupl to',duplicate_to_index))
+ indices[duplicate_index] <- duplicate_to_index
+ z[duplicate_index,] <- paste('123$§%',duplicate_index)
+ duplicate_index = anyDuplicated(z)
+ }
+ indices
+}
+
+add_duplicates <- function( data, dup_indices ) {
+ result = data[1,]
+ for(i in 2:length(dup_indices)) {
+ row = data[rownames(data)==dup_indices[i],]
+ if(length(row)==0)
+ stop(paste('index ',i,' dup-index ',dup_indices[i],'not found in data'))
+ result = rbind(result, row)
+ }
+ rownames(result)<-NULL
+ result
+}
+
+sammon_duplicates <- function( data, ... ) {
+ di <- duplicate_indices(data)
+ print(di)
+ u <- unique(data)
+ print(paste('unique data points',nrow(u),'of',nrow(data)))
+ if(nrow(u) <= 4) stop("number of unique datapoints <= 4")
+ points_unique <- sammon(dist(u), ...)$points
+ if (nrow(u)<nrow(data))
+ {
+ points <- add_duplicates(points_unique, di)
+ points
+ }
+ else
+ {
+ points_unique
+ }
+}
+
plot_pre_process <- function( data, method="pca" )
{
data.processed = process_data( data )
@@ -158,6 +217,11 @@ plot_pre_process <- function( data, method="pca" )
data.emb <- smacofSym(dist(data.processed, method = "euclidean"), ndim=2, verbose=T)
data.emb$conf
}
+ else if (method == "sammon")
+ {
+ require("MASS")
+ sammon_duplicates(data.processed, k=2)
+ }
else
stop("unknown method")
}
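In the R hunks above, MASS::sammon() cannot handle duplicate data points, so duplicate_indices maps every row to the index of its first occurrence, sammon_duplicates embeds only the unique rows, and add_duplicates copies the embedded coordinates back onto the duplicate rows. A small illustrative Ruby sketch of that dedup-and-reexpand idea, with hypothetical data and a stand-in for the embedding step:

    rows = [[1, 2], [3, 4], [1, 2], [5, 6]]

    first_occurrence = {}                          # row content -> index of its first occurrence
    dup_indices = rows.map.with_index { |row, i| first_occurrence[row] ||= i }
    # dup_indices == [0, 1, 0, 3]

    unique_rows = rows.uniq
    embedding   = unique_rows.map { |x, y| [x * 0.1, y * 0.1] }  # stand-in for the real 2D embedding

    # every row, duplicates included, gets the coordinates of its first occurrence
    points = dup_indices.map { |orig| embedding[unique_rows.index(rows[orig])] }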
diff --git a/lib/transform.rb b/lib/transform.rb
index 8fe1093..f6f769d 100644
--- a/lib/transform.rb
+++ b/lib/transform.rb
@@ -396,8 +396,8 @@ module OpenTox
@q_prop = gsl_q_prop_orig.row(0).to_a
end
- LOGGER.debug "F: #{@n_prop.size}x#{@n_prop[0].size}; R: #{@q_prop.size}"
- LOGGER.debug "Sims: #{@sims.size}, Acts: #{@acts.size}"
+ LOGGER.debug "F: #{@n_prop.size}x#{@n_prop[0].size}; R: #{@q_prop.size}" if (@n_prop && @n_prop[0] && @q_prop)
+ LOGGER.debug "Sims: #{@sims.size}, Acts: #{@acts.size}" if (@sims && @acts)
@sims = [ gram_matrix, @sims ]
diff --git a/opentox-ruby.gemspec b/opentox-ruby.gemspec
index ca2d397..d3ae2d7 100644
--- a/opentox-ruby.gemspec
+++ b/opentox-ruby.gemspec
@@ -5,24 +5,25 @@
Gem::Specification.new do |s|
s.name = %q{opentox-ruby}
- s.version = "3.1.0"
+ s.version = "2.0.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.authors = ["Christoph Helma, Martin Guetlein, Andreas Maunz, Micha Rautenberg, David Vorgrimmler"]
- s.date = %q{2012-03-26}
+ s.date = %q{2011-06-15}
s.description = %q{Ruby wrapper for the OpenTox REST API (http://www.opentox.org)}
s.email = %q{helma@in-silico.ch}
+ s.executables = ["opentox-install-debian.sh", "opentox-install-ubuntu.sh"]
s.extra_rdoc_files = [
- "ChangeLog",
"LICENSE",
"README.markdown"
]
s.files = [
- "ChangeLog",
"LICENSE",
"README.markdown",
"Rakefile",
"VERSION",
+ "bin/opentox-install-debian.sh",
+ "bin/opentox-install-ubuntu.sh",
"lib/algorithm.rb",
"lib/authorization.rb",
"lib/compound.rb",
@@ -40,126 +41,121 @@ Gem::Specification.new do |s|
"lib/overwrite.rb",
"lib/parser.rb",
"lib/policy.rb",
- "lib/r-util.rb",
"lib/rest_client_wrapper.rb",
"lib/serializer.rb",
"lib/spork.rb",
- "lib/stratification.R",
"lib/task.rb",
"lib/templates/default_guest_policy.xml",
"lib/templates/default_policy.xml",
"lib/to-html.rb",
"lib/transform.rb",
- "lib/utils.rb",
+ "lib/utils.rb"
"lib/validation.rb"
]
- s.homepage = %q{http://github.com/opentox/opentox-ruby}
+ s.homepage = %q{http://github.com/helma/opentox-ruby}
s.require_paths = ["lib"]
- s.rubygems_version = %q{1.5.3}
+ s.rubygems_version = %q{1.5.2}
s.summary = %q{Ruby wrapper for the OpenTox REST API}
if s.respond_to? :specification_version then
s.specification_version = 3
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
- s.add_runtime_dependency(%q<sinatra>, ["= 1.2.6"])
- s.add_runtime_dependency(%q<emk-sinatra-url-for>, ["= 0.2.1"])
- s.add_runtime_dependency(%q<sinatra-respond_to>, ["= 0.7.0"])
- s.add_runtime_dependency(%q<sinatra-static-assets>, ["= 0.5.0"])
- s.add_runtime_dependency(%q<rest-client>, ["= 1.6.1"])
- s.add_runtime_dependency(%q<rack>, ["= 1.3.5"])
- s.add_runtime_dependency(%q<rack-contrib>, ["= 1.1.0"])
- s.add_runtime_dependency(%q<rack-flash>, ["= 0.1.1"])
- s.add_runtime_dependency(%q<nokogiri>, ["= 1.4.4"])
- s.add_runtime_dependency(%q<rubyzip>, ["= 0.9.4"])
- s.add_runtime_dependency(%q<roo>, ["= 1.9.3"])
- s.add_runtime_dependency(%q<spreadsheet>, ["= 0.6.5.4"])
- s.add_runtime_dependency(%q<google-spreadsheet-ruby>, ["= 0.1.5"])
- s.add_runtime_dependency(%q<yajl-ruby>, ["= 0.8.2"])
- s.add_runtime_dependency(%q<rinruby>, ["= 2.0.2"])
- s.add_runtime_dependency(%q<ohm>, ["= 0.1.3"])
- s.add_runtime_dependency(%q<ohm-contrib>, ["= 0.1.1"])
- s.add_runtime_dependency(%q<SystemTimer>, ["= 1.2.3"])
- s.add_runtime_dependency(%q<rjb>, ["= 1.3.4"])
- s.add_runtime_dependency(%q<haml>, ["= 3.1.1"])
- s.add_runtime_dependency(%q<akephalos>, ["= 0.2.5"])
- s.add_runtime_dependency(%q<dm-core>, ["= 1.1.0"])
- s.add_runtime_dependency(%q<dm-serializer>, ["= 1.1.0"])
- s.add_runtime_dependency(%q<dm-timestamps>, ["= 1.1.0"])
- s.add_runtime_dependency(%q<dm-types>, ["= 1.1.0"])
- s.add_runtime_dependency(%q<dm-migrations>, ["= 1.1.0"])
- s.add_runtime_dependency(%q<dm-validations>, ["= 1.1.0"])
- s.add_runtime_dependency(%q<dm-sqlite-adapter>, ["= 1.1.0"])
- s.add_runtime_dependency(%q<ruby-plot>, ["= 0.6.0"])
- s.add_runtime_dependency(%q<gsl>, ["= 1.14.7"])
- s.add_runtime_dependency(%q<statsample>, ["= 1.1.0"])
+ s.add_runtime_dependency(%q<sinatra>, [">= 0"])
+ s.add_runtime_dependency(%q<emk-sinatra-url-for>, [">= 0"])
+ s.add_runtime_dependency(%q<sinatra-respond_to>, [">= 0"])
+ s.add_runtime_dependency(%q<sinatra-static-assets>, [">= 0"])
+ s.add_runtime_dependency(%q<rest-client>, [">= 0"])
+ s.add_runtime_dependency(%q<rack>, [">= 0"])
+ s.add_runtime_dependency(%q<rack-contrib>, [">= 0"])
+ s.add_runtime_dependency(%q<rack-flash>, [">= 0"])
+ s.add_runtime_dependency(%q<nokogiri>, [">= 0"])
+ s.add_runtime_dependency(%q<rubyzip>, [">= 0"])
+ s.add_runtime_dependency(%q<roo>, [">= 0"])
+ s.add_runtime_dependency(%q<spreadsheet>, [">= 0"])
+ s.add_runtime_dependency(%q<google-spreadsheet-ruby>, [">= 0"])
+ s.add_runtime_dependency(%q<yajl-ruby>, [">= 0"])
+ s.add_runtime_dependency(%q<tmail>, [">= 0"])
+ s.add_runtime_dependency(%q<rinruby>, [">= 0"])
+ s.add_runtime_dependency(%q<ohm>, [">= 0"])
+ s.add_runtime_dependency(%q<ohm-contrib>, [">= 0"])
+ s.add_runtime_dependency(%q<SystemTimer>, [">= 0"])
+ s.add_runtime_dependency(%q<rjb>, [">= 0"])
+ s.add_runtime_dependency(%q<dm-core>, [">= 0"])
+ s.add_runtime_dependency(%q<dm-serializer>, [">= 0"])
+ s.add_runtime_dependency(%q<dm-timestamps>, [">= 0"])
+ s.add_runtime_dependency(%q<dm-types>, [">= 0"])
+ s.add_runtime_dependency(%q<dm-migrations>, [">= 0"])
+ s.add_runtime_dependency(%q<dm-validations>, [">= 0"])
+ s.add_runtime_dependency(%q<dm-sqlite-adapter>, [">= 0"])
+ s.add_runtime_dependency(%q<haml>, [">= 3"])
+ s.add_runtime_dependency(%q<ruby-plot>, ["~> 0.4.0"])
+ s.add_runtime_dependency(%q<statsample>, [">= 0"])
s.add_development_dependency(%q<jeweler>, [">= 0"])
else
- s.add_dependency(%q<sinatra>, ["= 1.2.6"])
- s.add_dependency(%q<emk-sinatra-url-for>, ["= 0.2.1"])
- s.add_dependency(%q<sinatra-respond_to>, ["= 0.7.0"])
- s.add_dependency(%q<sinatra-static-assets>, ["= 0.5.0"])
- s.add_dependency(%q<rest-client>, ["= 1.6.1"])
- s.add_dependency(%q<rack>, ["= 1.3.5"])
- s.add_dependency(%q<rack-contrib>, ["= 1.1.0"])
- s.add_dependency(%q<rack-flash>, ["= 0.1.1"])
- s.add_dependency(%q<nokogiri>, ["= 1.4.4"])
- s.add_dependency(%q<rubyzip>, ["= 0.9.4"])
- s.add_dependency(%q<roo>, ["= 1.9.3"])
- s.add_dependency(%q<spreadsheet>, ["= 0.6.5.4"])
- s.add_dependency(%q<google-spreadsheet-ruby>, ["= 0.1.5"])
- s.add_dependency(%q<yajl-ruby>, ["= 0.8.2"])
- s.add_dependency(%q<rinruby>, ["= 2.0.2"])
- s.add_dependency(%q<ohm>, ["= 0.1.3"])
- s.add_dependency(%q<ohm-contrib>, ["= 0.1.1"])
- s.add_dependency(%q<SystemTimer>, ["= 1.2.3"])
- s.add_dependency(%q<rjb>, ["= 1.3.4"])
- s.add_dependency(%q<haml>, ["= 3.1.1"])
- s.add_dependency(%q<akephalos>, ["= 0.2.5"])
- s.add_dependency(%q<dm-core>, ["= 1.1.0"])
- s.add_dependency(%q<dm-serializer>, ["= 1.1.0"])
- s.add_dependency(%q<dm-timestamps>, ["= 1.1.0"])
- s.add_dependency(%q<dm-types>, ["= 1.1.0"])
- s.add_dependency(%q<dm-migrations>, ["= 1.1.0"])
- s.add_dependency(%q<dm-validations>, ["= 1.1.0"])
- s.add_dependency(%q<dm-sqlite-adapter>, ["= 1.1.0"])
- s.add_dependency(%q<ruby-plot>, ["= 0.6.0"])
- s.add_dependency(%q<gsl>, ["= 1.14.7"])
- s.add_dependency(%q<statsample>, ["= 1.1.0"])
+ s.add_dependency(%q<sinatra>, [">= 0"])
+ s.add_dependency(%q<emk-sinatra-url-for>, [">= 0"])
+ s.add_dependency(%q<sinatra-respond_to>, [">= 0"])
+ s.add_dependency(%q<sinatra-static-assets>, [">= 0"])
+ s.add_dependency(%q<rest-client>, [">= 0"])
+ s.add_dependency(%q<rack>, [">= 0"])
+ s.add_dependency(%q<rack-contrib>, [">= 0"])
+ s.add_dependency(%q<rack-flash>, [">= 0"])
+ s.add_dependency(%q<nokogiri>, [">= 0"])
+ s.add_dependency(%q<rubyzip>, [">= 0"])
+ s.add_dependency(%q<roo>, [">= 0"])
+ s.add_dependency(%q<spreadsheet>, [">= 0"])
+ s.add_dependency(%q<google-spreadsheet-ruby>, [">= 0"])
+ s.add_dependency(%q<yajl-ruby>, [">= 0"])
+ s.add_dependency(%q<tmail>, [">= 0"])
+ s.add_dependency(%q<rinruby>, [">= 0"])
+ s.add_dependency(%q<ohm>, [">= 0"])
+ s.add_dependency(%q<ohm-contrib>, [">= 0"])
+ s.add_dependency(%q<SystemTimer>, [">= 0"])
+ s.add_dependency(%q<rjb>, [">= 0"])
+ s.add_dependency(%q<dm-core>, [">= 0"])
+ s.add_dependency(%q<dm-serializer>, [">= 0"])
+ s.add_dependency(%q<dm-timestamps>, [">= 0"])
+ s.add_dependency(%q<dm-types>, [">= 0"])
+ s.add_dependency(%q<dm-migrations>, [">= 0"])
+ s.add_dependency(%q<dm-validations>, [">= 0"])
+ s.add_dependency(%q<dm-sqlite-adapter>, [">= 0"])
+ s.add_dependency(%q<haml>, [">= 3"])
+ s.add_dependency(%q<ruby-plot>, ["~> 0.4.0"])
+ s.add_dependency(%q<statsample>, [">= 0"])
s.add_dependency(%q<jeweler>, [">= 0"])
end
else
- s.add_dependency(%q<sinatra>, ["= 1.2.6"])
- s.add_dependency(%q<emk-sinatra-url-for>, ["= 0.2.1"])
- s.add_dependency(%q<sinatra-respond_to>, ["= 0.7.0"])
- s.add_dependency(%q<sinatra-static-assets>, ["= 0.5.0"])
- s.add_dependency(%q<rest-client>, ["= 1.6.1"])
- s.add_dependency(%q<rack>, ["= 1.3.5"])
- s.add_dependency(%q<rack-contrib>, ["= 1.1.0"])
- s.add_dependency(%q<rack-flash>, ["= 0.1.1"])
- s.add_dependency(%q<nokogiri>, ["= 1.4.4"])
- s.add_dependency(%q<rubyzip>, ["= 0.9.4"])
- s.add_dependency(%q<roo>, ["= 1.9.3"])
- s.add_dependency(%q<spreadsheet>, ["= 0.6.5.4"])
- s.add_dependency(%q<google-spreadsheet-ruby>, ["= 0.1.5"])
- s.add_dependency(%q<yajl-ruby>, ["= 0.8.2"])
- s.add_dependency(%q<rinruby>, ["= 2.0.2"])
- s.add_dependency(%q<ohm>, ["= 0.1.3"])
- s.add_dependency(%q<ohm-contrib>, ["= 0.1.1"])
- s.add_dependency(%q<SystemTimer>, ["= 1.2.3"])
- s.add_dependency(%q<rjb>, ["= 1.3.4"])
- s.add_dependency(%q<haml>, ["= 3.1.1"])
- s.add_dependency(%q<akephalos>, ["= 0.2.5"])
- s.add_dependency(%q<dm-core>, ["= 1.1.0"])
- s.add_dependency(%q<dm-serializer>, ["= 1.1.0"])
- s.add_dependency(%q<dm-timestamps>, ["= 1.1.0"])
- s.add_dependency(%q<dm-types>, ["= 1.1.0"])
- s.add_dependency(%q<dm-migrations>, ["= 1.1.0"])
- s.add_dependency(%q<dm-validations>, ["= 1.1.0"])
- s.add_dependency(%q<dm-sqlite-adapter>, ["= 1.1.0"])
- s.add_dependency(%q<ruby-plot>, ["= 0.6.0"])
- s.add_dependency(%q<gsl>, ["= 1.14.7"])
- s.add_dependency(%q<statsample>, ["= 1.1.0"])
+ s.add_dependency(%q<sinatra>, [">= 0"])
+ s.add_dependency(%q<emk-sinatra-url-for>, [">= 0"])
+ s.add_dependency(%q<sinatra-respond_to>, [">= 0"])
+ s.add_dependency(%q<sinatra-static-assets>, [">= 0"])
+ s.add_dependency(%q<rest-client>, [">= 0"])
+ s.add_dependency(%q<rack>, [">= 0"])
+ s.add_dependency(%q<rack-contrib>, [">= 0"])
+ s.add_dependency(%q<rack-flash>, [">= 0"])
+ s.add_dependency(%q<nokogiri>, [">= 0"])
+ s.add_dependency(%q<rubyzip>, [">= 0"])
+ s.add_dependency(%q<roo>, [">= 0"])
+ s.add_dependency(%q<spreadsheet>, [">= 0"])
+ s.add_dependency(%q<google-spreadsheet-ruby>, [">= 0"])
+ s.add_dependency(%q<yajl-ruby>, [">= 0"])
+ s.add_dependency(%q<tmail>, [">= 0"])
+ s.add_dependency(%q<rinruby>, [">= 0"])
+ s.add_dependency(%q<ohm>, [">= 0"])
+ s.add_dependency(%q<ohm-contrib>, [">= 0"])
+ s.add_dependency(%q<SystemTimer>, [">= 0"])
+ s.add_dependency(%q<rjb>, [">= 0"])
+ s.add_dependency(%q<dm-core>, [">= 0"])
+ s.add_dependency(%q<dm-serializer>, [">= 0"])
+ s.add_dependency(%q<dm-timestamps>, [">= 0"])
+ s.add_dependency(%q<dm-types>, [">= 0"])
+ s.add_dependency(%q<dm-migrations>, [">= 0"])
+ s.add_dependency(%q<dm-validations>, [">= 0"])
+ s.add_dependency(%q<dm-sqlite-adapter>, [">= 0"])
+ s.add_dependency(%q<haml>, [">= 3"])
+ s.add_dependency(%q<ruby-plot>, ["~> 0.4.0"])
+ s.add_dependency(%q<statsample>, [">= 0"])
s.add_dependency(%q<jeweler>, [">= 0"])
end
end