Diffstat (limited to 'scripts/tensorflow/prediction-v2-ext-Padel-2D.ipynb')
-rw-r--r--  scripts/tensorflow/prediction-v2-ext-Padel-2D.ipynb  1007
1 files changed, 1007 insertions, 0 deletions
diff --git a/scripts/tensorflow/prediction-v2-ext-Padel-2D.ipynb b/scripts/tensorflow/prediction-v2-ext-Padel-2D.ipynb
new file mode 100644
index 0000000..a66b942
--- /dev/null
+++ b/scripts/tensorflow/prediction-v2-ext-Padel-2D.ipynb
@@ -0,0 +1,1007 @@
+{
+ "cells": [
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "Using TensorFlow backend.\n"
+ ]
+ }
+ ],
+ "source": [
+ "from keras import optimizers, regularizers\n",
+ "from keras.layers import Dense, Dropout, Input\n",
+ "from keras.models import Model, Sequential\n",
+ "from random import shuffle\n",
+ "from scipy import interp\n",
+ "from sklearn.linear_model import LogisticRegression\n",
+ "from scipy.stats.mstats import gmean\n",
+ "from sklearn.ensemble import RandomForestClassifier\n",
+ "from sklearn.metrics import roc_curve, auc\n",
+ "from sklearn.model_selection import StratifiedKFold, train_test_split\n",
+ "from sklearn.preprocessing import QuantileTransformer\n",
+ "import contextlib\n",
+ "import glob\n",
+ "import gzip\n",
+ "import h5py\n",
+ "import keras\n",
+ "import numpy as np\n",
+ "import os\n",
+ "import pandas as pd\n",
+ "import pylab as plt\n",
+ "import random\n",
+ "import scipy\n",
+ "import sklearn\n",
+ "import tensorflow as tf\n",
+ "from sklearn.ensemble import RandomForestClassifier\n",
+ "from sklearn.datasets import make_classification\n",
+ "\n",
+ "\n",
+ "\n",
+ "random_state = np.random.RandomState(0)\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "(8080, 1173)\n",
+ "(8080, 1)\n"
+ ]
+ },
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "/fast_data/drewe/software/envs/tf_gpu/lib/python3.6/site-packages/IPython/core/interactiveshell.py:3020: DtypeWarning: Columns (517,531,534,542,545,547,573,576,599,601,604,607,620,627,629,636,650,653,661,664,666,692,695,718,720,723,726,739,746,748,755,769,772,780,783,785,805,811,814,819,837,845,901,1005) have mixed types. Specify dtype option on import or set low_memory=False.\n",
+ " interactivity=interactivity, compiler=compiler, result=result)\n"
+ ]
+ }
+ ],
+ "source": [
+ "X_f_ext = '/home/drewe/notebooks/genotox/mutagenicity-mod-2.csv'\n",
+ "y_f_ext = '/home/drewe/notebooks/genotox/outcome-mod-2.csv'\n",
+ "\n",
+ "x_pad = pd.read_csv('/home/drewe/notebooks/genotox/PA_Padel_2D_m2.csv', sep=';', decimal=',')\n",
+ "\n",
+ "X_ext = pd.read_csv(X_f_ext,sep=';')\n",
+ "del X_ext['Name']\n",
+ "selected_cols = list(set(x_pad.columns.tolist()).intersection(set(X_ext.columns.tolist())))\n",
+ "\n",
+ "X_ext = np.float64(X_ext[selected_cols].values)\n",
+ "y_ext = np.float64(pd.read_csv(y_f_ext).values)\n",
+ "x_pad = np.float64(x_pad[selected_cols].values)\n",
+ "print(X_ext.shape)\n",
+ "print(y_ext.shape)\n",
+ "\n",
+ "names = pd.read_csv('/home/drewe/notebooks/genotox/PA_Padel_2D_m2.csv', sep=';')['Name'].values\n",
+ "#X_norm = QuantileTransformer(output_distribution='uniform').fit_transform(X.values.T).T\n",
+ "\n",
+ "\n",
+ "trnsf = sklearn.preprocessing.QuantileTransformer( output_distribution='uniform')\n",
+ "trnsf.fit(X_ext)\n",
+ "\n",
+ "X = trnsf.transform(X_ext)\n",
+ "x_pad = trnsf.transform(x_pad)\n",
+ "y = y_ext[: ,0]\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [],
+ "source": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ ">>\n",
+ ".\n",
+ ">>\n",
+ ".\n",
+ ">>\n",
+ ".\n",
+ ">>\n",
+ ".\n",
+ ">>\n",
+ ".\n"
+ ]
+ }
+ ],
+ "source": [
+ "cv = StratifiedKFold(n_splits=10)\n",
+ "results = np.zeros_like(y, dtype=float)\n",
+ "\n",
+ "tprs = []\n",
+ "aucs = []\n",
+ "mean_fpr = np.linspace(0, 1, 100)\n",
+ "\n",
+ "i = 0\n",
+ "\n",
+ "keras.backend.clear_session()\n",
+ "prbs=[]\n",
+ "for mod in range(5):\n",
+ " print('>>')\n",
+ " curr_try = 0\n",
+ " while curr_try <10:\n",
+ " print('.')\n",
+ "\n",
+ " model = Sequential()\n",
+ " model.add(Dense(64, input_dim=X.shape[1], activation='relu'))\n",
+ " model.add(Dense(64, activation='relu'))\n",
+ " model.add(Dense(64, activation='relu'))\n",
+ " model.add(Dense(64, activation='relu'))\n",
+ " model.add(Dense(1, activation='sigmoid'))\n",
+ " # Compile model\n",
+ " opt = keras.optimizers.Adam(epsilon=None, amsgrad=True)\n",
+ " model.compile(loss='binary_crossentropy', optimizer=opt, metrics=['accuracy'])\n",
+ "\n",
+ " # Fit the model\n",
+ " history = model.fit(X, y, epochs=50, batch_size=64, verbose=0)\n",
+ " if history.history['acc'][-1] > 0.53:\n",
+ " break\n",
+ " else:\n",
+ " curr_try += 1\n",
+ "\n",
+ " # Fit the model\n",
+ " probas_ = model.predict(x_pad)\n",
+ " prbs.append(probas_)\n",
+ "# Average the predictions\n",
+ "probas_ = np.mean(np.hstack(prbs), axis=1)\n",
+ "results = probas_\n",
+ "\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "df_results = pd.DataFrame(data={\"name\": names, 'pred': results})\n",
+ "df_results.to_csv('/home/drewe/notebooks/genotox/pred.nn.v3-ext-Padel-2D.csv', index=None)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX4AAAD8CAYAAABw1c+bAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAEUBJREFUeJzt3X+MZWV9x/H3R360UamoOyICw9oWSZEUJJNVQ2qhKMKGQNug3U1VtNhVqqa2pqmtiRD9p62xJhYjrrJBG0VqW5TURSDWBm1AXRBwUSlIUdYlLAiCBq1d/faPOaTjcO/O3XvunZnd5/1Kbub8eO4533nmzmfOPveeZ1NVSJLa8aSVLkCStLwMfklqjMEvSY0x+CWpMQa/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDlzpAgZZs2ZNrV27dqXLkKR9xk033fRgVc2M0nZVBv/atWvZtm3bSpchSfuMJN8Zta1DPZLUGINfkhpj8EtSYwx+SWqMwS9JjTH4JakxBr8kNcbgl6TGGPyS1JhVeeeuBMBFFw1eHredJMArfklqzpJX/Em2AGcBu6rq+G7bFcCxXZNDgR9U1YkDnnsP8EPgZ8DuqpqbUN2SpDGNMtRzGXAx8LHHN1TVHzy+nOS9wCN7eP6pVfXguAVKkiZryeCvquuTrB20L0mAVwK/M9myJEnT0neM/7eA+6vqziH7C7g2yU1JNu3pQEk2JdmWZNsDDzzQsyxJ0jB9g38jcPke9p9cVScBZwJvSvKSYQ2ranNVzVXV3MzMSP+XgCRpDGMHf5IDgd8HrhjWpqp2dl93AVcC68Y9nyRpMvpc8b8U+FZV7Ri0M8lTkhzy+DJwOrC9x/kkSROwZPAnuRy4ATg2yY4k53e7NrBomCfJc5Js7VYPA76U5FbgK8Bnq+pzkytdkjSOUT7Vs3HI9tcO2LYTWN8t3w2c0LM+SdKEOWWDVp5TLkjLyikbJKkxBr8kNcbgl6TGGPyS1BiDX5IaY/BLUmMMfklqjMEvSY0x+CWpMQa/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMYsGfxJtiTZlWT7gm0XJfleklu6x/ohzz0jyR1J7kry9kkWLkkazyhX/JcBZwzY/r6qOrF7bF28M8kBwAeAM4HjgI1JjutTrCSpvyWDv6quBx4a49jrgLuq6u6q+inwSeCcMY4jSZqgA3s8981JXgNsA95WVQ8v2n8EcO+C9R3AC4cdLMkmYBPA7Oxsj7I0VRddNHh5tVjt9UmrwLhv7n4Q+DXgROA+4L0D2mTAthp2wKraXFVzVTU3MzMzZlmSpKWMFfxVdX9V/ayqfg58mPlhncV2AEctWD8S2DnO+SRJkzNW8Cc5fMHq7wHbBzT7KnBMkucmORjYAFw1zvkkSZOz5Bh/ksuBU4A1SXYAFwKnJDmR+aGbe4A3dG2fA3ykqtZX1e4kbwauAQ4AtlTV7VP5LiRJI1sy+Ktq44DNlw5puxNYv2B9K/CEj3pKklaOd+5KUmMMfklqjMEvSY0x+CWpMQa/JDXG4JekxvSZq0fav7Q4z0+L37O84pek1hj8ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMYY/JLUGKds0L5ptU0vMI2pD4YdZ9TjT3s6Bqd72Gd5xS9JjVky+JNsSbIryfYF296T5FtJbktyZZJDhzz3niRfT3JLkm2TLFySNJ5RrvgvA85YtO064Piq+k3gv4C/2sPzT62qE6tqbrwSJUmTtGTwV9X1wEOLtl1bVbu71RuBI6dQmyRpCiYxxv9HwNVD9hVwbZKbkmyawLkkST31+lRPkncAu4GPD2lyclXtTPIs4Lok3+r+BTHoWJuATQCzs7N9ypIk7cHYV/xJzgPOAv6wqmpQm6ra2X3dBVwJrBt2vKraXFVzVTU3MzMzblmSpCWMFfxJzgD+Eji7qh4b0uYpSQ55fBk4Hdg+qK0kafmM8nHOy4EbgGOT7EhyPnAxcAjzwze3JLmka/ucJFu7px4GfCnJrcBXgM9W1eem8l1Ikka25Bh/VW0csPnSIW13Auu75buBE3pVJ0maOO/claTGOFePVsbezu3Sdy6YYfPKtDjHTIvfs36BV/yS1BiDX5IaY/BLUmMMfklqjMEvSY0x+CWpMQa/JDXG4Jekxhj8ktQYg1+SGuOUDdp/OTWBNJBX/JLUGINfkhpj8EtSYwx+SWqMwS9JjTH4JakxBr8kNWak4E+yJcmuJNsXbHtGkuuS3Nl9ffqQ557XtbkzyXmTKlySNJ5Rr/gvA85YtO3twOer6hjg8936L0jyDOBC4IXAOuDCYX8gJEnLY6Tgr6rrgYcWbT4H+Gi3/FHgdwc89eXAdVX1UFU9DFzHE/+ASJKWUZ8x/sOq6j6A7uuzBrQ5Arh3wfqObpskaYVMe66eDNhWAxsmm4BNALOzs9OsSVrawnl+hs35M8p25wta3Rr9WfW54r8/yeEA3dddA9rsAI5asH4ksHPQwapqc1XNVdXczMxMj7IkSXvSJ/ivAh7/lM55wGcGtLkGOD3J07s3dU/vtkmSVsioH+e8HLgBODbJjiTnA38DvCzJncDLunWSzCX5CEBVPQS8G/hq93hXt02StEJGGuOvqo1Ddp02oO024PUL1rcAW8aqTpI0cd65K0mNMfglqTEGvyQ1xuCXpMYY/JLUGINfkhoz7SkbpP/X0C3xSxo2VYB9pGXgFb8kNcbgl6TGGPyS1BiDX5IaY/BLUmMMfklqjMEvSY0x+CWpMQa/JDXG4Jekxhj8ktQY5+qRVtr+Nj/Pcs49tKdz7W0dw9rsbz8fvOKXpOaMHfxJjk1yy4LHo0neuqjNKUkeWdDmnf1LliT1MfZQT1XdAZwIkOQA4HvAlQOafrGqzhr3PJKkyZrUUM9pwLer6jsTOp4kaUomFfwbgMuH7HtxkluTXJ3k+RM6nyRpTL2DP8nBwNnApwbsvhk4uqpOAP4B+PQejrMpybYk2x544IG+ZUmShpjEFf+ZwM1Vdf/iHVX1aFX9qFveChyUZM2gg1TV5qqaq6q5mZmZCZQlSRpkEsG/kSHDPEmenSTd8rrufN+fwDklSWPqdQNXkicDLwPesGDbGwGq6hLgXOCCJLuBHwMbqqr6nFOS1E+v4K+qx4BnLtp2yYLli4GL+5xDkjRZTtmwLxl2C/qkbpHvc5xp1DMtq7Gmfd2k+rTPa9yf68icskGSGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMYY/JLUGINfkhpj8EtSYwx+SWqMc/W0bpT5TfZ2bpRJzRekpS3ur2n33zSO78982XnFL0mNMfglqTEGvyQ1xuCXpMYY/JLUGINfkhpj8EtSY3oHf5J7knw9yS1Jtg3YnyTvT3JXktuSnNT3nJKk8U3qBq5Tq+rBIfvOBI7pHi8EPth9lSStgOUY6jkH+FjNuxE4NMnhy3BeSdIAk7jiL+DaJAV8qKo2L9p/BHDvgvU
d3bb7FjZKsgnYBDA7OzuBsqQpWA3TC6yGGpbDtL7PVvpvDyZxxX9yVZ3E/JDOm5K8ZNH+DHhOPWFD1eaqmququZmZmQmUJUkapHfwV9XO7usu4Epg3aImO4CjFqwfCezse15J0nh6BX+SpyQ55PFl4HRg+6JmVwGv6T7d8yLgkaq6D0nSiug7xn8YcGWSx4/1iar6XJI3AlTVJcBWYD1wF/AY8Lqe55Qk9dAr+KvqbuCEAdsvWbBcwJv6nEeSNDneuStJjTH4JakxBr8kNcbgl6TGGPyS1BiDX5IaM6nZObVajDIPyaTatGJf6othtfb5mU/q+194nD7H3Jd+HquUV/yS1BiDX5IaY/BLUmMMfklqjMEvSY0x+CWpMQa/JDXG4Jekxhj8ktQYg1+SGuOUDauNt6PvX1r/eU57GgiNxSt+SWrM2MGf5KgkX0jyzSS3J/nTAW1OSfJIklu6xzv7lStJ6qvPUM9u4G1VdXOSQ4CbklxXVd9Y1O6LVXVWj/NIkiZo7Cv+qrqvqm7uln8IfBM4YlKFSZKmYyJj/EnWAi8Avjxg94uT3Jrk6iTPn8T5JEnj6/2pniRPBf4FeGtVPbpo983A0VX1oyTrgU8Dxww5ziZgE8Ds7GzfsiRJQ/S64k9yEPOh//Gq+tfF+6vq0ar6Ube8FTgoyZpBx6qqzVU1V1VzMzMzfcqSJO1Bn0/1BLgU+GZV/f2QNs/u2pFkXXe+7497TklSf32Gek4GXg18Pckt3ba/BmYBquoS4FzggiS7gR8DG6qqepxTktTT2MFfVV8CskSbi4GLxz2HJGnyvHNXkhrTzlw9C+cG2dt5Qvb2ucPaT3J+Euc60XJp/bU2rd/hFeQVvyQ1xuCXpMYY/JLUGINfkhpj8EtSYwx+SWqMwS9JjTH4JakxBr8kNcbgl6TG7H9TNkzq9uo+UzPs7XGW22qsSZqmSb3m9/Z3flI5MmFe8UtSYwx+SWqMwS9JjTH4JakxBr8kNcbgl6TGGPyS1JhewZ/kjCR3JLkrydsH7P+lJFd0+7+cZG2f80mS+hs7+JMcAHwAOBM4DtiY5LhFzc4HHq6qXwfeB/ztuOeTJE1Gnyv+dcBdVXV3Vf0U+CRwzqI25wAf7Zb/GTgtSXqcU5LUU5/gPwK4d8H6jm7bwDZVtRt4BHhmj3NKknpKVY33xOQVwMur6vXd+quBdVX1lgVtbu/a7OjWv921+f6A420CNnWrxwJ3LGqyBnhwrGL3H/aBfQD2AdgH8MQ+OLqqZkZ5Yp9J2nYARy1YPxLYOaTNjiQHAk8DHhp0sKraDGwedrIk26pqrke9+zz7wD4A+wDsA+jXB32Ger4KHJPkuUkOBjYAVy1qcxVwXrd8LvDvNe4/MSRJEzH2FX9V7U7yZuAa4ABgS1XdnuRdwLaqugq4FPjHJHcxf6W/YRJFS5LG12s+/qraCmxdtO2dC5Z/AryizzkWGDoM1BD7wD4A+wDsA+jRB2O/uStJ2jc5ZYMkNWbVBb/TQIzUB3+e5BtJbkvy+SRHr0Sd07RUHyxod26SSrLffcJjlD5I8srutXB7kk8sd43TNsLvwmySLyT5Wvf7sH4l6pyWJFuS7Eqyfcj+JHl/1z+3JTlppANX1ap5MP8m8beBXwUOBm4FjlvU5k+AS7rlDcAVK133CvTBqcCTu+ULWuyDrt0hwPXAjcDcSte9Aq+DY4CvAU/v1p+10nWvQB9sBi7olo8D7lnpuifcBy8BTgK2D9m/HrgaCPAi4MujHHe1XfE7DcQIfVBVX6iqx7rVG5m/h2J/MsrrAODdwN8BP1nO4pbJKH3wx8AHquphgKratcw1TtsofVDAr3TLT+OJ9xLt06rqeobc+9Q5B/hYzbsRODTJ4Usdd7UFv9NAjNYHC53P/F/8/cmSfZDkBcBRVfVvy1nYMhrldfA84HlJ/jPJjUnOWLbqlscofXAR8KokO5j/hOFbaMve5gXQ8+OcUzDoyn3xx45GabMvG/n7S/IqYA747alWtPz22AdJnsT8bK+vXa6CVsAor4MDmR/uOYX5f/V9McnxVfWDKde2XEbpg43AZVX13iQvZv6+oeOr6ufTL29VGCsPV9sV/95MA8FS00Dso0bpA5K8FHgHcHZV/c8y1bZcluqDQ4Djgf9Icg/zY5tX7Wdv8I76u/CZqvrfqvpv5ue3OmaZ6lsOo/TB+cA/AVTVDcAvMz+HTStGyovFVlvwOw3ECH3QDXN8iPnQ39/GdWGJPqiqR6pqTVWtraq1zL/PcXZVbVuZcqdilN+FTzP/Rj9J1jA/9HP3slY5XaP0wXeB0wCS/Abzwf/Asla5sq4CXtN9uudFwCNVdd9ST1pVQz3lNBCj9sF7gKcCn+re1/5uVZ29YkVP2Ih9sF8bsQ+uAU5P8g3gZ8Bf1ICZb/dVI/bB24APJ/kz5oc4Xrs/XQgmuZz5obw13fsYFwIHAVTVJcy/r7EeuAt4DHjdSMfdj/pIkjSC1TbUI0maMoNfkhpj8EtSYwx+SWqMwS9JjTH4JakxBr8kNcbgl6TG/B9x4xPmUSVEdwAAAABJRU5ErkJggg==\n",
+ "text/plain": [
+ "<Figure size 432x288 with 1 Axes>"
+ ]
+ },
+ "metadata": {
+ "needs_background": "light"
+ },
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "plt.hist(results,100, color='red', alpha=0.5)\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ ">>\n",
+ ".\n",
+ "Epoch 1/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6169 - acc: 0.6601\n",
+ "Epoch 2/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6184 - acc: 0.6558\n",
+ "Epoch 3/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6161 - acc: 0.6590\n",
+ "Epoch 4/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6197 - acc: 0.6550\n",
+ "Epoch 5/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6150 - acc: 0.6579\n",
+ "Epoch 6/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6167 - acc: 0.6572\n",
+ "Epoch 7/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6193 - acc: 0.6522\n",
+ "Epoch 8/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6150 - acc: 0.6620\n",
+ "Epoch 9/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6164 - acc: 0.6589\n",
+ "Epoch 10/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6147 - acc: 0.6610\n",
+ "Epoch 11/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6145 - acc: 0.6562\n",
+ "Epoch 12/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6162 - acc: 0.6575\n",
+ "Epoch 13/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6146 - acc: 0.6627\n",
+ "Epoch 14/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6132 - acc: 0.6621\n",
+ "Epoch 15/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6138 - acc: 0.6615\n",
+ "Epoch 16/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6178 - acc: 0.6531\n",
+ "Epoch 17/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6126 - acc: 0.6645\n",
+ "Epoch 18/50\n",
+ "8080/8080 [==============================] - 0s 41us/step - loss: 0.6136 - acc: 0.6605\n",
+ "Epoch 19/50\n",
+ "8080/8080 [==============================] - 0s 39us/step - loss: 0.6127 - acc: 0.6631\n",
+ "Epoch 20/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6126 - acc: 0.6585\n",
+ "Epoch 21/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6137 - acc: 0.6582\n",
+ "Epoch 22/50\n",
+ "8080/8080 [==============================] - 0s 34us/step - loss: 0.6129 - acc: 0.6605\n",
+ "Epoch 23/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6114 - acc: 0.6626\n",
+ "Epoch 24/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6144 - acc: 0.6640\n",
+ "Epoch 25/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6169 - acc: 0.6583\n",
+ "Epoch 26/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6153 - acc: 0.6574\n",
+ "Epoch 27/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6113 - acc: 0.6649\n",
+ "Epoch 28/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6124 - acc: 0.6619\n",
+ "Epoch 29/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6130 - acc: 0.6608\n",
+ "Epoch 30/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6150 - acc: 0.6573\n",
+ "Epoch 31/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6136 - acc: 0.6615\n",
+ "Epoch 32/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6137 - acc: 0.6585\n",
+ "Epoch 33/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6128 - acc: 0.6627\n",
+ "Epoch 34/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6139 - acc: 0.6580\n",
+ "Epoch 35/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6097 - acc: 0.6652\n",
+ "Epoch 36/50\n",
+ "8080/8080 [==============================] - 0s 34us/step - loss: 0.6118 - acc: 0.6603\n",
+ "Epoch 37/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6111 - acc: 0.6645\n",
+ "Epoch 38/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6105 - acc: 0.6653\n",
+ "Epoch 39/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6117 - acc: 0.6624\n",
+ "Epoch 40/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6138 - acc: 0.6599\n",
+ "Epoch 41/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6101 - acc: 0.6616\n",
+ "Epoch 42/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6123 - acc: 0.6620\n",
+ "Epoch 43/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6116 - acc: 0.6632\n",
+ "Epoch 44/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6118 - acc: 0.6559\n",
+ "Epoch 45/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6115 - acc: 0.6618\n",
+ "Epoch 46/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6130 - acc: 0.6642\n",
+ "Epoch 47/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6089 - acc: 0.6603\n",
+ "Epoch 48/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6110 - acc: 0.6634\n",
+ "Epoch 49/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6107 - acc: 0.6670\n",
+ "Epoch 50/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6119 - acc: 0.6624\n",
+ ">>\n",
+ ".\n",
+ "Epoch 1/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6197 - acc: 0.6557\n",
+ "Epoch 2/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6191 - acc: 0.6562\n",
+ "Epoch 3/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6144 - acc: 0.6599\n",
+ "Epoch 4/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6151 - acc: 0.6593\n",
+ "Epoch 5/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6173 - acc: 0.6634\n",
+ "Epoch 6/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6189 - acc: 0.6546\n",
+ "Epoch 7/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6152 - acc: 0.6547\n",
+ "Epoch 8/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6239 - acc: 0.6484\n",
+ "Epoch 9/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6182 - acc: 0.6546\n",
+ "Epoch 10/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6126 - acc: 0.6632\n",
+ "Epoch 11/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6137 - acc: 0.6600\n",
+ "Epoch 12/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6144 - acc: 0.6601\n",
+ "Epoch 13/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6154 - acc: 0.6583\n",
+ "Epoch 14/50\n",
+ "8080/8080 [==============================] - 0s 39us/step - loss: 0.6156 - acc: 0.6569\n",
+ "Epoch 15/50\n",
+ "8080/8080 [==============================] - 0s 42us/step - loss: 0.6138 - acc: 0.6575\n",
+ "Epoch 16/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6143 - acc: 0.6618\n",
+ "Epoch 17/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6146 - acc: 0.6589\n",
+ "Epoch 18/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6125 - acc: 0.6593\n",
+ "Epoch 19/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6137 - acc: 0.6611\n",
+ "Epoch 20/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6156 - acc: 0.6579\n",
+ "Epoch 21/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6133 - acc: 0.6613\n",
+ "Epoch 22/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6141 - acc: 0.6615\n",
+ "Epoch 23/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6121 - acc: 0.6600\n",
+ "Epoch 24/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6141 - acc: 0.6582\n",
+ "Epoch 25/50\n",
+ "8080/8080 [==============================] - 0s 40us/step - loss: 0.6120 - acc: 0.6632\n",
+ "Epoch 26/50\n",
+ "8080/8080 [==============================] - 0s 40us/step - loss: 0.6111 - acc: 0.6625\n",
+ "Epoch 27/50\n",
+ "8080/8080 [==============================] - 0s 52us/step - loss: 0.6118 - acc: 0.6611\n",
+ "Epoch 28/50\n",
+ "8080/8080 [==============================] - 0s 48us/step - loss: 0.6122 - acc: 0.6624\n",
+ "Epoch 29/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6134 - acc: 0.6632\n",
+ "Epoch 30/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6129 - acc: 0.6603\n",
+ "Epoch 31/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6105 - acc: 0.6636\n",
+ "Epoch 32/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6123 - acc: 0.6662\n",
+ "Epoch 33/50\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6107 - acc: 0.6623\n",
+ "Epoch 34/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6162 - acc: 0.6599\n",
+ "Epoch 35/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6123 - acc: 0.6631\n",
+ "Epoch 36/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6124 - acc: 0.6618\n",
+ "Epoch 37/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6102 - acc: 0.6609\n",
+ "Epoch 38/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6104 - acc: 0.6653\n",
+ "Epoch 39/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6119 - acc: 0.6627\n",
+ "Epoch 40/50\n",
+ "8080/8080 [==============================] - 0s 56us/step - loss: 0.6110 - acc: 0.6625\n",
+ "Epoch 41/50\n",
+ "8080/8080 [==============================] - 0s 43us/step - loss: 0.6120 - acc: 0.6601\n",
+ "Epoch 42/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6125 - acc: 0.6598\n",
+ "Epoch 43/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6115 - acc: 0.6580\n",
+ "Epoch 44/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6135 - acc: 0.6614\n",
+ "Epoch 45/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6117 - acc: 0.6656\n",
+ "Epoch 46/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6133 - acc: 0.6624\n",
+ "Epoch 47/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6114 - acc: 0.6624\n",
+ "Epoch 48/50\n",
+ "8080/8080 [==============================] - 0s 34us/step - loss: 0.6082 - acc: 0.6676\n",
+ "Epoch 49/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6104 - acc: 0.6646\n",
+ "Epoch 50/50\n",
+ "8080/8080 [==============================] - 0s 34us/step - loss: 0.6081 - acc: 0.6642\n",
+ ">>\n",
+ ".\n",
+ "Epoch 1/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6171 - acc: 0.6542\n",
+ "Epoch 2/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6166 - acc: 0.6593\n",
+ "Epoch 3/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6170 - acc: 0.6568\n",
+ "Epoch 4/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6180 - acc: 0.6577\n",
+ "Epoch 5/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6155 - acc: 0.6580\n",
+ "Epoch 6/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6177 - acc: 0.6583\n",
+ "Epoch 7/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6147 - acc: 0.6584\n",
+ "Epoch 8/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6169 - acc: 0.6561\n",
+ "Epoch 9/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6142 - acc: 0.6608\n",
+ "Epoch 10/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6170 - acc: 0.6568\n",
+ "Epoch 11/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6167 - acc: 0.6606\n",
+ "Epoch 12/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6190 - acc: 0.6566\n",
+ "Epoch 13/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6146 - acc: 0.6629\n",
+ "Epoch 14/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6133 - acc: 0.6543\n",
+ "Epoch 15/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6139 - acc: 0.6616\n",
+ "Epoch 16/50\n",
+ "8080/8080 [==============================] - 0s 39us/step - loss: 0.6137 - acc: 0.6580\n",
+ "Epoch 17/50\n",
+ "8080/8080 [==============================] - 0s 39us/step - loss: 0.6140 - acc: 0.6610\n",
+ "Epoch 18/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6124 - acc: 0.6611\n",
+ "Epoch 19/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6131 - acc: 0.6616\n",
+ "Epoch 20/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6142 - acc: 0.6603\n",
+ "Epoch 21/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6125 - acc: 0.6603\n",
+ "Epoch 22/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6125 - acc: 0.6558\n",
+ "Epoch 23/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6116 - acc: 0.6632\n",
+ "Epoch 24/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6130 - acc: 0.6645\n",
+ "Epoch 25/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6121 - acc: 0.6579\n",
+ "Epoch 26/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6113 - acc: 0.6632\n",
+ "Epoch 27/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6113 - acc: 0.6619\n",
+ "Epoch 28/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6117 - acc: 0.6618\n",
+ "Epoch 29/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6134 - acc: 0.6621\n",
+ "Epoch 30/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6129 - acc: 0.6563\n",
+ "Epoch 31/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6097 - acc: 0.6649\n",
+ "Epoch 32/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6121 - acc: 0.6615\n",
+ "Epoch 33/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6119 - acc: 0.6644\n",
+ "Epoch 34/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6110 - acc: 0.6593\n",
+ "Epoch 35/50\n",
+ "8080/8080 [==============================] - 0s 40us/step - loss: 0.6110 - acc: 0.6634\n",
+ "Epoch 36/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6099 - acc: 0.6629\n",
+ "Epoch 37/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6104 - acc: 0.6623\n",
+ "Epoch 38/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6100 - acc: 0.6649\n",
+ "Epoch 39/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6098 - acc: 0.6626\n",
+ "Epoch 40/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6129 - acc: 0.6572\n",
+ "Epoch 41/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6087 - acc: 0.6642\n",
+ "Epoch 42/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6091 - acc: 0.6657\n",
+ "Epoch 43/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6108 - acc: 0.6619\n",
+ "Epoch 44/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6101 - acc: 0.6658\n",
+ "Epoch 45/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6097 - acc: 0.6618\n",
+ "Epoch 46/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6095 - acc: 0.6666\n",
+ "Epoch 47/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6086 - acc: 0.6675\n",
+ "Epoch 48/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6096 - acc: 0.6634\n",
+ "Epoch 49/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6130 - acc: 0.6593\n",
+ "Epoch 50/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6102 - acc: 0.6651\n",
+ ">>\n",
+ ".\n",
+ "Epoch 1/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6185 - acc: 0.6600\n",
+ "Epoch 2/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6160 - acc: 0.6578\n",
+ "Epoch 3/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6164 - acc: 0.6569\n",
+ "Epoch 4/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6160 - acc: 0.6584\n",
+ "Epoch 5/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6164 - acc: 0.6559\n",
+ "Epoch 6/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6143 - acc: 0.6624\n",
+ "Epoch 7/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6174 - acc: 0.6553\n",
+ "Epoch 8/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6137 - acc: 0.6609\n",
+ "Epoch 9/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6150 - acc: 0.6577\n",
+ "Epoch 10/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6195 - acc: 0.6590\n",
+ "Epoch 11/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6155 - acc: 0.6589\n",
+ "Epoch 12/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6131 - acc: 0.6616\n",
+ "Epoch 13/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6144 - acc: 0.6553\n",
+ "Epoch 14/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6132 - acc: 0.6636\n",
+ "Epoch 15/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6137 - acc: 0.6594\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "Epoch 16/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6151 - acc: 0.6592\n",
+ "Epoch 17/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6163 - acc: 0.6603\n",
+ "Epoch 18/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6127 - acc: 0.6625\n",
+ "Epoch 19/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6141 - acc: 0.6594\n",
+ "Epoch 20/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6129 - acc: 0.6619\n",
+ "Epoch 21/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6172 - acc: 0.6563\n",
+ "Epoch 22/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6136 - acc: 0.6636\n",
+ "Epoch 23/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6138 - acc: 0.6652\n",
+ "Epoch 24/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6140 - acc: 0.6603\n",
+ "Epoch 25/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6128 - acc: 0.6604\n",
+ "Epoch 26/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6113 - acc: 0.6636\n",
+ "Epoch 27/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6131 - acc: 0.6640\n",
+ "Epoch 28/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6125 - acc: 0.6592\n",
+ "Epoch 29/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6124 - acc: 0.6592\n",
+ "Epoch 30/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6137 - acc: 0.6609\n",
+ "Epoch 31/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6147 - acc: 0.6571\n",
+ "Epoch 32/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6145 - acc: 0.6587\n",
+ "Epoch 33/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6117 - acc: 0.6637\n",
+ "Epoch 34/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6093 - acc: 0.6646\n",
+ "Epoch 35/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6118 - acc: 0.6613\n",
+ "Epoch 36/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6105 - acc: 0.6605\n",
+ "Epoch 37/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6113 - acc: 0.6694\n",
+ "Epoch 38/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6102 - acc: 0.6613\n",
+ "Epoch 39/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6120 - acc: 0.6587\n",
+ "Epoch 40/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6142 - acc: 0.6579\n",
+ "Epoch 41/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6108 - acc: 0.6603\n",
+ "Epoch 42/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6090 - acc: 0.6627\n",
+ "Epoch 43/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6089 - acc: 0.6642\n",
+ "Epoch 44/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6101 - acc: 0.6635\n",
+ "Epoch 45/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6106 - acc: 0.6615\n",
+ "Epoch 46/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6108 - acc: 0.6595\n",
+ "Epoch 47/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6106 - acc: 0.6595\n",
+ "Epoch 48/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6118 - acc: 0.6605\n",
+ "Epoch 49/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6105 - acc: 0.6639\n",
+ "Epoch 50/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6099 - acc: 0.6632\n",
+ ">>\n",
+ ".\n",
+ "Epoch 1/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6169 - acc: 0.6575\n",
+ "Epoch 2/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6150 - acc: 0.6625\n",
+ "Epoch 3/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6166 - acc: 0.6583\n",
+ "Epoch 4/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6130 - acc: 0.6609\n",
+ "Epoch 5/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6135 - acc: 0.6609\n",
+ "Epoch 6/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6166 - acc: 0.6599\n",
+ "Epoch 7/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6159 - acc: 0.6601\n",
+ "Epoch 8/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6176 - acc: 0.6538\n",
+ "Epoch 9/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6129 - acc: 0.6620\n",
+ "Epoch 10/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6139 - acc: 0.6574\n",
+ "Epoch 11/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6157 - acc: 0.6621\n",
+ "Epoch 12/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6133 - acc: 0.6645\n",
+ "Epoch 13/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6139 - acc: 0.6623\n",
+ "Epoch 14/50\n",
+ "8080/8080 [==============================] - 0s 40us/step - loss: 0.6141 - acc: 0.6592\n",
+ "Epoch 15/50\n",
+ "8080/8080 [==============================] - 0s 50us/step - loss: 0.6149 - acc: 0.6613\n",
+ "Epoch 16/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6122 - acc: 0.6613\n",
+ "Epoch 17/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6137 - acc: 0.6604\n",
+ "Epoch 18/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6147 - acc: 0.6592\n",
+ "Epoch 19/50\n",
+ "8080/8080 [==============================] - 0s 39us/step - loss: 0.6145 - acc: 0.6618\n",
+ "Epoch 20/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6138 - acc: 0.6594\n",
+ "Epoch 21/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6126 - acc: 0.6573\n",
+ "Epoch 22/50\n",
+ "8080/8080 [==============================] - 0s 35us/step - loss: 0.6120 - acc: 0.6624\n",
+ "Epoch 23/50\n",
+ "8080/8080 [==============================] - 0s 61us/step - loss: 0.6112 - acc: 0.6636\n",
+ "Epoch 24/50\n",
+ "8080/8080 [==============================] - 0s 44us/step - loss: 0.6115 - acc: 0.6653\n",
+ "Epoch 25/50\n",
+ "8080/8080 [==============================] - 0s 36us/step - loss: 0.6153 - acc: 0.6599\n",
+ "Epoch 26/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6142 - acc: 0.6589\n",
+ "Epoch 27/50\n",
+ "8080/8080 [==============================] - 0s 40us/step - loss: 0.6139 - acc: 0.6605\n",
+ "Epoch 28/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6146 - acc: 0.6573\n",
+ "Epoch 29/50\n",
+ "8080/8080 [==============================] - 0s 39us/step - loss: 0.6104 - acc: 0.6656\n",
+ "Epoch 30/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6112 - acc: 0.6662\n",
+ "Epoch 31/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6109 - acc: 0.6640\n",
+ "Epoch 32/50\n",
+ "8080/8080 [==============================] - 0s 37us/step - loss: 0.6139 - acc: 0.6624\n",
+ "Epoch 33/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6146 - acc: 0.6631\n",
+ "Epoch 34/50\n",
+ "8080/8080 [==============================] - 0s 46us/step - loss: 0.6145 - acc: 0.6625\n",
+ "Epoch 35/50\n",
+ "8080/8080 [==============================] - 0s 47us/step - loss: 0.6099 - acc: 0.6631\n",
+ "Epoch 36/50\n",
+ "8080/8080 [==============================] - 0s 51us/step - loss: 0.6102 - acc: 0.6640\n",
+ "Epoch 37/50\n",
+ "8080/8080 [==============================] - 0s 50us/step - loss: 0.6119 - acc: 0.6592\n",
+ "Epoch 38/50\n",
+ "8080/8080 [==============================] - 0s 39us/step - loss: 0.6109 - acc: 0.6623\n",
+ "Epoch 39/50\n",
+ "8080/8080 [==============================] - 0s 48us/step - loss: 0.6128 - acc: 0.6577\n",
+ "Epoch 40/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6111 - acc: 0.6636\n",
+ "Epoch 41/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6096 - acc: 0.6634\n",
+ "Epoch 42/50\n",
+ "8080/8080 [==============================] - 0s 39us/step - loss: 0.6104 - acc: 0.6606\n",
+ "Epoch 43/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6090 - acc: 0.6645\n",
+ "Epoch 44/50\n",
+ "8080/8080 [==============================] - 0s 42us/step - loss: 0.6103 - acc: 0.6641\n",
+ "Epoch 45/50\n",
+ "8080/8080 [==============================] - 0s 45us/step - loss: 0.6112 - acc: 0.6630\n",
+ "Epoch 46/50\n",
+ "8080/8080 [==============================] - 0s 43us/step - loss: 0.6098 - acc: 0.6649\n",
+ "Epoch 47/50\n",
+ "8080/8080 [==============================] - 0s 45us/step - loss: 0.6103 - acc: 0.6624\n",
+ "Epoch 48/50\n"
+ ]
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "8080/8080 [==============================] - 0s 39us/step - loss: 0.6118 - acc: 0.6597\n",
+ "Epoch 49/50\n",
+ "8080/8080 [==============================] - 0s 38us/step - loss: 0.6102 - acc: 0.6611\n",
+ "Epoch 50/50\n",
+ "8080/8080 [==============================] - 0s 43us/step - loss: 0.6083 - acc: 0.6649\n"
+ ]
+ }
+ ],
+ "source": [
+ "#Logistic regression (SGD)\n",
+ "cv = StratifiedKFold(n_splits=10)\n",
+ "results = np.zeros_like(y, dtype=float)\n",
+ "\n",
+ "tprs = []\n",
+ "aucs = []\n",
+ "mean_fpr = np.linspace(0, 1, 100)\n",
+ "\n",
+ "i = 0\n",
+ "keras.backend.clear_session()\n",
+ "prbs=[]\n",
+ "for mod in range(5):\n",
+ " print('>>')\n",
+ " curr_try = 0\n",
+ " while curr_try <10:\n",
+ " print('.')\n",
+ "\n",
+ " model = Sequential()\n",
+ " model.add(Dense(1, activation='sigmoid'))\n",
+ " # Compile model\n",
+ " opt = keras.optimizers.Adam(epsilon=None, amsgrad=True)\n",
+ " model.compile(loss='binary_crossentropy', optimizer=opt, metrics=['accuracy'])\n",
+ "\n",
+ " # Fit the model\n",
+ " history = model.fit(X, y, epochs=50, batch_size=64, verbose=0)\n",
+ " if history.history['acc'][-1] > 0.53:\n",
+ " break\n",
+ " else:\n",
+ " curr_try += 1\n",
+ "\n",
+ " # Fit the model\n",
+ " model.fit(X, y, epochs=50, batch_size=64, verbose=1)\n",
+ "\n",
+ " # evaluate the model\n",
+ " probas_ = model.predict(x_pad)\n",
+ " prbs.append(probas_)\n",
+ "# Average the predictions\n",
+ "probas_ = np.mean(np.hstack(prbs), axis=1)\n",
+ "results = probas_"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "df_results = pd.DataFrame(data={\"name\": names, 'pred': results})\n",
+ "df_results.to_csv('/home/drewe/notebooks/genotox/pred.lr.v3-ext-Padel-2D.csv', index=None)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD8CAYAAABn919SAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAADYJJREFUeJzt3W+MZXddx/H3FwoaperiTptN7TBiFsKGxLaZbGqaYEkFyz5gIQHTTahrXB1ASiDypIEHbuQJMbZNTBpkSDddDVRQwG5M/VPXkgqh1Sms7ZYNttYVFza721RLE6Oy7dcH91DHcab33D/n3JnvvF/JzZx77rn3fH/n3vnsb879nd9GZiJJ2vpeNusCJEnTYaBLUhEGuiQVYaBLUhEGuiQVYaBLUhEGuiQVYaBLUhEGuiQVcUmfO9u5c2cuLCz0uUtJ2vIeeeSRpzNzbth2vQb6wsICKysrfe5Skra8iPiXNtt5ykWSijDQJakIA12SijDQJakIA12SijDQJakIA12SijDQJakIA12Siuj1SlFpqMOH11+WNJQ9dEkqwkCXpCIMdEkqwkCXpCIMdEkqwkCXpCIMdEkqwkCXpCIMdEkqwkCXpCKGBnpEXBkRD0TEqYh4PCI+1Kw/HBHfiYgTzW1f9+VKkjbSZi6Xi8BHMvPrEXEp8EhE3N88dkdm/m535UmS2hoa6Jl5FjjbLD8XEaeAK7ouTJI0mpHOoUfEAnA18HCz6paIeDQijkTEjinXJkkaQetAj4hXAV8APpyZ3wM+CfwMcBWDHvxtGzxvKSJWImLlwoULUyhZkrSeVoEeEa9gEOafycwvAmTmucx8PjNfAD4N7F3vuZm5nJmLmbk4Nzc3rbolSWu0GeUSwF3Aqcy8fdX6Xas2eydwcvrlSZLaajPK5TrgZuCxiDjRrPsocCAirgISOA28t5MKJUmttBnl8hUg1nnovumXI0kal1eKSlIRBrokFWGgS1IRBrokFWGgS1IRBrokFWGgS1IRBrokFWGgS1IRBrokFdFmLhdpNg4fXn9Z0rrsoUtSEQa6JBVhoEtSEQa6JBVhoEtSEQa6JBVhoEtSEQa6JBVhoEtSEQa6JBVhoEtSEQa6JBVhoEtSEQa6JBVhoEtSEc6HrtlzrnNpKuyhS1IRBrokFWGgS1IRQwM9Iq6MiAci4lREPB4RH2rWvzoi7o+IJ5qfO7ovV5K0kTY99IvARzLzDcC1wAciYg9wK3A8M3cDx5v7kqQZGRromXk2M7/eLD8HnAKuAPYDR5vNjgLv6KpISdJwI51Dj4gF4GrgYeDyzDwLg9AHLtvgOUsRsRIRKxcuXJisWknShloHekS8CvgC8OHM/F7b52XmcmYuZubi3NzcODVKklpoFegR8QoGYf6ZzPxis/pcROxqHt8FnO+mRElSG21GuQRwF3AqM29f9dAx4GCzfBC4d/rlSZLaanPp/3XAzcBjEXGiWfdR4BPA5yPiEPBt4N3dlChJamNooGfmV4DY4OEbpluOJGlcXikqSUUY6JJUhIEuSUUY6JJUhIEuSUUY6JJUhIEuSUUY6JJUhIEuSUUY6JJUhIEuSUUY6JJUhIEuSUUY6JJUhIEuSUUY6JJUhIEuSUUY6JJUhIEuSUUY6JJUhIEuSUUY6JJUhIEuSUUY6JJUhIEuSUVcMusCNEOHD6+/3NXrTnMfkv4fe+iSVISBLklFGOiSVMTQQI+IIxFxPiJOrlp3OCK+ExEnmtu+bsuUJA3Tpod+N3DjOuvvyMyrmtt90y1LkjSqoYGemQ8Cz/RQiyRpApOcQ78lIh5tTsnsmFpFkqSxjDsO/ZPAx4Fsft4G/Op6G0bEErAEMD8/P+butGVNa+y549mlocbqoWfmucx8PjNfAD4N7H2JbZczczEzF+fm5satU5I0xFiBHhG7Vt19J3Byo20lSf0YesolIu4Brgd2RsQZ4LeA6yPiKganXE4D7+2wRklSC0MDPTMPrLP6rg5qkSRNwCtFJakIA12SijDQJakIA12SijDQJakIA12SijDQJakIA12SijDQJakIA12Sihh3+lxtJ05dOxsed43IHrokFWGgS1IRBrokFWGgS1IRBrokFWGgS1IRBrokFeE49K3Oscrbg+/z+LbRsbOHLklFGOiSVISBLklFGOiSVISBLklFGOiSVITDFqUutRkyt42G1U2dx+7/sIcuSUUY6JJUhIEuSUUMDfSIOBIR5yPi5Kp1r46I+yPiiebnjm7LlCQN06aHfjdw45p1twLHM3M3cLy5L0maoaGBnpkPAs+sWb0fONosHwXeMeW6JEkjGvcc+uWZeRag+XnZ9EqSJI2j83HoEbEELAHMz893vTsNM82xulXH/W40tnlte2fV/s0+9noz1LcZj0sPxu2hn4uIXQDNz/MbbZiZy5m5mJmLc3NzY+5OkjTMuIF+DDjYLB8E7p1OOZKkcbUZtngP8DXg9RFxJiIOAZ8A3hIRTwBvae5LkmZo6Dn0zDywwUM3TLkWSdIEvFJUkoow0CWpCANdkopwPnSNZpuO75W2AnvoklSEgS5JRRjoklSEgS5JRRjoklSEgS5JRThscTtwqOH/2gxTu/a972m1ue9j13YaY73IHrokFWGgS1IRBrokFWGgS1IRBrokFWGgS1IRBrokFeE49Koqj9XdqG2bpc2T1LdZ2qAtyR66JBVhoEtSEQa6JBVhoEtSEQa6JBVhoEtSEQa6JBXhOHTVMer82RXm2K7QhtUqtGGG7KFLUhEGuiQVYaBLUhETnUOPiNPAc8DzwMXMXJxGUZKk0U3jS9E3Z+bTU3gdSdIEPOUiSUVM2kNP4K8iIoFPZeby2g0iYglYApifn59wdwK6Gdq13Ye/TdrmCsesC30OGZ3kdYp8/iftoV+XmdcAbwM+EBFvWrtBZi5n5mJmLs7NzU24O0nSRiYK9Mz8bvPzPPAlYO80ipIkjW7sQI+IH42IS3+wDLwVODmtwiRJo5nkHPrlwJci4gev89nM/IupVCVJGtnYgZ6ZTwE/O8VaJEkTcNiiJBVhoEtSEU6fu1W0GRu7GcbzVuOxWN+k47Y9rp2why5JRRjoklSEgS5JRRjoklSEgS5JRRjoklSEgS5JRTgOfTNzrO7mtpnfn2ldt9Dn9Q99K/j/CthDl6QiDHRJKsJAl6QiDHRJKsJAl6QiDHRJKsJhi10YdajXjIc6qYjKn50+27aFfx/toUtSEQa6JBVhoEtSEQa6JBVhoEtSEQa6JBVhoEtSEVtnHPpmGRva9fjxLTbuVcVV+zyO2p5Jpg9eu76HY2kPXZKKMNAlqQgDXZKKmCjQI+LGiPhWRDwZEbdOqyhJ0ujGDvSIeDlwJ/A2YA9wICL2TKswSdJoJumh7wWezMynMvO/gT8C9k+nLEnSqCYJ9CuAf111/0yzTpI0A5GZ4z0x4t3AL2bmrzX3bwb2ZuYH12y3BCw1d18PfAvYCTw9btEF2H7bb/u3r3Ha/5rMnBu20SQXFp0Brlx1/6eA767dKDOXgeXV6yJ
iJTMXJ9j3lmb7bb/tt/1dvPYkp1z+HtgdET8dEa8EbgKOTacsSdKoxu6hZ+bFiLgF+Evg5cCRzHx8apVJkkYy0VwumXkfcN8YT10evklptn97s/3bW2ftH/tLUUnS5uKl/5JURKeBPmxqgIj4oYj4XPP4wxGx0GU9fWvR/t+MiG9GxKMRcTwiXjOLOrvSdmqIiHhXRGRElBr50Kb9EfFLzWfg8Yj4bN81dqnF538+Ih6IiG80vwP7ZlFnFyLiSEScj4iTGzweEfF7zbF5NCKumcqOM7OTG4MvSv8JeC3wSuAfgD1rtvkN4Peb5ZuAz3VVT9+3lu1/M/AjzfL7t1v7m+0uBR4EHgIWZ113z+//buAbwI7m/mWzrrvn9i8D72+W9wCnZ133FNv/JuAa4OQGj+8D/hwI4Frg4Wnst8seepupAfYDR5vlPwFuiIjosKY+DW1/Zj6Qmf/R3H2IwVj+KtpODfFx4HeA/+yzuB60af+vA3dm5r8BZOb5nmvsUpv2J/BjzfKPs851LFtVZj4IPPMSm+wH/iAHHgJ+IiJ2TbrfLgO9zdQAL26TmReBZ4Gf7LCmPo06NcIhBv9iVzG0/RFxNXBlZv5Zn4X1pM37/zrgdRHx1Yh4KCJu7K267rVp/2HgPRFxhsFouQ+yfXQydUqX/wXdej3ttUNq2myzVbVuW0S8B1gEfr7Tivr1ku2PiJcBdwC/0ldBPWvz/l/C4LTL9Qz+OvvbiHhjZv57x7X1oU37DwB3Z+ZtEfFzwB827X+h+/JmrpPs67KH3mZqgBe3iYhLGPzZ9VJ/pmwlraZGiIhfAD4GvD0z/6un2vowrP2XAm8EvhwRpxmcRzxW6IvRtp//ezPz+5n5zwzmOdrdU31da9P+Q8DnATLza8APM5jnZDtolQ+j6jLQ20wNcAw42Cy/C/ibbL4xKGBo+5tTDp9iEOaVzp/CkPZn5rOZuTMzFzJzgcF3CG/PzJXZlDt1bT7/f8rgi3EiYieDUzBP9Vpld9q0/9vADQAR8QYGgX6h1ypn5xjwy81ol2uBZzPz7MSv2vE3vfuAf2TwbffHmnW/zeAXFwZv4B8DTwJ/B7x21t9O99z+vwbOASea27FZ19xn+9ds+2UKjXJp+f4HcDvwTeAx4KZZ19xz+/cAX2UwAuYE8NZZ1zzFtt8DnAW+z6A3fgh4H/C+Ve/9nc2xeWxan32vFJWkIrxSVJKKMNAlqQgDXZKKMNAlqQgDXZKKMNAlqQgDXZKKMNAlqYj/AekIrXFzJKplAAAAAElFTkSuQmCC\n",
+ "text/plain": [
+ "<Figure size 432x288 with 1 Axes>"
+ ]
+ },
+ "metadata": {
+ "needs_background": "light"
+ },
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "plt.hist(results,100, color='red', alpha=0.5)\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "/fast_data/drewe/software/envs/tf_gpu/lib/python3.6/site-packages/sklearn/linear_model/_logistic.py:940: ConvergenceWarning: lbfgs failed to converge (status=1):\n",
+ "STOP: TOTAL NO. of ITERATIONS REACHED LIMIT.\n",
+ "\n",
+ "Increase the number of iterations (max_iter) or scale the data as shown in:\n",
+ " https://scikit-learn.org/stable/modules/preprocessing.html\n",
+ "Please also refer to the documentation for alternative solver options:\n",
+ " https://scikit-learn.org/stable/modules/linear_model.html#logistic-regression\n",
+ " extra_warning_msg=_LOGISTIC_SOLVER_CONVERGENCE_MSG)\n"
+ ]
+ }
+ ],
+ "source": [
+ "#Logistic regression (scikit)\n",
+ "cv = StratifiedKFold(n_splits=10)\n",
+ "results = np.zeros_like(y, dtype=float)\n",
+ "\n",
+ "tprs = []\n",
+ "aucs = []\n",
+ "mean_fpr = np.linspace(0, 1, 100)\n",
+ "\n",
+ "i = 0\n",
+ "keras.backend.clear_session()\n",
+ "prbs=[]\n",
+ "model = LogisticRegression(random_state=0)\n",
+ "model.fit(X, y)\n",
+ "probas_ = model.predict_proba(x_pad)[:, 1]\n",
+ "results = probas_\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "df_results = pd.DataFrame(data={\"name\": names, 'pred': results})\n",
+ "df_results.to_csv('/home/drewe/notebooks/genotox/pred.lr2.v3-ext-Padel-2D.csv', index=None)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX4AAAD8CAYAAABw1c+bAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAAEOlJREFUeJzt3X+MZWV9x/H3R5A2KvXXDqjAuLZFUiQFzWTVkLZQFGFDoG3Q7qYqtugoVVOjaWprokT/aWusSYspbmWDNopoW5TURSCWBm1AXXBREChIsYxLWH4JGrR29ds/5mycDnd2LvfcuTM7z/uV3Mz58dzzPPfZez/z7LnnPJOqQpLUjietdgMkSZNl8EtSYwx+SWqMwS9JjTH4JakxBr8kNcbgl6TGGPyS1BiDX5Iac/BqN2CQDRs21MaNG1e7GZJ0wLjhhhseqKqpYcquyeDfuHEjO3fuXO1mSNIBI8l3hy3rqR5JaozBL0mNMfglqTEGvyQ1xuCXpMYY/JLUGINfkhpj8EtSYwx+SWrMmrxzV1pR558/eFlqhCN+SWqMwS9JjTH4JakxBr8kNcbgl6TGGPyS1BiDX5IaY/BLUmMMfklqjMEvSY1xygZpUtbzVBHr+bWtQ8sGf5LtwBnAnqo6rtt2KXBMV+QZwPer6oQBz70b+AHwU2BvVc2Mqd2SpBENM+K/GLgA+MS+DVX1+/uWk3wIeGQ/zz+5qh4YtYGSpPFaNvir6tokGwftSxLgNcBvj7dZkqSV0vfL3d8A7quqO5bYX8BVSW5IMtuzLknSGPT9cncrcMl+9p9YVbuTHAZcneS2qrp2UMHuF8MswPT0dM9mSZKWMvKIP8nBwO8Bly5Vpqp2dz/3AJcBm/ZTdltVzVTVzNTU1KjNkiQto8+pnlcAt1XV3KCdSZ6a5NB9y8CpwM096pMkjcGywZ/kEuA64Jgkc0nO7XZtYdFpniTPS7KjWz0c+EqSm4CvAV+oqi+Or+mSpFEMc1XP1iW2v2HAtt3A5m75LuD4nu2TJI2ZUzZIUmMMfklqjMEvSY0x+CWpMQa/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mN8Y+tS1o5S/3hdf8g+6pyxC9JjTH4JakxBr8kNcbgl6TGGPyS1BiDX5IaY/BLUmOG+WPr25PsSXLzgm3nJ/lekl3dY/MSzz0tye1J7kzy7nE2XJI0mmFG/BcDpw3Y/uGqOqF77Fi8M8lBwEeA04Fjga1Jju3TWElSf8sGf1VdCzw0wrE3AXdW1V1V9RPg08BZIxxHkjRGfaZseFuS1wM7gXdV1cOL9h8B3LNgfQ546VIHSzILzAJMT0/3aJYkjWDhNBLrfEqJUb/c/XvgV4ATgHuBDw0okwHbaqkDVtW2qpqpqpmpqakRmyVJWs5IwV9V91XVT6vqZ8A/MH9aZ7E54KgF60cCu0epT5I0PiMFf5LnLlj9XeDmAcW+Dhyd5AVJDgG2AJePUp8kaXyWPcef5BLgJGBDkjngfcBJSU5g/tTN3cCbu7LPAz5WVZuram+StwFXAgcB26vqlhV5FZKkoS0b/FW1dcDmi5YouxvYvGB9B/C4Sz0lSavHO3clqTEGvyQ1xuCXpMYY/JLUGINfkhrTZ8oGSaNqaHoArT2O+CWpMQa/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xikb1LbWp05o/fU3yhG/JDVm2eBPsj3JniQ3L9j2wSS3JflmksuSPGOJ596d5FtJdiXZOc6GS5JGM8yI/2LgtEXbrgaOq6pfB/4T+PP9PP/kqjqhqmZGa6IkaZyWDf6quhZ4aNG2q6pqb7d6PXDkCrRNkrQCxnGO/4+AK5bYV8BVSW5IMjuGuiRJPfW6qifJe4C9wCeXKHJiVe1OchhwdZLbuv9BDDrWLDALMD093adZkqT9GHnEn+Qc4AzgD6qqBpWpqt3dzz3AZcCmpY5XVduqaqaqZqampkZtliRpGSMFf5LTgD8Dzqyqx5Yo89Qkh+5bBk4Fbh5UVpI0OcNcznkJcB1wTJK5JOcCFwCHMn/6ZleSC7uyz0uyo3vq4cBXktwEfA34QlV9cUVehSRpaMue46+qrQM2X7RE2d3A5m75LuD4Xq2TJI2dUzbowLB4OgGnFxidfdc8p2yQpMYY/JLUGINfkhpj8EtSYwx+SWqMwS9JjTH4JakxBr8kNcbgl6TGGPyS1BinbJC09i2cZsIpJ3pzxC9JjTH4JakxBr8kNcbgl6TGGPyS1BiDX5IaM1TwJ9meZE+Smxdse1aSq5Pc0f185hLPPacrc0eSc8bVcEnSaIYd8V8MnLZo27uBL1XV0cCXuvX/J8mzgPcBLwU2Ae9b6heEJGkyhgr+qroWeGjR5rOAj3fLHwd+Z8BTXwVcXVUPVdXDwNU8/heIJGmC+pzjP7yq7gXofh42oMwRwD0L1ue6bZKkVbLSUzZkwLYaWDCZBWYBpqenV7JNWs9W4tb+SU4X4NQEmoA+I/77kjwXoPu5Z0CZOeCoBetHArsHHayqtlXVTFXNTE1N9WiWJGl/+gT/5cC+q3TOAT4/oMyVwKlJntl9qXtqt02StEqGvZzzEuA64Jgkc0nOBf4SeGWSO4BXduskmUnyMYCqegj4APD17vH+bpskaZUMdY6/qrYuseuUAWV3Am9csL4d2D5S6yRJY+edu5LUGINfkhpj8EtSYwx+SWqMwS9JjTH4JakxKz1lg9aDVqYRGOa1LdUXwyz3qXeU505yeolJ1rverMLnyxG/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMY4ZYPWr9WaamJcdfWdEmFcr38l+nFc011oJCOP+JMck2TXgsejSd6xqMxJSR5ZUOa9/ZssSepj5BF/Vd0OnACQ5CDge8BlA4p+uarOGLUeSdJ4jesc/ynAd6rqu2M6niRphYwr+LcAlyyx7+VJbkpyRZIXjak+SdKIegd/kkOAM4HPDth9I/D8qjoe+Dvgc/s5zmySnUl23n///X2bJUlawjhG/KcDN1bVfYt3VNWjVfXDbnkH8OQkGwYdpKq2VdVMVc1MTU2NoVmSpEHGEfxbWeI0T5LnJEm3vKmr78Ex1ClJGlGv6/iTPAV4JfDmBdveAlBVFwJnA+cl2Qv8CNhSVdWnTklSP72Cv6oeA569aNuFC5YvAC7oU4ckabycskGSGuOUDRq/J3rb/YF6m/6B1NZhrNbrWal6V2taiANgOgpH/JLUGINfkhpj8EtSYwx+SWqMwS9JjTH4JakxBr8kNcbgl6TGGPyS1Bjv3NXkPNG7FdfbnbHrwVr4N1nNO2DX6J24T5QjfklqjMEvSY0x+CWpMQa/JDXG4Jekxhj8ktQYg1+SGtM7+JPcneRbSXYl2Tlgf5L8bZI7k3wzyUv61ilJGt24buA6uaoeWGLf6cDR3eOlwN93PyVJq2ASp3rOAj5R864HnpHkuROoV5I0wDhG/AVclaS
Aj1bVtkX7jwDuWbA+1227d2GhJLPALMD09PQYmqUVMck/VD3O5xxgt9Tv10r10Vozzik+1sp7dRzPHYNxjPhPrKqXMH9K561JfnPR/gx4Tj1uQ9W2qpqpqpmpqakxNEuSNEjv4K+q3d3PPcBlwKZFReaAoxasHwns7luvJGk0vYI/yVOTHLpvGTgVuHlRscuB13dX97wMeKSq7kWStCr6nuM/HLgsyb5jfaqqvpjkLQBVdSGwA9gM3Ak8BvxhzzolST30Cv6qugs4fsD2CxcsF/DWPvVIksbHO3clqTEGvyQ1xuCXpMYY/JLUGINfkhozrknadKAa1xQMk74F/UCcgkBrz0pP97FG36eO+CWpMQa/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xikb1oNxTbuwEtZae6RhrPP3rSN+SWrMyMGf5Kgk1yS5NcktSf5kQJmTkjySZFf3eG+/5kqS+upzqmcv8K6qujHJocANSa6uqm8vKvflqjqjRz2SpDEaecRfVfdW1Y3d8g+AW4EjxtUwSdLKGMs5/iQbgRcDXx2w++VJbkpyRZIXjaM+SdLoel/Vk+RpwD8D76iqRxftvhF4flX9MMlm4HPA0UscZxaYBZienu7bLEnSEnqN+JM8mfnQ/2RV/cvi/VX1aFX9sFveATw5yYZBx6qqbVU1U1UzU1NTfZolSdqPPlf1BLgIuLWq/maJMs/pypFkU1ffg6PWKUnqr8+pnhOB1wHfSrKr2/YXwDRAVV0InA2cl2Qv8CNgS1VVjzolST2NHPxV9RUgy5S5ALhg1DokSeOXtTgAn5mZqZ07d652M0Y3zikUhjnWuLZLWl09PptJbqiqmWHKOmWDJDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mNMfglqTEGvyQ1xuCXpMb0no9/zRnndAnjOOY4p03o83qcpkFSxxG/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5Jakyv4E9yWpLbk9yZ5N0D9v9Ckku7/V9NsrFPfZKk/kYO/iQHAR8BTgeOBbYmOXZRsXOBh6vqV4EPA381an2SpPHoM+LfBNxZVXdV1U+ATwNnLSpzFvDxbvmfgFOS7PcPtEuSVlaf4D8CuGfB+ly3bWCZqtoLPAI8u0edkqSeUlWjPTF5NfCqqnpjt/46YFNVvX1BmVu6MnPd+ne6Mg8OON4sMNutHgPcvp/qNwAPjNTw9cV+mGc/zLMffq7Fvnh+VU0NU7DPXD1zwFEL1o8Edi9RZi7JwcDTgYcGHayqtgHbhqk4yc6qmnnCLV5n7Id59sM8++Hn7Iv963Oq5+vA0UlekOQQYAtw+aIylwPndMtnA/9Wo/4XQ5I0FiOP+Ktqb5K3AVcCBwHbq+qWJO8HdlbV5cBFwD8muZP5kf6WcTRakjS6XtMyV9UOYMeibe9dsPxj4NV96ljCUKeEGmA/zLMf5tkPP2df7MfIX+5Kkg5MTtkgSY1Z08HvlBDzhuiHdyb5dpJvJvlSkuevRjtX2nL9sKDc2Ukqybq8qmOYfkjymu49cUuST026jZMwxOdiOsk1Sb7RfTY2r0Y716SqWpMP5r8w/g7wy8AhwE3AsYvK/DFwYbe8Bbh0tdu9Sv1wMvCUbvm8VvuhK3cocC1wPTCz2u1epffD0cA3gGd264etdrtXqR+2Aed1y8cCd692u9fKYy2P+J0SYt6y/VBV11TVY93q9czfU7HeDPN+APgA8NfAjyfZuAkaph/eBHykqh4GqKo9E27jJAzTDwX8Urf8dB5/n1Gz1nLwOyXEvGH6YaFzgStWtEWrY9l+SPJi4Kiq+tdJNmzChnk/vBB4YZL/SHJ9ktMm1rrJGaYfzgdem2SO+asP346AnpdzrrBBI/fFlyANU+ZAN/RrTPJaYAb4rRVt0erYbz8keRLzM8C+YVINWiXDvB8OZv50z0nM/+/vy0mOq6rvr3DbJmmYftgKXFxVH0rycubvKTquqn628s1b29byiP+JTAnBclNCHMCG6QeSvAJ4D3BmVf3PhNo2Scv1w6HAccC/J7kbeBlw+Tr8gnfYz8Xnq+p/q+q/mJ/36ugJtW9ShumHc4HPAFTVdcAvMj+HT/PWcvA7JcS8ZfuhO8XxUeZDfz2ez4Vl+qGqHqmqDVW1sao2Mv9dx5lVtXN1mrtihvlcfI75L/xJsoH5Uz93TbSVK2+Yfvhv4BSAJL/GfPDfP9FWrlFrNvi7c/b7poS4FfhMdVNCJDmzK3YR8OxuSoh3Akte4negGrIfPgg8Dfhskl1JFn8ADnhD9sO6N2Q/XAk8mOTbwDXAn9aAGXEPZEP2w7uANyW5CbgEeMM6HBiOxDt3Jakxa3bEL0laGQa/JDXG4Jekxhj8ktQYg1+SGmPwS1JjDH5JaozBL0mN+T/AVPLHAsZClgAAAABJRU5ErkJggg==\n",
+ "text/plain": [
+ "<Figure size 432x288 with 1 Axes>"
+ ]
+ },
+ "metadata": {
+ "needs_background": "light"
+ },
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "plt.hist(results,100, color='red', alpha=0.5)\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "cv = StratifiedKFold(n_splits=10)\n",
+ "results = np.zeros_like(y, dtype=float)\n",
+ "\n",
+ "tprs = []\n",
+ "aucs = []\n",
+ "mean_fpr = np.linspace(0, 1, 100)\n",
+ "\n",
+ "i = 0\n",
+ "keras.backend.clear_session()\n",
+ "prbs=[]\n",
+ "model = RandomForestClassifier(n_estimators=1000, random_state=0, max_leaf_nodes=200)\n",
+ "# Fit the model\n",
+ "model.fit(X, y)\n",
+ "\n",
+ "\n",
+ "probas_ = model.predict_proba(x_pad)[:, 1]\n",
+ "results = probas_"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "df_results = pd.DataFrame(data={\"name\": names, 'pred': results})\n",
+ "df_results.to_csv('/home/drewe/notebooks/genotox/pred.rf.v3-ext-Padel-2D.csv', index=None)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 14,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD8CAYAAABn919SAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDMuMC4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvOIA7rQAADExJREFUeJzt3VGMrOVZB/D/02IvbDFSWQipnJ5qTolotMQT0qQ3NLUVTQQarSmJDY3Uo6a0GhsjUROJvWk0yo3EhFpSLiyo1QZssBWxDakRIyi2UKy0iBUhIKWmTYxa8PFiB7M93cPOznw7s/Oe3y/ZzPd9++1+z7uz+8+778wzU90dADbfi9ZdAADTEOgAgxDoAIMQ6ACDEOgAgxDoAIMQ6ACDEOgAgxDoAIM4Y5UXO/vss/vo0aOrvCTAxrvvvvue7u6tvc5baaAfPXo099577yovCbDxqupf5jnPkgvAIAQ6wCAEOsAgBDrAIAQ6wCAEOsAgBDrAIAQ6wCAEOsAgVtopCgu77rrdt4H/Z4YOMAiBDjAIgQ4wCIEOMAiBDjAIgQ4wCIEOMAiBDjAIjUVsHk1GsCszdIBBCHSAQQh0gEEIdIBBCHSAQewZ6FV1flV9oqoeqqoHq+rnZsdfXlV3VtXDs9uzDr5cAE5lnhn6s0ne093fleS1Sd5ZVRcmuTbJXd19LMlds30A1mTPQO/uJ7r772bbX03yUJJXJLk8yc2z025OcsVBFQnA3va1hl5VR5NclORvkpzb3U8k26Gf5JypiwNgfnN3ilbVy5L8cZKf7+6vVNW8X3ciyYkkOXLkyCI1MjqdnzCJuWboVfVN2Q7z3+/uP5kdfrKqzpt9/rwkT+32td19Y3cf7+7jW1tbU9QMwC7meZZLJflAkoe6+7d3fOr2JFfNtq9Kctv05QEwr3mWXF6X5G1JPlNV98+O/XKS9yX5w6q6OskXk7zlYEoEYB57Bnp3fyrJqRbM3zBtOQAsSqcowCAEOsAgBDrAIAQ6wCC8BR3T0ygEa2GGDjAIgQ4wCIEOMAiBDjAIgQ4wCIEOMAiBDjAIgQ4wCI1FjE+jE6cJM3SAQQh0gEEIdIBBCHSAQQh0gEEIdIBBCHSAQQh0gEEIdIBB6BTl9LVJHaSbVCtrY4YOMAiBDjAIgQ4wCIEOMAiBDjAIgQ4wCIEOMAiBDjAIgQ4wCIEOMAiBDjAIgQ4wCIEOMAiBDjCIPQO9qm6qqqeq6oEdx66rqn+rqvtnHz98sGUCsJd5ZugfTHLpLsev7+7XzD7umLYsAPZrz0Dv7ruTPLOCWgBYwjJr6NdU1adnSzJnTVYRAAtZ9C3ofjfJe5P07Pa3kvzkbidW1YkkJ5LkyJEjC16O05K3WoN9WWiG3t1Pdvdz3f2/Sd6f5OIXOPfG7j7e3ce3trYWrROAPSwU6FV13o7dNyd54FTnArAaey65VNUtSS5JcnZVPZbk15JcUlWvyfaSy6NJfvoAawRgDnsGendfucvhDxxALQAsQacowCAEOsAgBDrAIAQ6wCAWbSyC08PO5iaNTrs7+efi57Q2ZugAgxDoAIMQ6ACDEOgAgxDoAIMQ6ACDEOgAgxDoAIPQWMQ3mrJRRGMOrIwZOsAgBDrAIAQ6wCAEOsAgBDrAIAQ6wCAEOsAgBDrAIAQ6wCB0irK4ZbpA19U1uorrHobu2MNQAytnhg4wCIEOMAiBDjAIgQ4wCIEOMAiBDjAIgQ4wCIEOMAiNRaezeRtO5jlvqnNYv1M1JWlWOvTM0AEGIdABBiHQAQYh0AEGIdABBrFnoFfVTVX1VFU9sOPYy6vqzqp6eHZ71sGWCcBe5pmhfzDJpScduzbJXd19LMlds30A1mjPQO/uu5M8c9Lhy5PcPNu+OckVE9cFwD4tuoZ+bnc/kSSz23OmKwmARRx4p2hVnUhyIkmOHDly0JeDw+uwdVoetnpY2qIz9Cer6rwkmd0+daoTu/vG7j7e3ce3trYWvBwAe1k00G9PctVs+6okt01TDgCLmudpi7ck+eskF1TVY1V1dZL3JXljVT2c5I2zfQDWaM819O6+8hSfesPEtQCwBJ2iAIMQ6ACDEOgAgxDoAIPwFnSj2tSmkU2qdadT1T3VcZiDGTrAIAQ6wCAEOsAgBDrAIAQ6wCAEOsAgBDrAIAQ6wCA0Fp1uTpfGlRHGud8xjDBmlmKGDjAIgQ4wCIEOMAiBDjAIgQ4wCIEOMAiBDjAIgQ4wCIEOMAidomy20bojD3o8U33/F/o+m/r2hwMwQwcYhEAHGIRABxiEQAcYhEAHGIRABxiEQAcYhEAHGITGIkjma4bZ1CaZw1i35qMDYYYOMAiBDjAIgQ4wCIEOMAiBDjCIpZ7lUlWPJvlqkueSPNvdx6coCoD9m+Jpi6/v7qcn+D4ALMGSC8Aglg30TvLnVXVfVZ2YoiAAFrPsksvruvvxqjonyZ1V9Y/dfffOE2ZBfyJJjhw5suTl+Aa67E4/B9FluUndsbpMT2mpGXp3Pz67fSrJR5JcvMs5N3b38e4+vrW1tczlAHgBCwd6Vb20qs58fjvJm5I8MFVhAOzPMksu5yb5SFU9/30+1N0fm6QqAPZt4UDv7keSfN+EtQCwBE9bBBiEQAcYhEAHGIRABxiEt6DbFMs0U2i+4DDTKDQZM3SAQQh0gEEIdIBBCHSAQQh0gEEIdIBBCHSAQQh0gEEIdIBB6BSFk+lWnM6oP8t5ultPPr6Cn4UZOsAgBDrAIAQ6wCAEOsAgBDrAIAQ6wCAEOsAgBDrAIDQWrdKpmhG8BReLOt1/X/xNfR0zdIBBCHSAQQh0gEEIdIBBCHSAQQh0gEEIdIBBCHSAQWxOY9E6GwWWufYytZ5GDRGQZP53/2FXZugAgxDoAIMQ6ACDEOgAgxDoAINYKtCr6tKq+lxVfb6qrp2qKAD2b+FAr6oXJ7khyQ8luTDJlVV14VSFAbA/y8zQL07y+e5+pLv/J8mtSS6fpiwA9muZQH9Fkn/dsf/Y7BgAa1DdvdgXVr0lyQ929ztm+29LcnF3v+uk804kOTHbvSDJ5xYvd25nJ3l6BddZJ2Mcw+hjHH18yWrG+Mru3trrpGVa/x9Lcv6O/W9P8vjJJ3X3jUluXOI6+1ZV93b38VVec9WMcQyjj3H08SWHa4zLLLn8bZJjVfWqqnpJkrcmuX2asgDYr4Vn6N39bFVdk+TjSV6c5KbufnCyygDYl6VebbG770hyx0S1TGmlSzxrYoxjGH2Mo48vOURjXPhBUQAOF63/AIPY6EDf66UHquoXquqzVfXpqrqrql65jjqXMccYf6aqPlNV91fVpzaxW3fel5Coqh+rqq6qQ/GMgnnNcR++var+fXYf3l9V71hHncuY5z6sqh+f/T0+WFUfWnWNy5rjfrx+x334T1X1Hysvsrs38iPbD8R+Icl3JHlJkn9IcuFJ57w+yTfPtn82yR+su+4DGOO37Ni+LMnH1l331GOcnXdmkruT3JPk+Lrrnvg+fHuS31l3rQc8xmNJ/j7JWbP9c9Zd99R
jPOn8d2X7iSIrrXOTZ+h7vvRAd3+iu/9ztntPtp8rv0nmGeNXduy+NMmmPSgy70tIvDfJbyT5r1UWN4HT4SUy5hnjTyW5obu/nCTd/dSKa1zWfu/HK5PcspLKdtjkQN/vSw9cneTPDrSi6c01xqp6Z1V9IduB9+4V1TaVPcdYVRclOb+7P7rKwiYy7+/pj86WBj9cVefv8vnDbJ4xvjrJq6vqr6rqnqq6dGXVTWPuvJkt7b4qyV+uoK6vs8mBXrsc23V2WlU/keR4kt880IqmN9cYu/uG7v7OJL+U5FcPvKppveAYq+pFSa5P8p6VVTStee7DP01ytLu/N8lfJLn5wKua1jxjPCPbyy6XZHv2+ntV9a0HXNeU5s6bbDdZfri7nzvAena1yYE+10sPVNUPJPmVJJd193+vqLapzDXGHW5NcsWBVjS9vcZ4ZpLvSfLJqno0yWuT3L5BD4zueR9295d2/G6+P8n3r6i2qczze/pYktu6+2vd/c/Zfk2nYyuqbwr7+Vt8a9aw3JJkox8UPSPJI9n+1+b5Bym++6RzLsr2AxnH1l3vAY7x2I7tH0ly77rrnnqMJ53/yWzWg6Lz3Ifn7dh+c5J71l33AYzx0iQ3z7bPzvbyxbetu/Ypxzg774Ikj2bW47Pqj6U6RdepT/HSA1X169kOtduzvcTysiR/VFVJ8sXuvmxtRe/TnGO8ZvZfyNeSfDnJVeureP/mHOPGmnN8766qy5I8m+SZbD/rZWPMOcaPJ3lTVX02yXNJfrG7v7S+qvdnH7+nVya5tWfpvmo6RQEGsclr6ADsINABBiHQAQYh0AEGIdABBiHQAQYh0AEGIdABBvF/nDSaG548kqEAAAAASUVORK5CYII=\n",
+ "text/plain": [
+ "<Figure size 432x288 with 1 Axes>"
+ ]
+ },
+ "metadata": {
+ "needs_background": "light"
+ },
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "plt.hist(results,100, color='red', alpha=0.5)\n",
+ "plt.show()"
+ ]
+ }
+ ],
+ "metadata": {
+ "kernelspec": {
+ "display_name": "Python 3",
+ "language": "python",
+ "name": "python3"
+ },
+ "language_info": {
+ "codemirror_mode": {
+ "name": "ipython",
+ "version": 3
+ },
+ "file_extension": ".py",
+ "mimetype": "text/x-python",
+ "name": "python",
+ "nbconvert_exporter": "python",
+ "pygments_lexer": "ipython3",
+ "version": "3.6.8"
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}