import os
import warnings
import time
import copy
import numpy as np
import pandas as pd
from sklearn.metrics import mean_squared_error, explained_variance_score, r2_score
from sklearn.model_selection import train_test_split, KFold
from sklearn.linear_model import LinearRegression, Lasso, lasso_path, lars_path, LassoLarsIC
from sklearn.neural_network import MLPRegressor
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' #Hide messy TensorFlow warnings
warnings.filterwarnings("ignore") #Hide warning spam (numpy deprecations etc.)
import keras
from keras.models import Model, Sequential, load_model
from keras.layers import Input, Dense, Activation, Dropout, LSTM, GRU
from keras.regularizers import l1
#plotly charts
from plotly.offline import download_plotlyjs, init_notebook_mode, plot, iplot
from plotly.graph_objs import *
import plotly.figure_factory as ff
init_notebook_mode(connected=True)
Using TensorFlow backend.
# splines chart
# R code
"""
testdata <- data.frame(seq(from=1, to=1000))
testdata['x'] <- seq(from=0, to=3*pi, length.out=1000)
testdata['y'] <- -cos(testdata$x) + testdata$x/(3*pi) + rnorm(1000)*0.25
# keep a sorted random subsample of 300 points to make the data sparser
nrows <- 300
indexes <- sort(sample(nrow(testdata), nrows))
testdata <- testdata[indexes, ]
plot(testdata$x, testdata$y, main="Scatterplot", xlab="x", ylab="y", pch=1)
# fit smoothing splines with increasing degrees of freedom (more df = more wiggle)
for (df in c(2, 3, 4, 5, 6, 20, 40, 80, 160, 240)) {
  fit <- smooth.spline(testdata$x, testdata$y, df=df)
  col <- paste0("fit", df)
  testdata[[col]] <- predict(fit, x=testdata$x)$y
  plot(testdata$x, testdata[[col]], main="Scatterplot", xlab="x", ylab="y", pch=1)
}
# let leave-one-out cross-validation pick the smoothing parameter
fit <- smooth.spline(testdata$x, testdata$y, cv=TRUE)
testdata$fitcv <- predict(fit, x=testdata$x)$y
plot(testdata$x, testdata$fitcv, main="Scatterplot", xlab="x", ylab="y", pch=1)
write.csv(testdata, "splines.csv")
print(fit)
"""
splines = pd.read_csv('splines.csv')
# pull out the columns by name rather than by position
x = splines['x'].values
y = splines['y'].values
fit2 = splines['fit2'].values
fit3 = splines['fit3'].values
fit4 = splines['fit4'].values
fit5 = splines['fit5'].values
fit6 = splines['fit6'].values
fit20 = splines['fit20'].values
fit40 = splines['fit40'].values
fit80 = splines['fit80'].values
fit160 = splines['fit160'].values
fit240 = splines['fit240'].values
fitcv = splines['fitcv'].values
plotdata = []
plotdata.append(Scatter(x=x,
                        y=y,
                        name='Raw data',
                        mode='markers',
                        marker=dict(
                            size=3,
                            line=dict(
                                width=2,
                                color='rgba(128, 128, 192, .75)',
                            )
                        )
                        ))
plotdata.append(Scatter(x=x,
                        y=fit2,
                        name='Linear (high bias / low variance)',
                        mode='lines',
                        line=dict(
                            color='orange',
                            width=2)
                        ))
plotdata.append(Scatter(x=x,
                        y=fit4,
                        name='Underfit (high bias / low variance)',
                        mode='lines',
                        line=dict(
                            width=2,
                            color='blue',
                        )
                        ))
plotdata.append(Scatter(x=x,
                        y=fitcv,
                        name='Lowest CV error',
                        mode='lines',
                        line=dict(
                            color='green',
                            width=5)
                        ))
plotdata.append(Scatter(x=x,
                        y=fit240,
                        name='Overfit (high variance / low bias)',
                        mode='lines',
                        line=dict(
                            width=2,
                            color='rgb(192,0,0)')
                        ))
layout = Layout(
    yaxis=dict(
        autorange=True))
fig = Figure(data=plotdata, layout=layout)
iplot(fig)  # pass image='png' to save the notebook with a static image
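# The same tradeoff can be read off numerically: train error falls steadily
# with model complexity while held-out error follows a U shape. A quick
# sketch, with polynomial degree standing in for spline df and a hold-out
# split standing in for CV (the degrees below are arbitrary choices):
x_train, x_test, y_train, y_test = train_test_split(x, y, test_size=0.3, random_state=0)
for degree in [1, 3, 5, 10, 20]:
    coefs = np.polyfit(x_train, y_train, degree)
    mse_train = mean_squared_error(y_train, np.polyval(coefs, x_train))
    mse_test = mean_squared_error(y_test, np.polyval(coefs, x_test))
    print("degree %2d: train MSE %.4f, test MSE %.4f" % (degree, mse_train, mse_test))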
# create a data set: a sinusoid plus trend plus random noise
nobs = 2000
x = np.linspace(0, 3*np.pi, num=nobs)
y = -np.cos(x) + x/(3*np.pi) + np.random.normal(0, 0.25, nobs)
x2 = np.linspace(0, 15*np.pi, num=nobs)
y1 = -np.sin(x2) + x2/(3*np.pi) + np.random.normal(0, 0.25, nobs)
y2 = -np.cos(x2) + x2/(3*np.pi) + np.random.normal(0, 0.25, nobs)
# optionally keep a random 10% of the points to make the data sparser
#idx = np.sort(np.random.choice(nobs, nobs // 10, replace=False))
#x, y = x[idx], y[idx]
# chart it
def mychart(*args):
    """Plot one or more dependent series against the first argument.

    args: 2-d n x 1 (or 1-d) arrays; the first is the independent
    variable, the rest are dependent variables plotted on the y axis.
    """
    x = args[0].reshape(-1)  # flatten to a 1-d array
    data = []
    for i in range(1, len(args)):
        data.append(Scatter(x=x,
                            y=args[i].reshape(-1),
                            mode='markers'))
    layout = Layout(
        yaxis=dict(
            autorange=True))
    fig = Figure(data=data, layout=layout)
    return iplot(fig)  # pass image='png' to save the notebook with a static image
mychart(x, y)
mychart(x2, y1, y2)
# peek at the spline fits exported from R
pd.read_csv('splines.csv')
 | Unnamed: 0 | seq.from...1..to...1000. | x | y | fit2 | fit3 | fit4 | fit5 | fit6 | fit20 | fit40 | fit80 | fit160 | fit240 | fitcv
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
0 | 7 | 7 | 0.056605 | -0.894776 | -0.027209 | -0.231666 | -0.719842 | -1.027753 | -1.117166 | -0.879794 | -0.781292 | -0.858112 | -0.892830 | -0.892830 | -1.046363 |
1 | 14 | 14 | 0.122645 | -0.639564 | -0.019815 | -0.212231 | -0.676326 | -0.971100 | -1.059450 | -0.880533 | -0.784838 | -0.700174 | -0.645704 | -0.645704 | -1.004173 |
2 | 29 | 29 | 0.264158 | -0.858491 | -0.003971 | -0.170590 | -0.583081 | -0.849676 | -0.935686 | -0.877862 | -0.843645 | -0.775587 | -0.809460 | -0.809460 | -0.913081 |
3 | 32 | 32 | 0.292461 | -0.971105 | -0.000803 | -0.162264 | -0.564433 | -0.825382 | -0.910901 | -0.875433 | -0.863846 | -0.815897 | -0.824983 | -0.824983 | -0.894611 |
4 | 34 | 34 | 0.311329 | -0.441006 | 0.001310 | -0.156713 | -0.552002 | -0.809184 | -0.894369 | -0.873216 | -0.878066 | -0.846558 | -0.836715 | -0.836715 | -0.882226 |
5 | 35 | 35 | 0.320763 | -0.812196 | 0.002366 | -0.153938 | -0.545787 | -0.801084 | -0.886100 | -0.871895 | -0.885199 | -0.863802 | -0.847200 | -0.847200 | -0.876009 |
6 | 37 | 37 | 0.339632 | -1.107151 | 0.004479 | -0.148389 | -0.533356 | -0.784882 | -0.869555 | -0.868770 | -0.899093 | -0.902248 | -0.881082 | -0.881082 | -0.863520 |
7 | 42 | 42 | 0.386803 | -1.002422 | 0.009760 | -0.134517 | -0.502283 | -0.744365 | -0.828147 | -0.857574 | -0.925840 | -0.994434 | -0.994687 | -0.994687 | -0.831932 |
8 | 45 | 45 | 0.415105 | -1.195662 | 0.012929 | -0.126197 | -0.483642 | -0.720046 | -0.803265 | -0.848041 | -0.931322 | -1.022434 | -1.038721 | -1.038721 | -0.812685 |
9 | 48 | 48 | 0.443408 | -0.967867 | 0.016097 | -0.117879 | -0.465006 | -0.695720 | -0.778353 | -0.836071 | -0.925555 | -1.014256 | -1.035864 | -1.035864 | -0.793187 |
10 | 53 | 53 | 0.490579 | -0.757764 | 0.021378 | -0.104024 | -0.433956 | -0.655163 | -0.736759 | -0.810934 | -0.893917 | -0.940020 | -0.951372 | -0.951372 | -0.760116 |
11 | 60 | 60 | 0.556619 | -0.882360 | 0.028772 | -0.084649 | -0.390525 | -0.598362 | -0.678368 | -0.766014 | -0.817432 | -0.794839 | -0.783219 | -0.783219 | -0.712597 |
12 | 61 | 61 | 0.566053 | -0.747627 | 0.029828 | -0.081884 | -0.384326 | -0.590246 | -0.670011 | -0.758767 | -0.804596 | -0.777175 | -0.764024 | -0.764024 | -0.705691 |
13 | 66 | 66 | 0.613224 | -0.823935 | 0.035109 | -0.068069 | -0.353349 | -0.549667 | -0.628167 | -0.719926 | -0.736604 | -0.707282 | -0.696022 | -0.696022 | -0.670730 |
14 | 72 | 72 | 0.669829 | -0.523625 | 0.041445 | -0.051521 | -0.316238 | -0.500979 | -0.577829 | -0.668861 | -0.651602 | -0.642244 | -0.648028 | -0.648028 | -0.627878 |
15 | 92 | 92 | 0.858513 | -0.622196 | 0.062566 | 0.003297 | -0.193244 | -0.338960 | -0.409136 | -0.486504 | -0.415746 | -0.399757 | -0.434665 | -0.434665 | -0.478897 |
16 | 94 | 94 | 0.877382 | -0.275332 | 0.064678 | 0.008742 | -0.181025 | -0.322799 | -0.392199 | -0.468776 | -0.399977 | -0.378763 | -0.402004 | -0.402004 | -0.463549 |
17 | 98 | 98 | 0.915119 | -0.238423 | 0.068901 | 0.019608 | -0.156638 | -0.290509 | -0.358291 | -0.434364 | -0.374523 | -0.342332 | -0.328025 | -0.328025 | -0.432644 |
18 | 100 | 100 | 0.933987 | -0.123531 | 0.071013 | 0.025028 | -0.144473 | -0.274381 | -0.341322 | -0.417781 | -0.365058 | -0.328908 | -0.291046 | -0.291046 | -0.417092 |
19 | 101 | 101 | 0.943421 | -0.386109 | 0.072069 | 0.027735 | -0.138398 | -0.266322 | -0.332833 | -0.409645 | -0.361105 | -0.325847 | -0.280151 | -0.280151 | -0.409291 |
20 | 103 | 103 | 0.962290 | -0.727077 | 0.074181 | 0.033142 | -0.126262 | -0.250213 | -0.315848 | -0.393661 | -0.354619 | -0.332577 | -0.291137 | -0.291137 | -0.393637 |
21 | 105 | 105 | 0.981158 | -0.092702 | 0.076292 | 0.038539 | -0.114149 | -0.234118 | -0.298853 | -0.378022 | -0.349691 | -0.358963 | -0.356338 | -0.356338 | -0.377915 |
22 | 106 | 106 | 0.990592 | -0.178196 | 0.077348 | 0.041233 | -0.108100 | -0.226076 | -0.290352 | -0.370308 | -0.347513 | -0.374982 | -0.398651 | -0.398651 | -0.370026 |
23 | 109 | 109 | 1.018895 | -0.648567 | 0.080515 | 0.049303 | -0.089988 | -0.201973 | -0.264834 | -0.347455 | -0.340412 | -0.403746 | -0.490174 | -0.490174 | -0.346246 |
24 | 113 | 113 | 1.056632 | -0.366328 | 0.084738 | 0.060024 | -0.065924 | -0.169893 | -0.230780 | -0.317344 | -0.326463 | -0.352344 | -0.408403 | -0.408403 | -0.314260 |
25 | 115 | 115 | 1.075500 | -0.486962 | 0.086849 | 0.065368 | -0.053930 | -0.153881 | -0.213741 | -0.302373 | -0.318127 | -0.315264 | -0.327709 | -0.327709 | -0.298144 |
26 | 118 | 118 | 1.103803 | -0.263969 | 0.090016 | 0.073360 | -0.035991 | -0.129900 | -0.188169 | -0.279950 | -0.304810 | -0.282838 | -0.231644 | -0.231644 | -0.273814 |
27 | 120 | 120 | 1.122671 | 0.053620 | 0.092127 | 0.078673 | -0.024067 | -0.113939 | -0.171114 | -0.264966 | -0.295037 | -0.275382 | -0.191377 | -0.191377 | -0.257487 |
28 | 128 | 128 | 1.198145 | -0.612992 | 0.100570 | 0.099790 | 0.023316 | -0.050340 | -0.102857 | -0.203814 | -0.241514 | -0.288249 | -0.248066 | -0.248066 | -0.191293 |
29 | 130 | 130 | 1.217013 | -0.087615 | 0.102681 | 0.105033 | 0.035078 | -0.034507 | -0.085788 | -0.188020 | -0.222981 | -0.286090 | -0.319283 | -0.319283 | -0.174515 |
... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
270 | 883 | 883 | 8.320975 | 1.090773 | 0.893429 | 0.977505 | 1.064809 | 1.134710 | 1.186539 | 1.277704 | 1.207429 | 1.028304 | 0.953941 | 0.953941 | 1.270916 |
271 | 885 | 885 | 8.339844 | 1.000214 | 0.895564 | 0.984115 | 1.077449 | 1.150509 | 1.203084 | 1.298065 | 1.242501 | 1.059962 | 0.978819 | 0.978819 | 1.287540 |
272 | 889 | 889 | 8.377580 | 1.070650 | 0.899835 | 0.997383 | 1.102828 | 1.182170 | 1.236144 | 1.339964 | 1.329076 | 1.247071 | 1.206912 | 1.206912 | 1.320577 |
273 | 897 | 897 | 8.453054 | 1.666208 | 0.908379 | 1.024105 | 1.153949 | 1.245697 | 1.302093 | 1.423501 | 1.510355 | 1.728304 | 1.809082 | 1.809082 | 1.385514 |
274 | 902 | 902 | 8.500225 | 2.164331 | 0.913720 | 1.040920 | 1.186119 | 1.285510 | 1.343161 | 1.471858 | 1.587764 | 1.832285 | 1.896199 | 1.896199 | 1.425103 |
275 | 910 | 910 | 8.575699 | 1.598121 | 0.922266 | 1.067986 | 1.237899 | 1.349337 | 1.408590 | 1.540403 | 1.640360 | 1.720218 | 1.699297 | 1.699297 | 1.486653 |
276 | 912 | 912 | 8.594567 | 1.674038 | 0.924403 | 1.074781 | 1.250897 | 1.365312 | 1.424890 | 1.555704 | 1.641260 | 1.671724 | 1.646877 | 1.646877 | 1.501679 |
277 | 914 | 914 | 8.613436 | 1.414879 | 0.926539 | 1.081586 | 1.263914 | 1.381293 | 1.441165 | 1.570257 | 1.638037 | 1.625459 | 1.610072 | 1.610072 | 1.516558 |
278 | 916 | 916 | 8.632304 | 1.742757 | 0.928676 | 1.088402 | 1.276950 | 1.397280 | 1.457418 | 1.584188 | 1.632486 | 1.585905 | 1.584810 | 1.584810 | 1.531294 |
279 | 926 | 926 | 8.726646 | 1.570245 | 0.939362 | 1.122621 | 1.342384 | 1.477277 | 1.538342 | 1.648121 | 1.609439 | 1.527509 | 1.535119 | 1.535119 | 1.603040 |
280 | 928 | 928 | 8.745515 | 1.643629 | 0.941499 | 1.129490 | 1.355517 | 1.493288 | 1.554462 | 1.660238 | 1.609349 | 1.537043 | 1.539531 | 1.539531 | 1.617032 |
281 | 932 | 932 | 8.783252 | 1.318655 | 0.945774 | 1.143251 | 1.381823 | 1.525316 | 1.586643 | 1.684328 | 1.617467 | 1.568683 | 1.562898 | 1.562898 | 1.644693 |
282 | 933 | 933 | 8.792686 | 1.618969 | 0.946843 | 1.146695 | 1.388408 | 1.533324 | 1.594676 | 1.690366 | 1.621550 | 1.578440 | 1.571771 | 1.571771 | 1.651545 |
283 | 950 | 950 | 8.953067 | 2.012024 | 0.965012 | 1.205484 | 1.500747 | 1.669515 | 1.730547 | 1.791749 | 1.773085 | 1.792390 | 1.810661 | 1.810661 | 1.764372 |
284 | 960 | 960 | 9.047409 | 1.624429 | 0.975702 | 1.240220 | 1.567092 | 1.749627 | 1.809931 | 1.845555 | 1.864767 | 1.904327 | 1.900205 | 1.900205 | 1.827779 |
285 | 961 | 961 | 9.056844 | 1.996643 | 0.976771 | 1.243698 | 1.573734 | 1.757636 | 1.817850 | 1.850507 | 1.871202 | 1.912575 | 1.902017 | 1.902017 | 1.834010 |
286 | 962 | 962 | 9.066278 | 1.777070 | 0.977840 | 1.247176 | 1.580376 | 1.765645 | 1.825766 | 1.855370 | 1.876986 | 1.919627 | 1.903155 | 1.903155 | 1.840221 |
287 | 965 | 965 | 9.094581 | 1.932855 | 0.981047 | 1.257615 | 1.600309 | 1.789670 | 1.849495 | 1.869428 | 1.890573 | 1.928173 | 1.912138 | 1.912138 | 1.858747 |
288 | 967 | 967 | 9.113449 | 2.122920 | 0.983185 | 1.264578 | 1.613602 | 1.805684 | 1.865300 | 1.878369 | 1.896750 | 1.919566 | 1.929082 | 1.929082 | 1.871011 |
289 | 971 | 971 | 9.151186 | 1.849789 | 0.987461 | 1.278507 | 1.640196 | 1.837708 | 1.896880 | 1.895414 | 1.905165 | 1.883622 | 1.952431 | 1.952431 | 1.895362 |
290 | 973 | 973 | 9.170054 | 2.085739 | 0.989599 | 1.285474 | 1.653497 | 1.853718 | 1.912656 | 1.903625 | 1.908997 | 1.867122 | 1.934527 | 1.934527 | 1.907461 |
291 | 978 | 978 | 9.217225 | 1.845186 | 0.994944 | 1.302898 | 1.686757 | 1.893738 | 1.952067 | 1.923569 | 1.921577 | 1.858799 | 1.789746 | 1.789746 | 1.937531 |
292 | 980 | 980 | 9.236094 | 1.432716 | 0.997082 | 1.309869 | 1.700063 | 1.909744 | 1.967820 | 1.931353 | 1.927733 | 1.868867 | 1.738972 | 1.738972 | 1.949500 |
293 | 986 | 986 | 9.292699 | 2.110915 | 1.003496 | 1.330786 | 1.739989 | 1.957758 | 2.015055 | 1.954210 | 1.948939 | 1.941426 | 1.871168 | 1.871168 | 1.985259 |
294 | 988 | 988 | 9.311567 | 2.150585 | 1.005634 | 1.337760 | 1.753298 | 1.973761 | 2.030794 | 1.961701 | 1.956536 | 1.976895 | 2.053107 | 2.053107 | 1.997144 |
295 | 990 | 990 | 9.330436 | 1.970253 | 1.007772 | 1.344733 | 1.766609 | 1.989764 | 2.046531 | 1.969119 | 1.963709 | 2.006993 | 2.207295 | 2.207295 | 2.009016 |
296 | 993 | 993 | 9.358738 | 2.289554 | 1.010979 | 1.355194 | 1.786574 | 2.013768 | 2.070134 | 1.980067 | 1.972180 | 2.016287 | 2.120013 | 2.120013 | 2.026805 |
297 | 994 | 994 | 9.368173 | 1.766312 | 1.012048 | 1.358681 | 1.793230 | 2.021769 | 2.078001 | 1.983659 | 1.974087 | 2.004609 | 1.957363 | 1.957363 | 2.032730 |
298 | 995 | 995 | 9.377607 | 1.850753 | 1.013117 | 1.362168 | 1.799885 | 2.029770 | 2.085867 | 1.987228 | 1.975583 | 1.986822 | 1.770286 | 1.770286 | 2.038654 |
299 | 999 | 999 | 9.415344 | 1.948860 | 1.017394 | 1.376116 | 1.826506 | 2.061773 | 2.117333 | 2.001399 | 1.979900 | 1.906551 | 1.950956 | 1.950956 | 2.062340 |
300 rows × 15 columns
# fit with sklearn's MLPRegressor over a grid of hidden-layer sizes
import itertools
from plotly import tools

layer1_sizes = [1, 2, 4, 8]
layer2_sizes = [1, 2, 4, 8]
def run_grid(build_model_fn, layer1_sizes, layer2_sizes, x, y, epochs=None):
    nrows = len(layer1_sizes)
    ncols = len(layer2_sizes)
    hyperparameter_list = list(itertools.product(layer1_sizes, layer2_sizes))
    subplot_titles = ["%d units, %d units" %
                      (layer1_size, layer2_size) for (layer1_size, layer2_size) in hyperparameter_list]
    fig = tools.make_subplots(rows=nrows,
                              cols=ncols,
                              subplot_titles=subplot_titles)
    x = x.reshape(-1, 1)
    for count, (layer1_size, layer2_size) in enumerate(hyperparameter_list):
        print("Layer 1 units: %d, Layer 2 units %d:" % (layer1_size, layer2_size))
        print("Running experiment %d of %d : %d %d" % (count+1, len(hyperparameter_list), layer1_size, layer2_size))
        model = build_model_fn(hidden_layer_sizes=(layer1_size, layer2_size),
                               activation='tanh',
                               max_iter=10000, tol=1e-10,
                               solver='lbfgs')
        if epochs:  # a Keras-style model that needs an epoch count
            model.fit(x, y, epochs=epochs)
        else:       # sklearn-style fit
            model.fit(x, y)
        y_pred = model.predict(x)
        train_score = mean_squared_error(y, y_pred)
        print("%s Train MSE: %s" % (time.strftime("%H:%M:%S"), str(train_score)))
        print("%s Train R-squared: %.6f" % (time.strftime("%H:%M:%S"), 1 - train_score/y.var()))
        trace = Scatter(
            x=x.reshape(-1),
            y=y_pred.reshape(-1),
            name='fit',
            mode='markers',
            marker=dict(size=2)
        )
        # subplots are laid out row-major: row = count // ncols, col = count % ncols
        fig.append_trace(trace, count // ncols + 1, count % ncols + 1)
    return iplot(fig)
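# The epochs argument is there so the same grid can later be driven by a
# Keras model instead of MLPRegressor. A hypothetical build function
# compatible with run_grid's call signature might look like this sketch
# (the name build_keras_model, the layer stack, and the optimizer are
# assumptions, not part of the original notebook):
def build_keras_model(hidden_layer_sizes, activation='tanh', **kwargs):
    # **kwargs swallows the sklearn-specific arguments run_grid passes
    # (max_iter, tol, solver), which Keras does not use
    model = Sequential()
    model.add(Dense(hidden_layer_sizes[0], input_dim=1, activation=activation))
    model.add(Dense(hidden_layer_sizes[1], activation=activation))
    model.add(Dense(1, activation='linear'))
    model.compile(loss='mean_squared_error', optimizer='adam')
    return model
# e.g. run_grid(build_keras_model, layer1_sizes, layer2_sizes, x, y, epochs=500)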
run_grid(MLPRegressor, layer1_sizes, layer2_sizes, x, y)
This is the format of your plot grid:
[ (1,1) x1,y1 ]    [ (1,2) x2,y2 ]    [ (1,3) x3,y3 ]    [ (1,4) x4,y4 ]
[ (2,1) x5,y5 ]    [ (2,2) x6,y6 ]    [ (2,3) x7,y7 ]    [ (2,4) x8,y8 ]
[ (3,1) x9,y9 ]    [ (3,2) x10,y10 ]  [ (3,3) x11,y11 ]  [ (3,4) x12,y12 ]
[ (4,1) x13,y13 ]  [ (4,2) x14,y14 ]  [ (4,3) x15,y15 ]  [ (4,4) x16,y16 ]
Layer 1 units: 1, Layer 2 units 1:
Running experiment 1 of 16 : 1 1
10:59:18 Train MSE: 0.45123687843071003
10:59:18 Train R-squared: 0.360194
Layer 1 units: 1, Layer 2 units 2:
Running experiment 2 of 16 : 1 2
10:59:18 Train MSE: 0.4501787438606833
10:59:18 Train R-squared: 0.361694
Layer 1 units: 1, Layer 2 units 4:
Running experiment 3 of 16 : 1 4
10:59:19 Train MSE: 0.449447662622746
10:59:19 Train R-squared: 0.362731
Layer 1 units: 1, Layer 2 units 8:
Running experiment 4 of 16 : 1 8
10:59:19 Train MSE: 0.44960316888835167
10:59:19 Train R-squared: 0.362511
Layer 1 units: 2, Layer 2 units 1:
Running experiment 5 of 16 : 2 1
10:59:19 Train MSE: 0.4493035300677745
10:59:19 Train R-squared: 0.362935
Layer 1 units: 2, Layer 2 units 2:
Running experiment 6 of 16 : 2 2
10:59:19 Train MSE: 0.3769890459200242
10:59:19 Train R-squared: 0.465470
Layer 1 units: 2, Layer 2 units 4:
Running experiment 7 of 16 : 2 4
10:59:20 Train MSE: 0.06425241086551657
10:59:20 Train R-squared: 0.908897
Layer 1 units: 2, Layer 2 units 8:
Running experiment 8 of 16 : 2 8
10:59:20 Train MSE: 0.06460457742750453
10:59:20 Train R-squared: 0.908398
Layer 1 units: 4, Layer 2 units 1:
Running experiment 9 of 16 : 4 1
10:59:20 Train MSE: 0.0646415331604307
10:59:20 Train R-squared: 0.908345
Layer 1 units: 4, Layer 2 units 2:
Running experiment 10 of 16 : 4 2
10:59:20 Train MSE: 0.44931457571832617
10:59:20 Train R-squared: 0.362920
Layer 1 units: 4, Layer 2 units 4:
Running experiment 11 of 16 : 4 4
10:59:20 Train MSE: 0.06389679796982471
10:59:20 Train R-squared: 0.909401
Layer 1 units: 4, Layer 2 units 8:
Running experiment 12 of 16 : 4 8
10:59:21 Train MSE: 0.0636990840102081
10:59:21 Train R-squared: 0.909681
Layer 1 units: 8, Layer 2 units 1:
Running experiment 13 of 16 : 8 1
10:59:21 Train MSE: 0.4493031378114045
10:59:21 Train R-squared: 0.362936
Layer 1 units: 8, Layer 2 units 2:
Running experiment 14 of 16 : 8 2
10:59:21 Train MSE: 0.06367152233486137
10:59:21 Train R-squared: 0.909721
Layer 1 units: 8, Layer 2 units 4:
Running experiment 15 of 16 : 8 4
10:59:22 Train MSE: 0.06352021208219046
10:59:22 Train R-squared: 0.909935
Layer 1 units: 8, Layer 2 units 8:
Running experiment 16 of 16 : 8 8
10:59:22 Train MSE: 0.06369116044117601
10:59:22 Train R-squared: 0.909693
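# Sanity check on those numbers: the noise added to y is N(0, 0.25), so the
# irreducible error is 0.25**2 = 0.0625. The best cells in the grid (train
# MSE around 0.0635) sit essentially at that floor, while the cells stuck
# near 0.449 never escape a high-bias fit; capacity beyond roughly 2x4
# units buys almost nothing on this data.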