
Friday, July 17, 2020

Understanding Principal Component Analysis in Python




0_MacOS_Python_setup.txt
# Install on Terminal of MacOS

# 1. NumPy
#pip3 install -U numpy

# 2. pandas
#pip3 install -U pandas

# 3. matplotlib
#pip3 install -U matplotlib

# 4. scikit-learn (sklearn)
#pip3 install -U scikit-learn

# 5. seaborn
#pip3 install -U seaborn


1_MacOS_Terminal.txt
########## Run Terminal on MacOS and execute
### TO UPDATE
cd "YOUR_WORKING_DIRECTORY"


python3 pca01.py X.csv 2 x1 x2



Input data files



X.csv
x1,x2
-0.6253016177147778,-0.1700636571125488
0.9606950333275389,0.5909005970088841
-0.5985433850075956,-0.40259339311989306
-2.2280593766522214,-0.5325767401368825
-0.4614300598767516,-0.4988672444678538
-0.9589290278490424,-0.2693310237319169
-0.6730799091546266,-0.33830854748013184
1.3050186115481948,0.5913578455364443
0.37454559743575305,-0.09854420488874384
-1.8262862664844461,-0.40617025383032
0.6682622844811562,0.33687739585814436
-0.582646676021939,-0.17736921748075757
-0.4181289762061614,-0.37381138862526
0.17220937064575279,0.2646688363445262
0.3771166872946183,0.18844296908147612
-0.6793962296056062,-0.1316019778374699
1.0314895989546813,0.42555001754472105
0.336041798802642,0.03909827210839272
0.7057459850229555,0.4887306489211889
0.8395115474671283,0.15212587178745451
1.4988289811446753,0.47138080860235554
0.28835663844440446,0.03313347136138862
-0.5029350109723469,-0.3686654262572918
1.4792106688843745,0.7404457237454994
-0.4443824292899965,-0.16501936382926874
-0.5334642282766763,-0.06022219108193748
-0.6162294222666116,-0.2117839215105133
0.0746598965247852,-0.06143210770485435
-0.11363701103744236,0.07328776784924641
-0.020071729783539105,0.060974458601556945
0.18958296683876208,0.19976936885949087
0.9384661030448095,0.5417311316419882
-0.3666979887816997,-0.03649713755342033
-0.893528485543383,-0.37281401282437254
-1.3175957070283486,-0.35758107302395725
-0.38180278184117733,-0.20125246497579233
-0.513790215738037,-0.5059740860360368
0.9037379337854844,0.24951826048596157
-0.438875079467635,-0.15064812956892226
-0.5941709069988491,-0.09412241094079501
-0.3129439309072686,-0.0006573129658781017
-0.3002290892889903,-0.42205027177225224
-0.5992393195183092,-0.2603981800756424
0.22037003644161027,0.20391441508487246
0.5251648442025796,0.18702636260776917
0.5566250109115648,-0.1337450936293308
1.3542815721236108,0.5473974215855587
-0.6831598059463396,-0.3946931871585983
-0.12332510881871668,-0.10426653884571879
-0.313175458181276,-0.06983066191967202
-2.4883827672198118,-0.844571248983918
1.2499658587687152,0.585738114758799
0.17676443083245547,0.11073020815351371
-0.5078085381859656,-0.3157463002557443
1.8273850163544514,0.6202325024679164
0.810156740130441,0.17717119383530391
0.42849439812666407,0.12989240692362633
-0.0681124709347002,-0.18341414945223794
-0.23808571181520388,0.03202830530282251
-0.5765113112191823,-0.46111955845296854
0.786579790722323,0.24042095069142833
-0.3940283640280845,-0.11323899454137867
0.3002810160826966,0.04058301029479762
0.8343544924408379,0.36341169543257545
0.677025394441299,0.08621805880954383
0.8228587852634555,0.07952628678979308
1.1106022591442504,0.08384472094572175
-1.1105631980164716,-0.22185161445683957
-3.255811716715632e-05,0.25258628730620064
0.9013460042196363,0.466550032273377
0.6133252822589438,0.22942997155559533
1.0028032134638973,0.2676369700820755
-0.5003006472680256,-0.2651946778474655
-1.4683907966664256,-0.2625916324829058
-1.1655921035052261,-0.43565877729570385
1.232905714340008,0.3726449404855601
-0.08713741656430898,-0.07682053624162678
1.3261173087851847,0.42330557530293655
-0.655945864481047,-0.2364227993382929
-0.3988689687977965,-0.13229617799556867
0.14653723286275153,0.028871450029168452
0.834743613889784,0.2786312693918669
0.1264552008350165,0.0183894434782219
0.20170755625559836,0.06384350191987659
-0.2677757195594144,0.004920115066358239
0.2850850619265961,0.05361280309889841
-0.7534185494319212,-0.3375313194877926
0.5293134165710455,0.024596083148549396
0.36557771876219164,-0.05627178109385569
0.03628223499348943,-0.01715871426325706
0.19813999803683843,0.14880645440940787
-0.7566737721004273,-0.20586422817915984
0.11573326248000207,-0.02550109224389107
-0.18194453723128004,-0.08989333726537009
0.15719830623491635,0.12612466052713817
0.5834703702968259,0.23726671623004414
-0.9467702130017053,-0.2889270900141682
0.7131403526818803,0.177219645253482
1.3015718732510495,0.6245024454537768
-0.5490637095425713,-0.4446386052870516
-0.42972825662244346,-0.25094185555443876
-0.3867663078355488,-0.26615940072768385
-0.6576432577750558,-0.08450556251223318
1.2396141475684104,0.4906595122345453
0.31844659565746924,0.00412165000038993
-0.898118948974779,-0.21013758118618273
0.3368813191969155,0.18796100443519898
-0.41845291136338925,-0.18133671667206272
0.23660115895408176,0.33948703902729255
0.35131972759867486,0.0923282159145785
1.3532394247496584,0.4199700271922287
-0.8348195176596596,-0.2000442134064468
2.329386000927877,0.9163669216952257
0.04671744343525102,0.2491486988635611
0.45537323798218987,0.197896475733475
-0.7106849190944476,-0.01566073872219537
-0.05500951648619287,-0.2192433817766162
-0.45455451107980727,-0.26230796883093094
-0.10821361083201804,-0.04111319037942166
-0.8975524067664928,-0.24112928429837818
0.7348024907303494,0.08560776090645643
0.22526650079886087,-0.24953117491168642
-0.32743823994506444,0.1875610244254125
1.2042254843133238,0.28920812059234957
0.17033197408830542,-0.21321241421310294
1.0000793029093251,0.3606265072324854
-0.6715165633594682,-0.0720747636351032
1.1922161563991507,0.34937990369445265
0.2401742050919358,0.13245126629075182
0.5565923888898513,0.339247335512669
-1.2311390888032698,-0.30157224742195027
0.15265992580652274,-0.03222433313903002
1.3761179554699428,0.43890501948806576
-0.705213114119547,-0.18692398635416027
-1.2503541532412021,-0.6158527467053451
-1.8765101084780167,-0.587438868639975
-1.4710568984778531,-0.7576869379635249
-1.5034644679660039,-0.6391752913522825
-0.3743174720024021,-0.12447326608637561
0.8785248258922201,0.3865290473020056
0.043796065302285726,-0.13360093707284143
0.5459828435062852,0.09789868532533491
0.14156912654701717,-0.03319345268900965
0.0635824494337492,0.0026003742177041914
-0.0887046212662992,-0.05084308715641993
0.13241180824589355,-0.052589664467018696
0.42914314386081237,0.1394515202742384
-0.5624501660995315,-0.3554597997348278
0.5982938287413969,0.30535887865961914
-0.3526694737890958,0.2778729045016903
0.29270813774315824,-0.05885337230364588
1.492163246956624,0.24374680132654183
-0.32113688351077574,-0.21198878371371824
-0.42874183870380655,-0.16243147412180026
0.055660218158465974,0.047228046018850105
-0.1866690795903888,-0.05752362868355279
-0.31597688972246374,-0.13566613265746788
-0.9522872924809886,-0.2034312762147448
-0.6172168705296928,-0.16860795605042092
0.26072412351108243,0.28382726780703793
-1.2960687227320984,-0.5875015083327422
0.13020443641657642,0.10653659974038616
0.03620269153100732,-0.0714259522021235
0.4514068288039194,0.22002174620909223
0.7823436171641021,0.15583634261331558
-1.915279395290569,-0.8412263811413127
1.3347052662111705,0.1770082747074838
-0.5572415535553438,0.09782177032582899
-0.25403015749338176,0.006535856664278692
0.03834264146846239,-0.141592253201906
1.6690068187623457,0.25826388380318754
-0.4012013017360435,-0.12490065133457608
1.339802976104429,0.5546998947511015
0.31216077538561204,0.17060543600074968
2.0951768340115513,0.6464873446375877
-0.6856530796067453,-0.23752537496307333
-0.9812321593591005,-0.5307957332980906
0.3815161508468817,0.21606760064890315
0.08353861810384086,0.257654663999833
0.15622704295661638,0.010726371074333238
-0.898591325618534,-0.4651697690633494
0.21667377802103935,-0.13526539099053692
0.3990912763057593,0.1868245239609231
-0.390120283917219,-0.0557531130260491
0.1511367907084867,-0.035001733813416036
-0.025994727909912866,-0.053019427125884186
-0.5556780881206816,-0.2823207194372078
-0.4144372026664577,-0.1611577687020534
-0.5436960940610972,-0.4311614567758672
1.427591447499168,0.5345541401448728
0.45186186734666195,-0.14350285930798962
0.807369404052474,0.1445241918910205
-0.46125120826088006,-0.3088740278796747
-0.10635944761748291,0.24002562874429395
-1.0981033386108714,-0.5662486871643857
0.8384085456738412,0.2783800067455952
0.3127083097370869,-0.010600123267777883
1.9558758522219668,0.6382241135386741
-1.1083035213676484,-0.39505365784405105
0.39864720557220024,0.02318655255760607




Python files

pca01.py
#################### Principal Component Analysis in Python ####################

#Run this script on Terminal of MacOS as follows:
#
#python3 pca01.py X.csv 2 x1 x2
#python3 pca01.py (X file that includes columns x1 and x2) (number of PCA components) (column x1) (column x2)

#Reference
#
#In Depth: Principal Component Analysis
#https://jakevdp.github.io/PythonDataScienceHandbook/05.09-principal-component-analysis.html


########## import
import sys
#
#print(sys.argv[0])
#pca01.py
#
Xfilename = sys.argv[1]
n = int(sys.argv[2])
x1name = sys.argv[3]
x2name = sys.argv[4]

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns; sns.set()

from sklearn.decomposition import PCA
##########


########## raw data plotting

#rng = np.random.RandomState(1)
#
#X = np.dot(rng.rand(2, 2), rng.randn(2, 200)).T
#
#
#print(type(X))
#<class 'numpy.ndarray'>
#print(X.shape)
#
#colnames=['x1', 'x2']
#pd.DataFrame(X).to_csv('X.csv', header=False, index=False)
#
#add x1, x2 to the columns of X.csv

X = pd.read_csv(Xfilename, header=0)
#
#print(X)
#print(X[x1name])
#print(X[x2name])

#plt.scatter(X[:, 0], X[:, 1])
plt.scatter(X[x1name], X[x2name])
plt.xlabel(x1name)
plt.ylabel(x2name)
plt.axis('equal')
plt.savefig('Fig_1.png')
plt.show()
plt.close()
##########


########## PCA fitting

pca = PCA(n_components=n)
pca.fit(X)

print(pca.components_)
'''
[[-0.94446029 -0.32862557]
 [-0.32862557  0.94446029]]
'''
'''
       x1           x2
pc1    -0.94446029 -0.32862557
pc2    -0.32862557  0.94446029

Namely, with x1 and x2 mean-centered first (pca.transform subtracts pca.mean_ before projecting):
pc1 (calculated) = ((-0.94446029) * (x1 - mean(x1))) + ((-0.32862557) * (x2 - mean(x2)))
pc2 (calculated) = ((-0.32862557) * (x1 - mean(x1))) + (( 0.94446029) * (x2 - mean(x2)))
'''
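
# A quick consistency check (an illustration added here, not part of the
# original script): sklearn's transform projects the mean-centered data
# onto the component vectors shown above.
Xc = X.values - pca.mean_
print(np.allclose(pca.transform(X), Xc @ pca.components_.T))
#True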

print(pca.explained_variance_)
#[0.7625315 0.0184779]

print(pca.explained_variance_ratio_)
#[0.97634101 0.02365899]

print(np.cumsum(pca.explained_variance_ratio_))
#[0.97634101 1.        ]
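
# Side note (an illustration, not part of the original script): n_components
# also accepts a float in (0, 1); sklearn then keeps just enough components
# to explain that fraction of the variance.
pca95 = PCA(n_components=0.95).fit(X)
print(pca95.n_components_)
#1 (PC1 alone already explains ~97.6% of the variance here)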


########## vector drawing function

def draw_vector(v0, v1, ax=None):
    ax = ax or plt.gca()
    arrowprops=dict(arrowstyle='->',
                    linewidth=2,
                    color='k',
                    shrinkA=0, shrinkB=0)
    ax.annotate('', v1, v0, arrowprops=arrowprops)
##########


########## plot data x1 & x2 and principal components PC1 & PC2

fig, ax = plt.subplots(1, 2, figsize=(16, 6))
fig.subplots_adjust(left=0.0625, right=0.95, wspace=0.1)

##### PCA
X_pca = pca.transform(X)

#print(type(X_pca[:, 0]))
#<class 'numpy.ndarray'>
#
#print(max(X_pca[:, 0]))
#2.6580358349697173
#
#print(np.argmax(X_pca[:, 0]))
#50
#
#print(X_pca[:, 0][np.argmax(X_pca[:, 0])])
#2.6580358349697173
#
#print(max(X_pca[:, 1]))
#0.393203001154575
#
#print(X_pca[:, 1][np.argmax(X_pca[:, 1])])
#0.393203001154575


##### raw data

#X[:, 0]
#print(X[x1name][np.argmax(X_pca[:, 0])])
#-2.488382767219812
#
#print(X[x2name][np.argmax(X_pca[:, 0])])
#-0.844571248983918

#X[:, 1]
#print(X[x1name][np.argmax(X_pca[:, 1])])
#-0.3526694737890958
#
#print(X[x2name][np.argmax(X_pca[:, 1])])
#0.2778729045016903


##### validation: PCA data and raw data

###pc1
#
#print(((X[x1name][np.argmax(X_pca[:, 0])]) ** 2) + ((X[x2name][np.argmax(X_pca[:, 0])]) ** 2))
#6.905349390806785
#(-2.488382767219812) ** 2 + (-0.844571248983918) ** 2
#
#print(max(X_pca[:, 0]) ** 2)
#7.065154499983162
#(2.6580358349697173) ** 2

###pc2
#
#print(((X[x1name][np.argmax(X_pca[:, 1])]) ** 2) + ((X[x2name][np.argmax(X_pca[:, 1])]) ** 2))
#0.2015891087988832
#(-0.3526694737890958) ** 2 + (0.2778729045016903) ** 2
#
#print((max(X_pca[:, 1])) ** 2)
#0.15460860011696473
#(0.393203001154575) ** 2


##### plot raw data

#ax[0].scatter(X[:, 0], X[:, 1], alpha=0.2)
ax[0].scatter(X[x1name], X[x2name], alpha=0.2)
for length, vector in zip(pca.explained_variance_, pca.components_):
    v = vector * 3 * np.sqrt(length)
    draw_vector(pca.mean_, pca.mean_ + v, ax=ax[0])
#
ax[0].axis('equal')
#ax[0].set(xlabel='x', ylabel='y', title='input')
ax[0].set(xlabel=x1name, ylabel=x2name, title='Raw Data')
#
ax[0].text(X[x1name][np.argmax(X_pca[:, 0])] * 1.00, X[x2name][np.argmax(X_pca[:, 0])] * 1.15, '(' + str(X[x1name][np.argmax(X_pca[:, 0])]) + ', ' + str(X[x2name][np.argmax(X_pca[:, 0])]) + ')')
ax[0].text(X[x1name][np.argmax(X_pca[:, 1])] * 1.10, X[x2name][np.argmax(X_pca[:, 1])] * 1.30, '(' + str(X[x1name][np.argmax(X_pca[:, 1])]) + ', ' + str(X[x2name][np.argmax(X_pca[:, 1])]) + ')')
#


##### plot principal components

ax[1].scatter(X_pca[:, 0], X_pca[:, 1], alpha=0.2)
#
#draw_vector([0, 0], [0, 3], ax=ax[1])
draw_vector([0, 0], [0, max(X_pca[:, 1])], ax=ax[1])
#
#draw_vector([0, 0], [3, 0], ax=ax[1])
draw_vector([0, 0], [max(X_pca[:, 0]), 0], ax=ax[1])
#
ax[1].axis('equal')
'''
ax[1].set(xlabel='PC1', ylabel='PC2',
          title='Principal Components',
          xlim=(-5, 5), ylim=(-3, 3.1))
'''
#print(max(X_pca[:, 0]))
#print(min(X_pca[:, 0]))
#print(max(X_pca[:, 1]))
#print(min(X_pca[:, 1]))
#
ax[1].set(
          xlabel='PC1',
          ylabel='PC2',
          title='Principal Components',
          xlim=(min(X_pca[:, 0]), max(X_pca[:, 0])),
          ylim=(min(X_pca[:, 1]), max(X_pca[:, 1]))
          )
#
ax[1].text(max(X_pca[:, 0])/2, 0.05, '(' + str(max(X_pca[:, 0])) + ', 0)')
ax[1].text(0.05, max(X_pca[:, 1])/2, '(0, ' + str(max(X_pca[:, 1])) + ')')
#
plt.savefig('Fig_2.png')
plt.show()
plt.close()


########## output

##### output principal components

pc1 = pd.DataFrame(X_pca[:, 0])
pc1.rename({0:'pc1'},axis=1,inplace=True)

pc2 = pd.DataFrame(X_pca[:, 1])
pc2.rename({0:'pc2'},axis=1,inplace=True)

Xpca = pd.concat(
    [
        pc1,
        pc2
    ],
    axis=1
)

#print(Xpca)
Xpca.to_csv('Xpca.csv', header=True, index=False)


##### output raw data and principal components

XXpca = pd.concat(
    [
        X,
        Xpca
    ],
    axis=1
)

#print(XXpca)
XXpca.to_csv('XXpca.csv', header=True, index=False)
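
##### (illustration) reconstructing raw data from the principal components

# Not part of the original script: pca.inverse_transform maps PC scores back
# to the original feature space; with all n = 2 components kept, the
# reconstruction is exact up to floating-point error.
Xback = pd.DataFrame(pca.inverse_transform(X_pca), columns=X.columns)
print(np.allclose(Xback, X))
#True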





Figures
Fig_1.png


Fig_2.png


Tuesday, July 14, 2020

Package: Data Generation, Data Loading and Standardization, Initial Data Analysis, Multiple Linear Regression, Polynomial Linear Regression in Python




0_MacOS_Python_setup.txt
# Install on Terminal of MacOS

# 1. pandas
#pip3 install -U pandas

# 2. NumPy
#pip3 install -U numpy

# 3. matplotlib
#pip3 install -U matplotlib

# 4. scikit-learn (sklearn)
#pip3 install -U scikit-learn

# 5. statsmodels
#pip3 install -U statsmodels

1_MacOS_Terminal.txt
########## Run Terminal on MacOS and execute
### TO UPDATE
cd "YOUR_WORKING_DIRECTORY"


python3 stpkg00.py


#python3 stpkg01.py
python3 stpkg01.py yX.csv y


python3 stpkg02.py yXSTD.csv


#Multiple Linear Regression
python3 stpkg03.py yXSTD.csv y x1 x2 x3


#Polynomial SINGLE Linear Regression
python3 stpkg04.py yXSTD.csv y x1 3



Input data files



N/A (stpkg00.py generates the data files used by the later scripts)




Python files

stpkg00.py
#################### Data Generation ####################

'''
Data generation model, where N(m, s) denotes a normal draw with mean m and standard deviation s:

x1raw ~ N(1,1)
x2raw ~ N(2,2)
x3raw = x1raw + N(2,2)

x1, x2, x3 = standardized x1raw, x2raw, x3raw (each rescaled to mean 0, sd 1)

y = 1.0 + (2.0 * x1) + (0.5 * x2) + (0.1 * x3) + (0.05 * x1 * x2) + (0.2 * (x1)^2) + (0.02 * (x1)^3) + N(0,1)
'''

import pandas as pd
from sklearn.preprocessing import StandardScaler
#import math
import numpy as np


np.random.seed(0)

# number of samples
n = 100

x1 = np.random.normal(1,1,n)
x2 = np.random.normal(2,2,n)
x3 = x1 + np.random.normal(2,2,n)
#
#print(x1)
#[ 2.76405235 ...
#print(type(x1))
#<class 'numpy.ndarray'>
#
#print(x2)
#[ 5.76630139e+00 ...
#
#print(x3)
#[ 4.02568867 ...





x1 = pd.DataFrame(x1)
x1 = x1.rename(columns={0: 'x1'})
#print(x1)

x2 = pd.DataFrame(x2)
x2 = x2.rename(columns={0: 'x2'})
#print(x2)

x3 = pd.DataFrame(x3)
x3 = x3.rename(columns={0: 'x3'})
#print(x3)

X = pd.concat([x1, x2, x3], axis=1)
#print(X)
#print(X.describe())    #standard deviation (sample, NOT population)

X.to_csv('X.csv', header=True, index=False)



#scaler = StandardScaler()
scaler = StandardScaler()
scaler.fit(X)

#print(scaler.mean_) # mean
#[1.05980802 2.16402594 2.94134349]
#
#print(scaler.var_) # variance
#print(type(scaler.var_))
#<class 'numpy.ndarray'>
#print((scaler.var_) ** 0.5) # standard deviation (population)
#[1.00788224 2.06933399 2.22108027]

XSTD = pd.DataFrame(scaler.transform(X))
XSTD = XSTD.rename(columns={0: 'x1', 1: 'x2', 2: 'x3'})
XSTD.to_csv('XSTD.csv', header=True, index=False)
#print(XSTD)
'''
          x1        x2        x3
0   1.690916  1.740790  0.488206
1   0.337687 -1.381867 -0.008980
2   0.911743 -1.307182  1.457270
3   2.164028  0.857652  1.625370
4   1.793612 -1.213082  1.443657
..       ...       ...       ...
95  0.641707 -0.245064  1.368259
96 -0.048922  0.666666  0.119134
97  1.712564  0.716647  1.355392
98  0.066579  2.011491 -0.276140
99  0.339505  1.212482  0.540619

[100 rows x 3 columns]
#
print(XSTD.describe())
'''
'''
                 x1            x2            x3
count  1.000000e+02  1.000000e+02  1.000000e+02
mean  -1.143530e-16  1.576517e-16 -5.440093e-17
std    1.005038e+00  1.005038e+00  1.005038e+00
min   -2.592364e+00 -2.228172e+00 -2.412135e+00
25%   -6.981616e-01 -7.997189e-01 -6.266296e-01
50%    3.401995e-02 -5.543630e-02  1.232380e-02
75%    6.719727e-01  7.398196e-01  6.364589e-01
max    2.192663e+00  2.224031e+00  2.300629e+00
'''

#print(XSTD['x1'])
#print(type(XSTD['x1']))
#<class 'pandas.core.series.Series'>
#
#print(np.array(XSTD['x1']))
#print(type(np.array(XSTD['x1'])))
#<class 'numpy.ndarray'>

tmpy = (2.0 * np.array(XSTD['x1']))
tmpy = tmpy + (0.5 * np.array(XSTD['x2']))
tmpy = tmpy + (0.1 * np.array(XSTD['x3']))
tmpy = tmpy + (0.05 * np.array(XSTD['x1']) * np.array(XSTD['x2']))
tmpy = tmpy + (0.2 * (np.array(XSTD['x1']) ** 2))
tmpy = tmpy + (0.02 * (np.array(XSTD['x1']) ** 3))
tmpy = tmpy + np.random.normal(0,1,n)
tmpy = tmpy + 1.0
#print(tmpy)
#print(type(tmpy))
#<class 'numpy.ndarray'>
y = pd.DataFrame(tmpy)
y = y.rename(columns={0: 'y'})
y.to_csv('y.csv', header=True, index=False)

#print(y)
#print(type(y))
#<class 'pandas.core.frame.DataFrame'>

#y = 1.0 + (2.0 * x1) + (0.5 * x2) + (0.1 * x3) + (0.05 * x1 * x2) + (0.2 * (x1)^2) + (0.02 * (x1)^3) + σ(0,1)
#y = 1.0 + (2.0 * x1) + (0.5 * x2) + (0.1 * x3) + (0.05 * x1 * x2) + (0.2 * (x1**2)) + (0.02 * (x1**3)) + np.random.normal(0,1,n)
#y = 1.0 + (2.0 * 1.690916) + (0.5 * 1.740790) + (0.1 * 0.488206) + (0.05 * 1.690916 * 1.740790) + (0.2 * (1.690916**2)) + (0.02 * (1.690916**3)) + 0
#print(1.0 + (2.0 * 1.690916) + (0.5 * 1.740790) + (0.1 * 0.488206) + (0.05 * 1.690916 * 1.740790) + (0.2 * (1.690916**2)) + (0.02 * (1.690916**3)) + 0)
#6.11675670334485


yX = pd.concat([y, x1, x2, x3], axis=1)
yX.to_csv('yX.csv', header=True, index=False)



stpkg01.py
#################### Data Loading and Standardization

########## data file(s) to load
# yX.csv (which includes dependent variable y and independent variables X: x1, x2, ...)
##########



########## Run this code as follows
#python3 stpkg01.py yX.csv y
#python3 stpkg01.py (data file to load) (dependent variable/target in yX.csv)



########## import
import sys
import pandas as pd
from sklearn.preprocessing import StandardScaler

#print(sys.argv[0])
#stpkg01.py
#
dfname = sys.argv[1]
yname = sys.argv[2]
##########



########## loading data
#yX = pd.read_csv('yX.csv', header = 0)
yX = pd.read_csv(dfname, header = 0)
#
print(yX)
#print(yX.describe())

#X = yX.drop(['y'], axis=1)
X = yX.drop([yname], axis=1)
#print(X)
X.to_csv('X.csv', header=True, index=False)

#y = yX['y']
y = yX[yname]
y.to_csv('y.csv', header=True, index=False)


scaler = StandardScaler()
scaler.fit(X)

#print(scaler.mean_) # mean
#[1.05980802 2.16402594 2.94134349]
#
#print((scaler.var_) ** 0.5) # standard deviation (population)
#[1.00788224 2.06933399 2.22108027]

XSTD = pd.DataFrame(scaler.transform(X), columns = X.columns)
print(XSTD.describe())
'''
                 x1            x2            x3
count  1.000000e+02  1.000000e+02  1.000000e+02
mean  -1.187939e-16  1.620926e-16 -2.320366e-16
std    1.005038e+00  1.005038e+00  1.005038e+00
min   -2.592364e+00 -2.228172e+00 -2.412135e+00
25%   -6.981616e-01 -7.997189e-01 -6.266296e-01
50%    3.401995e-02 -5.543630e-02  1.232380e-02
75%    6.719727e-01  7.398196e-01  6.364589e-01
max    2.192663e+00  2.224031e+00  2.300629e+00
'''
XSTD.to_csv('XSTD.csv', header=True, index=False)
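
# A quick consistency check (an illustration, not part of the original
# script): StandardScaler uses the population standard deviation, so
# (X - mean) / sqrt(var) reproduces scaler.transform(X).
import numpy as np
print(np.allclose((X - scaler.mean_) / np.sqrt(scaler.var_), XSTD))
#True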



yXSTD = pd.concat([y, XSTD], axis=1)
yXSTD.to_csv('yXSTD.csv', header=True, index=False)





stpkg02.py
#################### Initial Data Analysis

########## Run this code as follows
#python3 stpkg02.py yXSTD.csv
#python3 stpkg02.py (data file to load)


########## import
import sys
import pandas as pd
from pandas.plotting import scatter_matrix
import matplotlib.pyplot as plt
import seaborn as sns

#print(sys.argv[0])
#stpkg02.py
#
dfname = sys.argv[1]
##########



########## loading data
#yXSTD = pd.read_csv('yXSTD.csv', header = 0)
yXSTD = pd.read_csv(dfname, header = 0)

#print(yXSTD.describe())
'''
                y            x1            x2            x3
count  100.000000  1.000000e+02  1.000000e+02  1.000000e+02
mean     1.008893 -1.232348e-16  1.576517e-16 -2.364775e-16
std      2.384226  1.005038e+00  1.005038e+00  1.005038e+00
min     -4.001685 -2.592364e+00 -2.228172e+00 -2.412135e+00
25%     -0.432215 -6.981616e-01 -7.997189e-01 -6.266296e-01
50%      0.755357  3.401995e-02 -5.543630e-02  1.232380e-02
75%      2.321260  6.719727e-01  7.398196e-01  6.364589e-01
max      6.808487  2.192663e+00  2.224031e+00  2.300629e+00
'''



########## scatter matrix

#scatter_matrix(yXSTD, figsize=(6.4, 4.8))    #640 x 480
scatter_matrix(yXSTD, figsize=(800/100, 600/100))
plt.suptitle('Scatter Matrix')
plt.savefig("Fig_02_1.png")
plt.show()
plt.close()


########## correlation


cor = yXSTD.corr()

#print(cor)

ax = sns.heatmap(
    cor,
    annot=True,
    fmt='.4f',    #f for fixed number of decimal places (4 in this case); g is for variable numbers
    vmin=-1, vmax=1, center=0,
    #cmap='Blues',
    cmap=sns.diverging_palette(20, 220, n=200),
    square=True
)
ax.set_xticklabels(
    ax.get_xticklabels(),
    #rotation=45,
    horizontalalignment='right'
);

plt.suptitle('Correlation Heatmap')
plt.savefig("Fig_02_2.png")
plt.show()
plt.close()




stpkg03.py
#################### Multiple Linear Regression


########## Run this code as follows
#python3 stpkg03.py yXSTD.csv y x1 x2 x3
#python3 stpkg03.py (data file to load) (dependent variable/target in yXSTD.csv) (3 specified independent variables in yXSTD.csv for multiple linear regression)


########## import
import sys
import pandas as pd
from sklearn.model_selection import train_test_split
#from sklearn.linear_model import LinearRegression
from sklearn import linear_model
import statsmodels.api as sm
import matplotlib.pyplot as plt
#from pandas.plotting import scatter_matrix
#import seaborn as sns

#print(sys.argv[0])
#stpkg03.py
#
dfname = sys.argv[1]
yname = sys.argv[2]
x1name = sys.argv[3]
x2name = sys.argv[4]
x3name = sys.argv[5]
##########



########## loading data
#yXSTD = pd.read_csv('yXSTD.csv', header = 0)
yXSTD = pd.read_csv(dfname, header = 0)

#print(yXSTD.describe())
'''
                y            x1            x2            x3
count  100.000000  1.000000e+02  1.000000e+02  1.000000e+02
mean     1.008893 -1.232348e-16  1.576517e-16 -2.364775e-16
std      2.384226  1.005038e+00  1.005038e+00  1.005038e+00
min     -4.001685 -2.592364e+00 -2.228172e+00 -2.412135e+00
25%     -0.432215 -6.981616e-01 -7.997189e-01 -6.266296e-01
50%      0.755357  3.401995e-02 -5.543630e-02  1.232380e-02
75%      2.321260  6.719727e-01  7.398196e-01  6.364589e-01
max      6.808487  2.192663e+00  2.224031e+00  2.300629e+00
'''


XSTD = yXSTD.drop([yname], axis=1)
#print(XSTD)
#print(type(XSTD))
#X.to_csv('XSTD.csv', header=True, index=False)

y = yXSTD[yname]
y = pd.DataFrame(y)
#print(y)
#print(type(y))
#y.to_csv('y.csv', header=True, index=False)
##########


########## creating training and test data

XSTD_train, XSTD_test, y_train, y_test = train_test_split(XSTD, y, test_size=0.2, random_state = 0)

#print(XSTD_train.shape)
#print(XSTD_test.shape)
#print(y_train.shape)
#print(y_test.shape)
'''
(80, 3)
(20, 3)
(80, 1)
(20, 1)
'''
lr = linear_model.LinearRegression().fit(XSTD_train, y_train)
print(f"R2 of Training Data: {lr.score(XSTD_train, y_train):.4}")
print(f"R2 of Test Data (based on a model by Training Data): {lr.score(XSTD_test, y_test):.4}")



########## Multiple Linear Regression (with Training Data)

smXSTD_train = sm.add_constant(XSTD_train)

model = sm.OLS(y_train, smXSTD_train)

results = model.fit()

print(results.summary())
'''
                            OLS Regression Results                          
==============================================================================
Dep. Variable:                      y   R-squared:                       0.839
Model:                            OLS   Adj. R-squared:                  0.832
Method:                 Least Squares   F-statistic:                     131.5
Date:                Sun, 12 Jul 2020   Prob (F-statistic):           5.23e-30
Time:                        18:49:50   Log-Likelihood:                -111.79
No. Observations:                  80   AIC:                             231.6
Df Residuals:                      76   BIC:                             241.1
Df Model:                           3                                      
Covariance Type:            nonrobust                                      
==============================================================================
                 coef    std err          t      P>|t|      [0.025      0.975]
------------------------------------------------------------------------------
const          1.0679      0.113      9.487      0.000       0.844       1.292
x1             1.8865      0.137     13.767      0.000       1.614       2.159
x2             0.5003      0.111      4.490      0.000       0.278       0.722
x3             0.3011      0.135      2.230      0.029       0.032       0.570
==============================================================================
Omnibus:                        0.606   Durbin-Watson:                   2.329
Prob(Omnibus):                  0.739   Jarque-Bera (JB):                0.733
Skew:                          -0.178   Prob(JB):                        0.693
Kurtosis:                       2.694   Cond. No.                         1.96
==============================================================================

Warnings:
[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.
'''
with open('results.summary.txt', 'w') as f:
  print(results.summary(), file=f)

#print(results.params)
#print(type(results.params))
#<class 'pandas.core.series.Series'>
#
#print(results.params.shape)
#(4,)
#
#print(len(results.params))
#4

b0 = results.params['const']
b1 = results.params[x1name]
b2 = results.params[x2name]
b3 = results.params[x3name]

#print(b0)    #1.0679105398301059
#print(b1)    #1.8865026849796247
#print(b2)    #0.5003158682861151
#print(b3)    #0.3010874982341183

#print('R2: ', results.rsquared)
#R2:  0.8385140722177117
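
# A side check (an illustration, not part of the original flow): the manual
# b0 + b1*x1 + b2*x2 + b3*x3 formula used below is equivalent to letting
# statsmodels predict on the constant-augmented training data.
import numpy as np
print(np.allclose(
    results.predict(smXSTD_train),
    b0 + (b1 * XSTD_train[x1name]) + (b2 * XSTD_train[x2name]) + (b3 * XSTD_train[x3name])
))
#True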

#################### training data integration

#print(pd.concat([y_train, XSTD_train], axis=1))
yXypred_train = pd.concat([y_train, XSTD_train, b0 + (b1 * XSTD_train[x1name]) + (b2 * XSTD_train[x2name]) + (b3 * XSTD_train[x3name])], axis=1)
yXypred_train = yXypred_train.rename(columns={0: 'ypred'})

yXypred_train = pd.concat([yXypred_train, yXypred_train[yname] - yXypred_train['ypred']], axis=1)
yXypred_train = yXypred_train.rename(columns={0: 'residual'})

yXypred_train.to_csv('yXypred_train.csv', header=True, index=False)

#print(yXypred_train.columns)
#Index(['y', 'x1', 'x2', 'x3', 'ypred'], dtype='object')


########## plot: training data (x1, y) and predicted data (x1, ypred)

#print(max(yXypred_train[x1name]))
#2.164027787075764
#
#print(min(yXypred_train[x1name]))
#-4.001684888412238
#
#print(max(yXypred_train[yname]))
#6.808486625095364
#
#print(min(yXypred_train[yname]))
#-2.5923641824877004


plt.figure(figsize=(8, 8))
#
plt.scatter(yXypred_train[x1name], yXypred_train[yname], color = 'blue', label='Training Data')
#
plt.scatter(yXypred_train[x1name], yXypred_train['ypred'], color = 'red', label='Predicted Data')
#
plt.legend(bbox_to_anchor=(1, 0), loc='lower right', borderaxespad=1, fontsize=10)
plt.title(x1name + ": Training Data, " + yname + ": Training Data and Predicted Data")
plt.xlabel(x1name)
plt.ylabel(yname)
plt.grid(True)
#
plt.text(min(yXypred_train[x1name]), max(yXypred_train[yname]) * 1.00, "y = b0 + (b1 * x1) + (b2 * x2) + (b3 * x3)", size = 10, color = "black")
plt.text(min(yXypred_train[x1name]), max(yXypred_train[yname]) * 0.90, "b0 = " + str(b0), size = 10, color = "black")
plt.text(min(yXypred_train[x1name]), max(yXypred_train[yname]) * 0.80, "b1 = " + str(b1), size = 10, color = "black")
plt.text(min(yXypred_train[x1name]), max(yXypred_train[yname]) * 0.70, "b2 = " + str(b2), size = 10, color = "black")
plt.text(min(yXypred_train[x1name]), max(yXypred_train[yname]) * 0.60, "b3 = " + str(b3), size = 10, color = "black")
plt.text(min(yXypred_train[x1name]), max(yXypred_train[yname]) * 0.50, "R2 = " + str(results.rsquared), size = 10, color = "black")
#
plt.savefig('Fig_03_1a.png')
plt.show()


########## plot: training data (x1) and residual data (y - ypred)

plt.figure(figsize=(8, 8))
#
plt.scatter(yXypred_train[x1name], yXypred_train['residual'], color = 'green', label='Residual (actual - predicted)')
#
plt.legend(bbox_to_anchor=(1, 0), loc='lower right', borderaxespad=1, fontsize=10)
plt.title(x1name + ": Training Data, " + 'residual' + ": Residual (actual - predicted)")
plt.xlabel(x1name)
plt.ylabel('Residual')
plt.grid(True)
#
plt.savefig('Fig_03_1b.png')
plt.show()



#################### test data integration

yXypred_test = pd.concat([y_test, XSTD_test, b0 + (b1 * XSTD_test[x1name]) + (b2 * XSTD_test[x2name]) + (b3 * XSTD_test[x3name])], axis=1)
yXypred_test = yXypred_test.rename(columns={0: 'ypred'})

yXypred_test = pd.concat([yXypred_test, yXypred_test[yname] - yXypred_test['ypred']], axis=1)
yXypred_test = yXypred_test.rename(columns={0: 'residual'})

yXypred_test.to_csv('yXypred_test.csv', header=True, index=False)

#print(yXypred_test.columns)
#Index(['y', 'x1', 'x2', 'x3', 'ypred'], dtype='object')


########## plot: test data (x1, y) and predicted data (x1, ypred)

plt.figure(figsize=(8, 8))
#
plt.scatter(yXypred_test[x1name], yXypred_test[yname], color = 'blue', label='Test Data')
#
plt.scatter(yXypred_test[x1name], yXypred_test['ypred'], color = 'red', label='Predicted Data')
#
plt.legend(bbox_to_anchor=(1, 0), loc='lower right', borderaxespad=1, fontsize=10)
plt.title(x1name + ": Test Data, " + yname + ": Test Data and Predicted Data (based on Training Data)")
plt.xlabel(x1name)
plt.ylabel(yname)
plt.grid(True)
#
plt.text(min(yXypred_test[x1name]), max(yXypred_test[yname]) * 1.00, "y = b0 + (b1 * x1) + (b2 * x2) + (b3 * x3)", size = 10, color = "black")
plt.text(min(yXypred_test[x1name]), max(yXypred_test[yname]) * 0.90, "b0 = " + str(b0), size = 10, color = "black")
plt.text(min(yXypred_test[x1name]), max(yXypred_test[yname]) * 0.80, "b1 = " + str(b1), size = 10, color = "black")
plt.text(min(yXypred_test[x1name]), max(yXypred_test[yname]) * 0.70, "b2 = " + str(b2), size = 10, color = "black")
plt.text(min(yXypred_test[x1name]), max(yXypred_test[yname]) * 0.60, "b3 = " + str(b3), size = 10, color = "black")
#
plt.savefig('Fig_03_2a.png')
plt.show()


########## plot: test data (x1) and residual data (y - ypred)

plt.figure(figsize=(8, 8))
#
plt.scatter(yXypred_test[x1name], yXypred_test['residual'], color = 'green', label='Residual (actual - predicted)')
#
plt.legend(bbox_to_anchor=(1, 0), loc='lower right', borderaxespad=1, fontsize=10)
plt.title(x1name + ": Training Data, " + 'residual' + ": Residual (actual - predicted)")
plt.xlabel(x1name)
plt.ylabel('Residual')
plt.grid(True)
#
plt.savefig('Fig_03_2b.png')
plt.show()






stpkg04.py

#################### Polynomial Linear Regression


########## Run this code as follows
#python3 stpkg04.py yXSTD.csv y x1 3
#python3 stpkg04.py (data file to load) (dependent variable/target in yXSTD.csv) (one specified independent variable in yXSTD.csv for polynomial linear regression) (polynomial degree)


########## import
import sys
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
from sklearn.metrics import r2_score
import matplotlib.pyplot as plt

#print(sys.argv[0])
#stpkg04.py
#
dfname = sys.argv[1]
yname = sys.argv[2]
x1name = sys.argv[3]
x1dim = int(sys.argv[4])
#x2name = sys.argv[4]
#x3name = sys.argv[5]
##########



########## loading data
#yXSTD = pd.read_csv('yXSTD.csv', header = 0)
yXSTD = pd.read_csv(dfname, header = 0)

#print(yXSTD.describe())
'''
                y            x1            x2            x3
count  100.000000  1.000000e+02  1.000000e+02  1.000000e+02
mean     1.008893 -1.232348e-16  1.576517e-16 -2.364775e-16
std      2.384226  1.005038e+00  1.005038e+00  1.005038e+00
min     -4.001685 -2.592364e+00 -2.228172e+00 -2.412135e+00
25%     -0.432215 -6.981616e-01 -7.997189e-01 -6.266296e-01
50%      0.755357  3.401995e-02 -5.543630e-02  1.232380e-02
75%      2.321260  6.719727e-01  7.398196e-01  6.364589e-01
max      6.808487  2.192663e+00  2.224031e+00  2.300629e+00
'''


XSTD = yXSTD.drop([yname], axis=1)
#print(XSTD)
#print(type(XSTD))
#X.to_csv('XSTD.csv', header=True, index=False)


x1 = XSTD[x1name]
x1 = pd.DataFrame(x1)
#print(x1)
#print(type(x1))
#<class 'pandas.core.frame.DataFrame'>


y = yXSTD[yname]
y = pd.DataFrame(y)
#print(y)
#print(type(y))
#y.to_csv('y.csv', header=True, index=False)
##########


########## creating training and test data

x1_train, x1_test, y_train, y_test = train_test_split(x1, y, test_size=0.2, random_state = 0)

#print(XSTD_train.shape)
#print(XSTD_test.shape)
#print(y_train.shape)
#print(y_test.shape)
'''
(80, 3)
(20, 3)
(80, 1)
(20, 1)
'''
#lr = linear_model.LinearRegression().fit(XSTD_train, y_train)
#print(f"R2 of Training Data: {lr.score(XSTD_train, y_train):.4}")
#print(f"R2 of Test Data (based on a model by Training Data): {lr.score(XSTD_test, y_test):.4}")



########## Polynomial Linear Regression (for Training Data)
clf = LinearRegression()

clf.fit(x1_train, y_train)

#dimension
#print(x1dim)

pf = PolynomialFeatures(degree = x1dim, include_bias = False)
x_poly = pf.fit_transform(x1_train)

poly_reg = LinearRegression()
poly_reg_fit = poly_reg.fit(x_poly, y_train)

#print(poly_reg_fit.coef_)
#[[2.05425368 0.11273714 0.03563863]]
#print(type(poly_reg_fit.coef_))
#<class 'numpy.ndarray'>
#print(poly_reg_fit.coef_[0][0])
#2.0542536782376684
#
#print(poly_reg_fit.intercept_)
#[0.99017672]

polypred = poly_reg.predict(x_poly)

#predicted y
#ypred = clf.predict(y_train)
#ypred = clf.predict(x1_train)
#print(ypred)
#print(type(ypred))
#<class 'numpy.ndarray'>


##### output data

ypred_train = poly_reg_fit.intercept_[0] + (poly_reg_fit.coef_[0][0] * x1_train) + (poly_reg_fit.coef_[0][1] * (x1_train ** 2)) + (poly_reg_fit.coef_[0][2] * (x1_train **3))
ypred_train = ypred_train.rename(columns={x1name: 'ypred'})
#print(ypred_train)

yx1ypred_train = pd.concat([y_train, x1_train, ypred_train], axis=1)
yx1ypred_train.to_csv('yx1ypred_train.csv', header=True, index=False)

yx1ypredres_train = pd.concat([yx1ypred_train, yx1ypred_train[yname] - yx1ypred_train['ypred']], axis=1)
yx1ypredres_train = yx1ypredres_train.rename(columns={0: 'residual'})
yx1ypredres_train.to_csv('yx1ypredres_train.csv', header=True, index=False)
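
# A consistency check (an illustration, not part of the original script): the
# manual cubic formula above reproduces poly_reg.predict on the same rows.
import numpy as np
print(np.allclose(ypred_train['ypred'].values, polypred.ravel()))
#True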


##### plot

plt.scatter(yx1ypredres_train[x1name], yx1ypredres_train[yname], c = 'blue', label = 'Training Data')
plt.scatter(yx1ypredres_train[x1name], yx1ypredres_train['ypred'], c = 'red', label = 'Predicted Data')
#
plt.text(min(yx1ypredres_train[x1name]), max(yx1ypredres_train[yname]) * 1.00, "y = b0 + (b1 * x1) + (b2 * x1^2) + (b3 * x1^3)", size = 10, color = "black")
plt.text(min(yx1ypredres_train[x1name]), max(yx1ypredres_train[yname]) * 0.90, "b0 = " + str(poly_reg_fit.intercept_[0]), size = 10, color = "black")
plt.text(min(yx1ypredres_train[x1name]), max(yx1ypredres_train[yname]) * 0.80, "b1 = " + str(poly_reg_fit.coef_[0][0]), size = 10, color = "black")
plt.text(min(yx1ypredres_train[x1name]), max(yx1ypredres_train[yname]) * 0.70, "b2 = " + str(poly_reg_fit.coef_[0][1]), size = 10, color = "black")
plt.text(min(yx1ypredres_train[x1name]), max(yx1ypredres_train[yname]) * 0.60, "b3 = " + str(poly_reg_fit.coef_[0][2]), size = 10, color = "black")
plt.text(min(yx1ypredres_train[x1name]), max(yx1ypredres_train[yname]) * 0.50, "R^2={}".format(r2_score(y_train, polypred)), size = 10, color = "black")
#
plt.xlabel(x1name + ': Training Data')
plt.ylabel(yname)
plt.grid(True)
#
#plt.legend()
plt.legend(bbox_to_anchor=(1, 0), loc='lower right', borderaxespad=1, fontsize=10)
plt.title("Polynominal Regression: " + x1name)
plt.savefig('Fig_04_1a.png')
plt.show()

'''
#plt.scatter(x1_train, y_train, c = 'blue', label = "R^2={}".format(r2_score(y_train, polypred)))
plt.scatter(x1_train, y_train, c = 'blue', label = 'Training Data')
plt.scatter(x1_train, polypred, c = 'red', label = 'Predicted Data')
#plt.plot(x1_train, polypred, c = 'red')
#
plt.text(min(x1_train[x1name]), max(y_train[yname]) * 1.00, "y = b0 + (b1 * x1) + (b2 * x1^2) + (b3 * x1^3)", size = 10, color = "black")
plt.text(min(x1_train[x1name]), max(y_train[yname]) * 0.90, "b0 = " + str(poly_reg_fit.intercept_[0]), size = 10, color = "black")
plt.text(min(x1_train[x1name]), max(y_train[yname]) * 0.80, "b1 = " + str(poly_reg_fit.coef_[0][0]), size = 10, color = "black")
plt.text(min(x1_train[x1name]), max(y_train[yname]) * 0.70, "b2 = " + str(poly_reg_fit.coef_[0][1]), size = 10, color = "black")
plt.text(min(x1_train[x1name]), max(y_train[yname]) * 0.60, "b3 = " + str(poly_reg_fit.coef_[0][2]), size = 10, color = "black")
plt.text(min(x1_train[x1name]), max(y_train[yname]) * 0.50, "R^2={}".format(r2_score(y_train, polypred)), size = 10, color = "black")
#
plt.xlabel(x1name + ': Training Data')
plt.ylabel(yname)
plt.grid(True)
#
#plt.legend()
plt.legend(bbox_to_anchor=(1, 0), loc='lower right', borderaxespad=1, fontsize=10)
plt.title("Polynominal Regression: " + x1name)
plt.savefig('Fig_04_1a.png')
plt.show()
'''

##### residual plot

plt.scatter(yx1ypredres_train[x1name], yx1ypredres_train['residual'], c = 'green', label = 'Residual: Training Data')
#
plt.xlabel(x1name + ': Training Data')
plt.ylabel(yname)
plt.grid(True)
#
#plt.legend()
plt.legend(bbox_to_anchor=(1, 0), loc='lower right', borderaxespad=1, fontsize=10)
plt.title("Polynominal Regression, Residual: " + x1name)
plt.savefig('Fig_04_1b.png')
plt.show()

'''
ypred = pd.DataFrame(ypred)
#print(ypred)
#print(type(ypred))
#<class 'pandas.core.frame.DataFrame'>
ypred = ypred.rename(columns={0: 'ypred'})
#
#print(ypred)
#
#print(y_train)
#print(type(y_train))
#
#print(x1_train)
#print(type(x1_train))

y_train.reset_index(drop=True, inplace=True)
y_train = y_train.rename(columns={0: 'y'})
#print(y_train)

x1_train.reset_index(drop=True, inplace=True)
x1_train = x1_train.rename(columns={0: 'x1'})
#print(x1_train)

#yx1ypred_train = pd.concat([y_train, x1_train, ypred], axis=1, ignore_index = True)
yx1ypred_train = pd.concat([y_train, x1_train, ypred], axis=1)
yx1ypred_train.to_csv('yx1ypred_train.csv', header=True, index=False)

yx1ypredres_train = pd.concat([yx1ypred_train, yx1ypred_train[yname] - yx1ypred_train['ypred']], axis=1)
yx1ypredres_train = yx1ypredres_train.rename(columns={0: 'residual'})
yx1ypredres_train.to_csv('yx1ypredres_train.csv', header=True, index=False)



plt.scatter(yx1ypredres_train[x1name], yx1ypredres_train['residual'], c = 'green', label = 'Residual: Training Data')
#plt.scatter(x1_train, polypred, c = 'red', label = 'Predicted Data')
#plt.plot(x1_train, polypred, c = 'red')
#
#plt.text(min(x1_train[x1name]), max(y_train[yname]) * 1.00, "y = b0 + (b1 * x1) + (b2 * x1^2) + (b3 * x1^3)", size = 10, color = "black")
#plt.text(min(x1_train[x1name]), max(y_train[yname]) * 0.90, "b0 = " + str(poly_reg_fit.intercept_[0]), size = 10, color = "black")
#plt.text(min(x1_train[x1name]), max(y_train[yname]) * 0.80, "b1 = " + str(poly_reg_fit.coef_[0][0]), size = 10, color = "black")
#plt.text(min(x1_train[x1name]), max(y_train[yname]) * 0.70, "b2 = " + str(poly_reg_fit.coef_[0][1]), size = 10, color = "black")
#plt.text(min(x1_train[x1name]), max(y_train[yname]) * 0.60, "b3 = " + str(poly_reg_fit.coef_[0][2]), size = 10, color = "black")
#plt.text(min(x1_train[x1name]), max(y_train[yname]) * 0.50, "R^2={}".format(r2_score(y_train, polypred)), size = 10, color = "black")
#
plt.xlabel(x1name + ': Training Data')
plt.ylabel(yname)
plt.grid(True)
#
#plt.legend()
plt.legend(bbox_to_anchor=(1, 0), loc='lower right', borderaxespad=1, fontsize=10)
plt.title("Polynominal Regression, Residual: " + x1name)
plt.savefig('Fig_04_1b.png')
plt.show()
'''



########## Polynomial Linear Regression (for Test Data)

##### output data

#x1_test
#y_test

ypred_test = poly_reg_fit.intercept_[0] + (poly_reg_fit.coef_[0][0] * x1_test) + (poly_reg_fit.coef_[0][1] * (x1_test ** 2)) + (poly_reg_fit.coef_[0][2] * (x1_test **3))
ypred_test = ypred_test.rename(columns={x1name: 'ypred'})
#print(ypred_test)

yx1ypred_test = pd.concat([y_test, x1_test, ypred_test], axis=1)
yx1ypred_test.to_csv('yx1ypred_test.csv', header=True, index=False)

yx1ypredres_test = pd.concat([yx1ypred_test, yx1ypred_test[yname] - yx1ypred_test['ypred']], axis=1)
yx1ypredres_test = yx1ypredres_test.rename(columns={0: 'residual'})
yx1ypredres_test.to_csv('yx1ypredres_test.csv', header=True, index=False)


##### plot

plt.scatter(yx1ypredres_test[x1name], yx1ypredres_test[yname], c = 'blue', label = 'Test Data')
plt.scatter(yx1ypredres_test[x1name], yx1ypredres_test['ypred'], c = 'red', label = 'Predicted Data')
#
plt.text(min(yx1ypredres_test[x1name]), max(yx1ypredres_test[yname]) * 1.00, "y = b0 + (b1 * x1) + (b2 * x1^2) + (b3 * x1^3)", size = 10, color = "black")
plt.text(min(yx1ypredres_test[x1name]), max(yx1ypredres_test[yname]) * 0.90, "b0 = " + str(poly_reg_fit.intercept_[0]), size = 10, color = "black")
plt.text(min(yx1ypredres_test[x1name]), max(yx1ypredres_test[yname]) * 0.80, "b1 = " + str(poly_reg_fit.coef_[0][0]), size = 10, color = "black")
plt.text(min(yx1ypredres_test[x1name]), max(yx1ypredres_test[yname]) * 0.70, "b2 = " + str(poly_reg_fit.coef_[0][1]), size = 10, color = "black")
plt.text(min(yx1ypredres_test[x1name]), max(yx1ypredres_test[yname]) * 0.60, "b3 = " + str(poly_reg_fit.coef_[0][2]), size = 10, color = "black")
#plt.text(min(yx1ypredres_test[x1name]), max(yx1ypredres_test[yname]) * 0.50, "R^2={}".format(r2_score(y_train, polypred)), size = 10, color = "black")
#
plt.xlabel(x1name + ': Test Data')
plt.ylabel(yname)
plt.grid(True)
#
#plt.legend()
plt.legend(bbox_to_anchor=(1, 0), loc='lower right', borderaxespad=1, fontsize=10)
plt.title("Polynominal Regression: " + x1name)
plt.savefig('Fig_04_2a.png')
plt.show()

##### residual plot

plt.scatter(yx1ypredres_test[x1name], yx1ypredres_test['residual'], c = 'green', label = 'Residual: Test Data')
#
plt.xlabel(x1name + ': Test Data')
plt.ylabel(yname)
plt.grid(True)
#
#plt.legend()
plt.legend(bbox_to_anchor=(1, 0), loc='lower right', borderaxespad=1, fontsize=10)
plt.title("Polynominal Regression, Residual: " + x1name)
plt.savefig('Fig_04_2b.png')
plt.show()


Figures
Fig_02_1.png

Fig_02_2.png

Fig_03_1a.png

Fig_03_1b.png

Fig_03_2a.png

Fig_03_2b.png

Fig_04_1a.png

Fig_04_1b.png

Fig_04_2a.png

Fig_04_2b.png




Training/Test Split and K-Folds Cross Validation in Python




0_MacOS_Python_setup.txt
# Install on Terminal of MacOS

# 1. pandas
#pip3 install -U pandas

# 2. NumPy
#pip3 install -U numpy

# 3. matplotlib
#pip3 install -U matplotlib

# 4. scikit-learn (sklearn)
#pip3 install -U scikit-learn

# 5. statsmodels
#pip3 install -U statsmodels

1_MacOS_Terminal.txt
########## Run Terminal on MacOS and execute
### TO UPDATE
cd "YOUR_WORKING_DIRECTORY"

python3 ttscv01.py

python3 ttscv02.py 2

python3 ttscv03.py 2
#python3 ttscv03.py 3
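
(The ttscv scripts themselves are not reproduced in this excerpt. As a minimal sketch of what the commands above suggest, assuming the numeric argument is the number of folds, and using scikit-learn's built-in diabetes data, which X.csv below appears to be, a training/test split plus K-folds cross validation could look like this:)

#kfold_sketch.py (illustration only, not one of the ttscv scripts)
from sklearn.datasets import load_diabetes
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import train_test_split, KFold, cross_val_score

X, y = load_diabetes(return_X_y=True)

# single training/test split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=0)
lr = LinearRegression().fit(X_train, y_train)
print(lr.score(X_test, y_test))    # R2 on the held-out test data

# K-folds cross validation (here: 2 folds, matching "python3 ttscv02.py 2")
kf = KFold(n_splits=2, shuffle=True, random_state=0)
scores = cross_val_score(LinearRegression(), X, y, cv=kf)
print(scores)          # R2 per fold
print(scores.mean())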



Input data files



X.csv
age,sex,bmi,map,tc,ldl,hdl,tch,ltg,glu
0.0380759064334241,0.0506801187398187,0.0616962065186885,0.0218723549949558,-0.0442234984244464,-0.0348207628376986,-0.0434008456520269,-0.00259226199818282,0.0199084208763183,-0.0176461251598052
-0.00188201652779104,-0.044641636506989,-0.0514740612388061,-0.0263278347173518,-0.00844872411121698,-0.019163339748222,0.0744115640787594,-0.0394933828740919,-0.0683297436244215,-0.09220404962683
0.0852989062966783,0.0506801187398187,0.0444512133365941,-0.00567061055493425,-0.0455994512826475,-0.0341944659141195,-0.0323559322397657,-0.00259226199818282,0.00286377051894013,-0.0259303389894746
-0.0890629393522603,-0.044641636506989,-0.0115950145052127,-0.0366564467985606,0.0121905687618,0.0249905933641021,-0.0360375700438527,0.0343088588777263,0.0226920225667445,-0.0093619113301358
0.00538306037424807,-0.044641636506989,-0.0363846922044735,0.0218723549949558,0.00393485161259318,0.0155961395104161,0.0081420836051921,-0.00259226199818282,-0.0319914449413559,-0.0466408735636482
-0.0926954778032799,-0.044641636506989,-0.0406959404999971,-0.0194420933298793,-0.0689906498720667,-0.0792878444118122,0.0412768238419757,-0.076394503750001,-0.0411803851880079,-0.0963461565416647
-0.0454724779400257,0.0506801187398187,-0.0471628129432825,-0.015999222636143,-0.040095639849843,-0.0248000120604336,0.000778807997017968,-0.0394933828740919,-0.0629129499162512,-0.0383566597339788
0.063503675590561,0.0506801187398187,-0.00189470584028465,0.0666296740135272,0.0906198816792644,0.108914381123697,0.0228686348215404,0.0177033544835672,-0.0358167281015492,0.00306440941436832
0.0417084448844436,0.0506801187398187,0.0616962065186885,-0.0400993174922969,-0.0139525355440215,0.00620168565673016,-0.0286742944356786,-0.00259226199818282,-0.0149564750249113,0.0113486232440377
-0.0709002470971626,-0.044641636506989,0.0390621529671896,-0.0332135761048244,-0.0125765826858204,-0.034507614375909,-0.0249926566315915,-0.00259226199818282,0.0677363261102861,-0.0135040182449705
-0.0963280162542995,-0.044641636506989,-0.0838084234552331,0.0081008722200108,-0.103389471327095,-0.0905611890362353,-0.0139477432193303,-0.076394503750001,-0.0629129499162512,-0.0342145528191441
0.0271782910803654,0.0506801187398187,0.0175059114895716,-0.0332135761048244,-0.00707277125301585,0.0459715403040008,-0.0654906724765493,0.0712099797536354,-0.096433222891784,-0.0590671943081523
0.0162806757273067,-0.044641636506989,-0.0288400076873072,-0.00911348124867051,-0.00432086553661359,-0.00976888589453599,0.0449584616460628,-0.0394933828740919,-0.0307512098645563,-0.0424987666488135
0.00538306037424807,0.0506801187398187,-0.00189470584028465,0.0081008722200108,-0.00432086553661359,-0.0157187066685371,-0.0029028298070691,-0.00259226199818282,0.0383932482116977,-0.0135040182449705
0.0453409833354632,-0.044641636506989,-0.0256065714656645,-0.0125563519424068,0.0176943801946045,-6.12835790604833e-05,0.0817748396869335,-0.0394933828740919,-0.0319914449413559,-0.0756356219674911
-0.0527375548420648,0.0506801187398187,-0.0180618869484982,0.0804011567884723,0.0892439288210632,0.107661787276539,-0.0397192078479398,0.108111100629544,0.0360557900898319,-0.0424987666488135
-0.00551455497881059,-0.044641636506989,0.0422955891888323,0.0494153205448459,0.0245741444856101,-0.0238605666750649,0.0744115640787594,-0.0394933828740919,0.0522799997967812,0.0279170509033766
0.0707687524926,0.0506801187398187,0.0121168511201671,0.0563010619323185,0.034205814493018,0.0494161733836856,-0.0397192078479398,0.0343088588777263,0.027367707542609,-0.00107769750046639
-0.0382074010379866,-0.044641636506989,-0.0105172024313319,-0.0366564467985606,-0.0373437341334407,-0.0194764882100115,-0.0286742944356786,-0.00259226199818282,-0.0181182673078967,-0.0176461251598052
-0.0273097856849279,-0.044641636506989,-0.0180618869484982,-0.0400993174922969,-0.00294491267841247,-0.0113346282034837,0.0375951860378887,-0.0394933828740919,-0.0089440189577978,-0.0549250873933176
-0.0491050163910452,-0.044641636506989,-0.0568631216082106,-0.0435421881860331,-0.0455994512826475,-0.043275771306016,0.000778807997017968,-0.0394933828740919,-0.0119006848015081,0.0154907301588724
-0.0854304009012408,0.0506801187398187,-0.0223731352440218,0.00121513083253827,-0.0373437341334407,-0.0263657543693812,0.0155053592133662,-0.0394933828740919,-0.072128454601956,-0.0176461251598052
-0.0854304009012408,-0.044641636506989,-0.00405032998804645,-0.00911348124867051,-0.00294491267841247,0.00776742796567782,0.0228686348215404,-0.0394933828740919,-0.0611765950943345,-0.0135040182449705
0.0453409833354632,0.0506801187398187,0.0606183944448076,0.0310533436263482,0.0287020030602135,-0.0473467013092799,-0.0544457590642881,0.0712099797536354,0.133598980013008,0.135611830689079
-0.0636351701951234,-0.044641636506989,0.0358287167455469,-0.0228849640236156,-0.0304639698424351,-0.0188501912864324,-0.00658446761115617,-0.00259226199818282,-0.0259524244351894,-0.0549250873933176
-0.067267708646143,0.0506801187398187,-0.0126728265790937,-0.0400993174922969,-0.0153284884022226,0.0046359433477825,-0.0581273968683752,0.0343088588777263,0.0191990330785671,-0.0342145528191441
-0.107225631607358,-0.044641636506989,-0.0773415510119477,-0.0263278347173518,-0.0896299427450836,-0.0961978613484469,0.0265502726256275,-0.076394503750001,-0.0425721049227942,-0.0052198044153011
-0.0236772472339084,-0.044641636506989,0.0595405823709267,-0.0400993174922969,-0.0428475455662452,-0.0435889197678055,0.0118237214092792,-0.0394933828740919,-0.0159982677581387,0.0403433716478807
0.0526060602375023,-0.044641636506989,-0.0212953231701409,-0.0745280244296595,-0.040095639849843,-0.0376390989938044,-0.00658446761115617,-0.0394933828740919,-0.000609254186102297,-0.0549250873933176
0.0671362140415805,0.0506801187398187,-0.00620595413580824,0.063186803319791,-0.0428475455662452,-0.0958847128866574,0.052321737254237,-0.076394503750001,0.0594238004447941,0.0527696923923848
-0.0600026317441039,-0.044641636506989,0.0444512133365941,-0.0194420933298793,-0.00982467696941811,-0.00757684666200928,0.0228686348215404,-0.0394933828740919,-0.0271286455543265,-0.0093619113301358
-0.0236772472339084,-0.044641636506989,-0.0654856181992578,-0.081413765817132,-0.0387196869916418,-0.0536096705450705,0.0596850128624111,-0.076394503750001,-0.0371283460104736,-0.0424987666488135
0.0344433679824045,0.0506801187398187,0.125287118877662,0.0287580963824284,-0.0538551684318543,-0.0129003705124313,-0.10230705051742,0.108111100629544,0.000271485727907132,0.0279170509033766
0.030810829531385,-0.044641636506989,-0.0503962491649252,-0.00222773986119799,-0.0442234984244464,-0.0899348921126563,0.118591217727804,-0.076394503750001,-0.0181182673078967,0.00306440941436832
0.0162806757273067,-0.044641636506989,-0.063329994051496,-0.0573136709609782,-0.0579830270064577,-0.0489124436182275,0.0081420836051921,-0.0394933828740919,-0.0594726974107223,-0.0673514081378217
0.0489735217864827,0.0506801187398187,-0.030995631835069,-0.0492803060204031,0.0493412959332305,-0.00413221358232442,0.133317768944152,-0.0535158088069373,0.0213108465682448,0.0196328370737072
0.0126481372762872,-0.044641636506989,0.0228949718589761,0.0528581912385822,0.00806271018719657,-0.0285577936019079,0.0375951860378887,-0.0394933828740919,0.0547240033481791,-0.0259303389894746
-0.00914709342983014,-0.044641636506989,0.0110390390462862,-0.0573136709609782,-0.0249601584096305,-0.0429626228442264,0.0302319104297145,-0.0394933828740919,0.01703713241478,-0.0052198044153011
-0.00188201652779104,0.0506801187398187,0.0713965151836166,0.0976155102571536,0.0878679759628621,0.0754074957122168,-0.0213110188275045,0.0712099797536354,0.0714240327805764,0.0237749439885419
-0.00188201652779104,0.0506801187398187,0.0142724752679289,-0.0745280244296595,0.00255889875439205,0.00620168565673016,-0.0139477432193303,-0.00259226199818282,0.0191990330785671,0.00306440941436832
0.00538306037424807,0.0506801187398187,-0.00836157828357004,0.0218723549949558,0.054845107366035,0.07321545647969,-0.0249926566315915,0.0343088588777263,0.0125531528133893,0.094190761540732
-0.099960554705319,-0.044641636506989,-0.0676412423470196,-0.108956731367022,-0.0744944613048712,-0.072711726714232,0.0155053592133662,-0.0394933828740919,-0.0498684677352306,-0.0093619113301358
-0.0600026317441039,0.0506801187398187,-0.0105172024313319,-0.0148515990830405,-0.0497273098572509,-0.0235474182132754,-0.0581273968683752,0.0158582984397717,-0.00991895736315477,-0.0342145528191441
0.0199132141783263,-0.044641636506989,-0.0234509473179027,-0.0710851537359232,0.0204462859110067,-0.0100820343563255,0.118591217727804,-0.076394503750001,-0.0425721049227942,0.0734802269665584
0.0453409833354632,0.0506801187398187,0.068163078961974,0.0081008722200108,-0.0167044412604238,0.0046359433477825,-0.0765355858888105,0.0712099797536354,0.0324332257796019,-0.0176461251598052
0.0271782910803654,0.0506801187398187,-0.0353068801305926,0.0322009670761646,-0.0112006298276192,0.00150445872988718,-0.0102661054152432,-0.00259226199818282,-0.0149564750249113,-0.0507829804784829
-0.0563700932930843,-0.044641636506989,-0.0115950145052127,-0.0332135761048244,-0.0469754041408486,-0.0476598497710694,0.00446044580110504,-0.0394933828740919,-0.00797939755454164,-0.0880619427119953
-0.0781653239992017,-0.044641636506989,-0.0730303027164241,-0.0573136709609782,-0.0841261313122791,-0.0742774690231797,-0.0249926566315915,-0.0394933828740919,-0.0181182673078967,-0.0839198357971606
0.0671362140415805,0.0506801187398187,-0.041773752573878,0.0115437429137471,0.00255889875439205,0.00588853719494063,0.0412768238419757,-0.0394933828740919,-0.0594726974107223,-0.0217882320746399
-0.0418399394890061,0.0506801187398187,0.0142724752679289,-0.00567061055493425,-0.0125765826858204,0.00620168565673016,-0.0728539480847234,0.0712099797536354,0.0354619386607697,-0.0135040182449705
0.0344433679824045,-0.044641636506989,-0.00728376620968916,0.0149866136074833,-0.0442234984244464,-0.0373259505320149,-0.0029028298070691,-0.0394933828740919,-0.02139368094036,0.00720651632920303
0.0598711371395414,0.0506801187398187,0.0164280994156907,0.0287580963824284,-0.0414715927080441,-0.029184090525487,-0.0286742944356786,-0.00259226199818282,-0.00239668149341427,-0.0217882320746399
-0.0527375548420648,-0.044641636506989,-0.00943939035745095,-0.00567061055493425,0.0397096259258226,0.0447189464568426,0.0265502726256275,-0.00259226199818282,-0.0181182673078967,-0.0135040182449705
-0.00914709342983014,-0.044641636506989,-0.0159062628007364,0.0700725447072635,0.0121905687618,0.0221722572079963,0.0155053592133662,-0.00259226199818282,-0.0332487872476258,0.0486275854775501
-0.0491050163910452,-0.044641636506989,0.0250505960067379,0.0081008722200108,0.0204462859110067,0.0177881787429428,0.052321737254237,-0.0394933828740919,-0.0411803851880079,0.00720651632920303
-0.0418399394890061,-0.044641636506989,-0.0493184370910443,-0.0366564467985606,-0.00707277125301585,-0.0226079728279068,0.0854564774910206,-0.0394933828740919,-0.0664881482228354,0.00720651632920303
-0.0418399394890061,-0.044641636506989,0.0412177771149514,-0.0263278347173518,-0.0318399227006362,-0.0304366843726451,-0.0360375700438527,0.00294290613320356,0.0336568129023847,-0.0176461251598052
-0.0273097856849279,-0.044641636506989,-0.063329994051496,-0.0504279295735057,-0.0896299427450836,-0.104339721354975,0.052321737254237,-0.076394503750001,-0.0561575730950062,-0.0673514081378217
0.0417084448844436,-0.044641636506989,-0.064407806125377,0.0356438377699009,0.0121905687618,-0.057993749010124,0.181179060397284,-0.076394503750001,-0.000609254186102297,-0.0507829804784829
0.063503675590561,0.0506801187398187,-0.0256065714656645,0.0115437429137471,0.0644767773734429,0.048476727998317,0.0302319104297145,-0.00259226199818282,0.0383932482116977,0.0196328370737072
-0.0709002470971626,-0.044641636506989,-0.00405032998804645,-0.0400993174922969,-0.0662387441556644,-0.0786615474882331,0.052321737254237,-0.076394503750001,-0.0514005352605825,-0.0342145528191441
-0.0418399394890061,0.0506801187398187,0.00457216660300077,-0.0538708002672419,-0.0442234984244464,-0.0273051997547498,-0.0802172236928976,0.0712099797536354,0.0366457977933988,0.0196328370737072
-0.0273097856849279,0.0506801187398187,-0.00728376620968916,-0.0400993174922969,-0.0112006298276192,-0.0138398158977999,0.0596850128624111,-0.0394933828740919,-0.0823814832581028,-0.0259303389894746
-0.034574862586967,-0.044641636506989,-0.0374625042783544,-0.0607565416547144,0.0204462859110067,0.0434663526096845,-0.0139477432193303,-0.00259226199818282,-0.0307512098645563,-0.0714935150526564
0.0671362140415805,0.0506801187398187,-0.0256065714656645,-0.0400993174922969,-0.0634868384392622,-0.0598726397808612,-0.0029028298070691,-0.0394933828740919,-0.0191970476139445,0.0113486232440377
-0.0454724779400257,0.0506801187398187,-0.0245287593917836,0.0597439326260547,0.00531080447079431,0.0149698425868371,-0.0544457590642881,0.0712099797536354,0.0423448954496075,0.0154907301588724
-0.00914709342983014,0.0506801187398187,-0.0180618869484982,-0.0332135761048244,-0.0208322998350272,0.0121515064307313,-0.0728539480847234,0.0712099797536354,0.000271485727907132,0.0196328370737072
0.0417084448844436,0.0506801187398187,-0.0148284507268555,-0.0171468461892456,-0.00569681839481472,0.00839372488925688,-0.0139477432193303,-0.00185423958066465,-0.0119006848015081,0.00306440941436832
0.0380759064334241,0.0506801187398187,-0.0299178197611881,-0.0400993174922969,-0.0332158755588373,-0.0241737151368545,-0.0102661054152432,-0.00259226199818282,-0.0129079422541688,0.00306440941436832
0.0162806757273067,-0.044641636506989,-0.0460850008694016,-0.00567061055493425,-0.0758704141630723,-0.0614383820898088,-0.0139477432193303,-0.0394933828740919,-0.0514005352605825,0.0196328370737072
-0.00188201652779104,-0.044641636506989,-0.0697968664947814,-0.0125563519424068,-0.000193006962010205,-0.00914258897095694,0.0707299262746723,-0.0394933828740919,-0.0629129499162512,0.0403433716478807
-0.00188201652779104,-0.044641636506989,0.0336730925977851,0.125158475807044,0.0245741444856101,0.0262431872112602,-0.0102661054152432,-0.00259226199818282,0.0267142576335128,0.0610539062220542
0.063503675590561,0.0506801187398187,-0.00405032998804645,-0.0125563519424068,0.103003457403075,0.0487898764601065,0.056003375058324,-0.00259226199818282,0.0844952822124031,-0.0176461251598052
0.0126481372762872,0.0506801187398187,-0.02021751109626,-0.00222773986119799,0.0383336730676214,0.05317395492516,-0.00658446761115617,0.0343088588777263,-0.00514530798026311,-0.0093619113301358
0.0126481372762872,0.0506801187398187,0.00241654245523897,0.0563010619323185,0.0273260502020124,0.0171618818193638,0.0412768238419757,-0.0394933828740919,0.00371173823343597,0.0734802269665584
-0.00914709342983014,0.0506801187398187,-0.030995631835069,-0.0263278347173518,-0.0112006298276192,-0.00100072896442909,-0.0213110188275045,-0.00259226199818282,0.0062093156165054,0.0279170509033766
-0.0309423241359475,0.0506801187398187,0.0282840322283806,0.0700725447072635,-0.126780669916514,-0.106844909049291,-0.0544457590642881,-0.047980640675551,-0.0307512098645563,0.0154907301588724
-0.0963280162542995,-0.044641636506989,-0.0363846922044735,-0.0745280244296595,-0.0387196869916418,-0.0276183482165393,0.0155053592133662,-0.0394933828740919,-0.0740888714915354,-0.00107769750046639
0.00538306037424807,-0.044641636506989,-0.0579409336820915,-0.0228849640236156,-0.0676146970138656,-0.0683276482491785,-0.0544457590642881,-0.00259226199818282,0.0428956878925287,-0.0839198357971606
-0.103593093156339,-0.044641636506989,-0.0374625042783544,-0.0263278347173518,0.00255889875439205,0.0199802179754696,0.0118237214092792,-0.00259226199818282,-0.0683297436244215,-0.0259303389894746
0.0707687524926,-0.044641636506989,0.0121168511201671,0.0425295791573734,0.0713565416644485,0.0534871033869495,0.052321737254237,-0.00259226199818282,0.0253931349154494,-0.0052198044153011
0.0126481372762872,0.0506801187398187,-0.0223731352440218,-0.0297707054110881,0.0108146159035988,0.0284352264437869,-0.0213110188275045,0.0343088588777263,-0.00608024819631442,-0.00107769750046639
-0.0164121703318693,-0.044641636506989,-0.0353068801305926,-0.0263278347173518,0.0328298616348169,0.0171618818193638,0.100183028707369,-0.0394933828740919,-0.0702093127286876,-0.0797777288823259
-0.0382074010379866,-0.044641636506989,0.00996122697240527,-0.0469850588797694,-0.0593589798646588,-0.0529833736214915,-0.0102661054152432,-0.0394933828740919,-0.0159982677581387,-0.0424987666488135
0.00175052192322852,-0.044641636506989,-0.0396181284261162,-0.100923366426447,-0.0290880169842339,-0.0301235359108556,0.0449584616460628,-0.0501947079281055,-0.0683297436244215,-0.129483011860342
0.0453409833354632,-0.044641636506989,0.0713965151836166,0.00121513083253827,-0.00982467696941811,-0.00100072896442909,0.0155053592133662,-0.0394933828740919,-0.0411803851880079,-0.0714935150526564
-0.0709002470971626,0.0506801187398187,-0.0751859268641859,-0.0400993174922969,-0.051103262715452,-0.015092409744958,-0.0397192078479398,-0.00259226199818282,-0.096433222891784,-0.0342145528191441
0.0453409833354632,-0.044641636506989,-0.00620595413580824,0.0115437429137471,0.0631008245152418,0.0162224364339952,0.0965013909032818,-0.0394933828740919,0.0428956878925287,-0.0383566597339788
-0.0527375548420648,0.0506801187398187,-0.0406959404999971,-0.067642283042187,-0.0318399227006362,-0.0370128020702253,0.0375951860378887,-0.0394933828740919,-0.0345237153303495,0.0693381200517237
-0.0454724779400257,-0.044641636506989,-0.0482406250171634,-0.0194420933298793,-0.000193006962010205,-0.0160318551303266,0.0670482884705852,-0.0394933828740919,-0.0247911874324607,0.0196328370737072
0.0126481372762872,-0.044641636506989,-0.0256065714656645,-0.0400993174922969,-0.0304639698424351,-0.0451546620767532,0.0780932018828464,-0.076394503750001,-0.072128454601956,0.0113486232440377
0.0453409833354632,-0.044641636506989,0.0519958978537604,-0.0538708002672419,0.0631008245152418,0.0647604480113727,-0.0102661054152432,0.0343088588777263,0.0372320112089689,0.0196328370737072
-0.0200447087828888,-0.044641636506989,0.00457216660300077,0.0976155102571536,0.00531080447079431,-0.0207290820571696,0.0633666506664982,-0.0394933828740919,0.0125531528133893,0.0113486232440377
-0.0491050163910452,-0.044641636506989,-0.064407806125377,-0.10207098997955,-0.00294491267841247,-0.0154055582067476,0.0633666506664982,-0.0472426182580328,-0.0332487872476258,-0.0549250873933176
-0.0781653239992017,-0.044641636506989,-0.0169840748746173,-0.0125563519424068,-0.000193006962010205,-0.0135266674360104,0.0707299262746723,-0.0394933828740919,-0.0411803851880079,-0.09220404962683
-0.0709002470971626,-0.044641636506989,-0.0579409336820915,-0.081413765817132,-0.0455994512826475,-0.0288709420636975,-0.0434008456520269,-0.00259226199818282,0.00114379737951254,-0.0052198044153011
0.0562385986885218,0.0506801187398187,0.00996122697240527,0.0494153205448459,-0.00432086553661359,-0.0122740735888523,-0.0434008456520269,0.0343088588777263,0.060787754150744,0.0320591578182113
-0.0273097856849279,-0.044641636506989,0.088641508365711,-0.0251802111642493,0.0218222387692079,0.0425269072243159,-0.0323559322397657,0.0343088588777263,0.00286377051894013,0.0776223338813931
0.00175052192322852,0.0506801187398187,-0.00512814206192736,-0.0125563519424068,-0.0153284884022226,-0.0138398158977999,0.0081420836051921,-0.0394933828740919,-0.00608024819631442,-0.0673514081378217
-0.00188201652779104,-0.044641636506989,-0.064407806125377,0.0115437429137471,0.0273260502020124,0.0375165318356834,-0.0139477432193303,0.0343088588777263,0.0117839003835759,-0.0549250873933176
0.0162806757273067,-0.044641636506989,0.0175059114895716,-0.0228849640236156,0.0603489187988395,0.0444057979950531,0.0302319104297145,-0.00259226199818282,0.0372320112089689,-0.00107769750046639
0.0162806757273067,0.0506801187398187,-0.0450071887955207,0.063186803319791,0.0108146159035988,-0.00037443204085002,0.0633666506664982,-0.0394933828740919,-0.0307512098645563,0.036201264733046
-0.0926954778032799,-0.044641636506989,0.0282840322283806,-0.015999222636143,0.0369577202094203,0.0249905933641021,0.056003375058324,-0.0394933828740919,-0.00514530798026311,-0.00107769750046639
0.0598711371395414,0.0506801187398187,0.0412177771149514,0.0115437429137471,0.0410855787840237,0.0707102687853738,-0.0360375700438527,0.0343088588777263,-0.0109044358473771,-0.0300724459043093
-0.0273097856849279,-0.044641636506989,0.0649296427403312,-0.00222773986119799,-0.0249601584096305,-0.0172844489774848,0.0228686348215404,-0.0394933828740919,-0.0611765950943345,-0.063209301222987
0.0235457526293458,0.0506801187398187,-0.0320734439089499,-0.0400993174922969,-0.0318399227006362,-0.0216685274425382,-0.0139477432193303,-0.00259226199818282,-0.0109044358473771,0.0196328370737072
-0.0963280162542995,-0.044641636506989,-0.0762637389380668,-0.0435421881860331,-0.0455994512826475,-0.0348207628376986,0.0081420836051921,-0.0394933828740919,-0.0594726974107223,-0.0839198357971606
0.0271782910803654,-0.044641636506989,0.0498402737059986,-0.0550184238203444,-0.00294491267841247,0.0406480164535787,-0.0581273968683752,0.0527594193156808,-0.0529587932392004,-0.0052198044153011
0.0199132141783263,0.0506801187398187,0.045529025410475,0.0299057198322448,-0.062110885581061,-0.0558017097775973,-0.0728539480847234,0.0269286347025444,0.0456008084141249,0.0403433716478807
0.0380759064334241,0.0506801187398187,-0.00943939035745095,0.0023627543856408,0.00118294589619092,0.0375165318356834,-0.0544457590642881,0.0501763408543672,-0.0259524244351894,0.106617082285236
0.0417084448844436,0.0506801187398187,-0.0320734439089499,-0.0228849640236156,-0.0497273098572509,-0.0401442866881206,0.0302319104297145,-0.0394933828740919,-0.126097385560409,0.0154907301588724
0.0199132141783263,-0.044641636506989,0.00457216660300077,-0.0263278347173518,0.023198191627409,0.0102726156599941,0.0670482884705852,-0.0394933828740919,-0.0236445575721341,-0.0466408735636482
-0.0854304009012408,-0.044641636506989,0.0207393477112143,-0.0263278347173518,0.00531080447079431,0.01966706951368,-0.0029028298070691,-0.00259226199818282,-0.0236445575721341,0.00306440941436832
0.0199132141783263,0.0506801187398187,0.0142724752679289,0.063186803319791,0.0149424744782022,0.0202933664372591,-0.0470824834561139,0.0343088588777263,0.0466607723568145,0.0900486546258972
0.0235457526293458,-0.044641636506989,0.110197749843329,0.063186803319791,0.0135665216200011,-0.0329418720669614,-0.0249926566315915,0.0206554441536399,0.09924022573399,0.0237749439885419
-0.0309423241359475,0.0506801187398187,0.00133873038135806,-0.00567061055493425,0.0644767773734429,0.0494161733836856,-0.0470824834561139,0.108111100629544,0.0837967663655224,0.00306440941436832
0.0489735217864827,0.0506801187398187,0.0584627702970458,0.0700725447072635,0.0135665216200011,0.0206065148990486,-0.0213110188275045,0.0343088588777263,0.0220040504561505,0.0279170509033766
0.0598711371395414,-0.044641636506989,-0.0212953231701409,0.0872868981759448,0.0452134373586271,0.0315667110616823,-0.0470824834561139,0.0712099797536354,0.0791210813896579,0.135611830689079
-0.0563700932930843,0.0506801187398187,-0.0105172024313319,0.0253152256886921,0.023198191627409,0.0400217195299996,-0.0397192078479398,0.0343088588777263,0.0206123307213641,0.0569117993072195
0.0162806757273067,-0.044641636506989,-0.0471628129432825,-0.00222773986119799,-0.019456346976826,-0.0429626228442264,0.0339135482338016,-0.0394933828740919,0.027367707542609,0.0279170509033766
-0.0491050163910452,-0.044641636506989,0.00457216660300077,0.0115437429137471,-0.0373437341334407,-0.0185370428246429,-0.0176293810234174,-0.00259226199818282,-0.0398095943643375,-0.0217882320746399
0.063503675590561,-0.044641636506989,0.0175059114895716,0.0218723549949558,0.00806271018719657,0.0215459602844172,-0.0360375700438527,0.0343088588777263,0.0199084208763183,0.0113486232440377
0.0489735217864827,0.0506801187398187,0.0810968238485447,0.0218723549949558,0.0438374845004259,0.0641341510877936,-0.0544457590642881,0.0712099797536354,0.0324332257796019,0.0486275854775501
0.00538306037424807,0.0506801187398187,0.034750904671666,-0.00108011630809546,0.152537760298315,0.198787989657293,-0.0618090346724622,0.185234443260194,0.0155668445407018,0.0734802269665584
-0.00551455497881059,-0.044641636506989,0.023972783932857,0.0081008722200108,-0.0345918284170385,-0.0388916928409625,0.0228686348215404,-0.0394933828740919,-0.0159982677581387,-0.0135040182449705
-0.00551455497881059,0.0506801187398187,-0.00836157828357004,-0.00222773986119799,-0.0332158755588373,-0.0636304213223356,-0.0360375700438527,-0.00259226199818282,0.0805854642386665,0.00720651632920303
-0.0890629393522603,-0.044641636506989,-0.0611743699037342,-0.0263278347173518,-0.0552311212900554,-0.0545491159304391,0.0412768238419757,-0.076394503750001,-0.0939356455087147,-0.0549250873933176
0.0344433679824045,0.0506801187398187,-0.00189470584028465,-0.0125563519424068,0.0383336730676214,0.0137172487396789,0.0780932018828464,-0.0394933828740919,0.00455189046612778,-0.0963461565416647
-0.0527375548420648,-0.044641636506989,-0.0622521819776151,-0.0263278347173518,-0.00569681839481472,-0.005071658967693,0.0302319104297145,-0.0394933828740919,-0.0307512098645563,-0.0714935150526564
0.00901559882526763,-0.044641636506989,0.0164280994156907,0.00465800152627453,0.0094386630453977,0.0105857641217836,-0.0286742944356786,0.0343088588777263,0.0389683660308856,0.11904340302974
-0.0636351701951234,0.0506801187398187,0.0961861928828773,0.104501251644626,-0.00294491267841247,-0.00475851050590347,-0.00658446761115617,-0.00259226199818282,0.0226920225667445,0.0734802269665584
-0.0963280162542995,-0.044641636506989,-0.0697968664947814,-0.067642283042187,-0.019456346976826,-0.0107083312799046,0.0155053592133662,-0.0394933828740919,-0.0468794828442166,-0.0797777288823259
0.0162806757273067,0.0506801187398187,-0.0212953231701409,-0.00911348124867051,0.034205814493018,0.047850431074738,0.000778807997017968,-0.00259226199818282,-0.0129079422541688,0.0237749439885419
-0.0418399394890061,0.0506801187398187,-0.0536296853865679,-0.0400993174922969,-0.0841261313122791,-0.0717722813288634,-0.0029028298070691,-0.0394933828740919,-0.072128454601956,-0.0300724459043093
-0.0745327855481821,-0.044641636506989,0.0433734012627132,-0.0332135761048244,0.0121905687618,0.000251864882729031,0.0633666506664982,-0.0394933828740919,-0.0271286455543265,-0.0466408735636482
-0.00551455497881059,-0.044641636506989,0.056307146149284,-0.0366564467985606,-0.0483513569990498,-0.0429626228442264,-0.0728539480847234,0.0379989709653172,0.0507815133629732,0.0569117993072195
-0.0926954778032799,-0.044641636506989,-0.0816527993074713,-0.0573136709609782,-0.0607349327228599,-0.068014499787389,0.0486400994501499,-0.076394503750001,-0.0664881482228354,-0.0217882320746399
0.00538306037424807,-0.044641636506989,0.0498402737059986,0.0976155102571536,-0.0153284884022226,-0.0163450035921162,-0.00658446761115617,-0.00259226199818282,0.01703713241478,-0.0135040182449705
0.0344433679824045,0.0506801187398187,0.11127556191721,0.076958286094736,-0.0318399227006362,-0.03388131745233,-0.0213110188275045,-0.00259226199818282,0.028016506523264,0.0734802269665584
0.0235457526293458,-0.044641636506989,0.0616962065186885,0.0528581912385822,-0.0345918284170385,-0.0489124436182275,-0.0286742944356786,-0.00259226199818282,0.0547240033481791,-0.0052198044153011
0.0417084448844436,0.0506801187398187,0.0142724752679289,0.0425295791573734,-0.0304639698424351,-0.00131387742621863,-0.0434008456520269,-0.00259226199818282,-0.0332487872476258,0.0154907301588724
-0.0273097856849279,-0.044641636506989,0.0476846495582368,-0.0469850588797694,0.034205814493018,0.0572448849284239,-0.0802172236928976,0.13025177315509,0.0450661683362615,0.131469723774244
0.0417084448844436,0.0506801187398187,0.0121168511201671,0.0390867084636372,0.054845107366035,0.0444057979950531,0.00446044580110504,-0.00259226199818282,0.0456008084141249,-0.00107769750046639
-0.0309423241359475,-0.044641636506989,0.00564997867688165,-0.00911348124867051,0.0190703330528056,0.00682798258030921,0.0744115640787594,-0.0394933828740919,-0.0411803851880079,-0.0424987666488135
0.030810829531385,0.0506801187398187,0.0466068374843559,-0.015999222636143,0.0204462859110067,0.0506687672308438,-0.0581273968683752,0.0712099797536354,0.0062093156165054,0.00720651632920303
-0.0418399394890061,-0.044641636506989,0.128520555099304,0.063186803319791,-0.0332158755588373,-0.0326287236051719,0.0118237214092792,-0.0394933828740919,-0.0159982677581387,-0.0507829804784829
-0.0309423241359475,0.0506801187398187,0.0595405823709267,0.00121513083253827,0.0121905687618,0.0315667110616823,-0.0434008456520269,0.0343088588777263,0.0148227108412663,0.00720651632920303
-0.0563700932930843,-0.044641636506989,0.0929527566612346,-0.0194420933298793,0.0149424744782022,0.0234248510551544,-0.0286742944356786,0.0254525898675081,0.0260560896336847,0.0403433716478807
-0.0600026317441039,0.0506801187398187,0.0153502873418098,-0.0194420933298793,0.0369577202094203,0.0481635795365275,0.0191869970174533,-0.00259226199818282,-0.0307512098645563,-0.00107769750046639
-0.0491050163910452,0.0506801187398187,-0.00512814206192736,-0.0469850588797694,-0.0208322998350272,-0.0204159335953801,-0.0691723102806364,0.0712099797536354,0.061237907519701,-0.0383566597339788
0.0235457526293458,-0.044641636506989,0.0703187031097357,0.0253152256886921,-0.0345918284170385,-0.014466112821379,-0.0323559322397657,-0.00259226199818282,-0.0191970476139445,-0.0093619113301358
0.00175052192322852,-0.044641636506989,-0.00405032998804645,-0.00567061055493425,-0.00844872411121698,-0.0238605666750649,0.052321737254237,-0.0394933828740919,-0.0089440189577978,-0.0135040182449705
-0.034574862586967,0.0506801187398187,-0.000816893766403737,0.0700725447072635,0.0397096259258226,0.0669524872438994,-0.0654906724765493,0.108111100629544,0.0267142576335128,0.0734802269665584
0.0417084448844436,0.0506801187398187,-0.0439293767216398,0.063186803319791,-0.00432086553661359,0.0162224364339952,-0.0139477432193303,-0.00259226199818282,-0.0345237153303495,0.0113486232440377
0.0671362140415805,0.0506801187398187,0.0207393477112143,-0.00567061055493425,0.0204462859110067,0.0262431872112602,-0.0029028298070691,-0.00259226199818282,0.00864028293306308,0.00306440941436832
-0.0273097856849279,0.0506801187398187,0.0606183944448076,0.0494153205448459,0.0851160702464598,0.0863676918748504,-0.0029028298070691,0.0343088588777263,0.0378144788263439,0.0486275854775501
-0.0164121703318693,-0.044641636506989,-0.0105172024313319,0.00121513083253827,-0.0373437341334407,-0.0357602082230672,0.0118237214092792,-0.0394933828740919,-0.02139368094036,-0.0342145528191441
-0.00188201652779104,0.0506801187398187,-0.0331512559828308,-0.0182944697767768,0.0314539087766158,0.0428400556861055,-0.0139477432193303,0.0199174217361217,0.0102256424049578,0.0279170509033766
-0.0127796318808497,-0.044641636506989,-0.0654856181992578,-0.0699375301828207,0.00118294589619092,0.0168487333575743,-0.0029028298070691,-0.00702039650329191,-0.0307512098645563,-0.0507829804784829
-0.00551455497881059,-0.044641636506989,0.0433734012627132,0.0872868981759448,0.0135665216200011,0.00714113104209875,-0.0139477432193303,-0.00259226199818282,0.0423448954496075,-0.0176461251598052
-0.00914709342983014,-0.044641636506989,-0.0622521819776151,-0.0745280244296595,-0.0235842055514294,-0.0132135189742209,0.00446044580110504,-0.0394933828740919,-0.0358167281015492,-0.0466408735636482
-0.0454724779400257,0.0506801187398187,0.0638518306664503,0.0700725447072635,0.133274420283499,0.131461070372543,-0.0397192078479398,0.108111100629544,0.0757375884575476,0.0859065477110625
-0.0527375548420648,-0.044641636506989,0.0304396563761424,-0.0745280244296595,-0.0235842055514294,-0.0113346282034837,-0.0029028298070691,-0.00259226199818282,-0.0307512098645563,-0.00107769750046639
0.0162806757273067,0.0506801187398187,0.0724743272574975,0.076958286094736,-0.00844872411121698,0.00557538873315109,-0.00658446761115617,-0.00259226199818282,-0.0236445575721341,0.0610539062220542
0.0453409833354632,-0.044641636506989,-0.019139699022379,0.0218723549949558,0.0273260502020124,-0.0135266674360104,0.100183028707369,-0.0394933828740919,0.0177634778671173,-0.0135040182449705
-0.0418399394890061,-0.044641636506989,-0.0665634302731387,-0.0469850588797694,-0.0373437341334407,-0.043275771306016,0.0486400994501499,-0.0394933828740919,-0.0561575730950062,-0.0135040182449705
-0.0563700932930843,0.0506801187398187,-0.0600965578298533,-0.0366564467985606,-0.0882539898868825,-0.0708328359434948,-0.0139477432193303,-0.0394933828740919,-0.0781409106690696,-0.104630370371334
0.0707687524926,-0.044641636506989,0.0692408910358548,0.0379390850138207,0.0218222387692079,0.00150445872988718,-0.0360375700438527,0.0391060045915944,0.0776327891955595,0.106617082285236
0.00175052192322852,0.0506801187398187,0.0595405823709267,-0.00222773986119799,0.0617248716570406,0.063194705702425,-0.0581273968683752,0.108111100629544,0.0689822116363026,0.12732761685941
-0.00188201652779104,-0.044641636506989,-0.0266843835395454,0.0494153205448459,0.0589729659406384,-0.0160318551303266,-0.0470824834561139,0.0712099797536354,0.133598980013008,0.0196328370737072
0.0235457526293458,0.0506801187398187,-0.02021751109626,-0.0366564467985606,-0.0139525355440215,-0.015092409744958,0.0596850128624111,-0.0394933828740919,-0.096433222891784,-0.0176461251598052
-0.0200447087828888,-0.044641636506989,-0.0460850008694016,-0.0986281192858133,-0.0758704141630723,-0.0598726397808612,-0.0176293810234174,-0.0394933828740919,-0.0514005352605825,-0.0466408735636482
0.0417084448844436,0.0506801187398187,0.0713965151836166,0.0081008722200108,0.0383336730676214,0.0159092879722056,-0.0176293810234174,0.0343088588777263,0.0734100780491161,0.0859065477110625
-0.0636351701951234,0.0506801187398187,-0.0794971751597095,-0.00567061055493425,-0.071742555588469,-0.0664487574784414,-0.0102661054152432,-0.0394933828740919,-0.0181182673078967,-0.0549250873933176
0.0162806757273067,0.0506801187398187,0.00996122697240527,-0.0435421881860331,-0.0965097070360893,-0.0946321190394993,-0.0397192078479398,-0.0394933828740919,0.01703713241478,0.00720651632920303
0.0671362140415805,-0.044641636506989,-0.0385403163522353,-0.0263278347173518,-0.0318399227006362,-0.0263657543693812,0.0081420836051921,-0.0394933828740919,-0.0271286455543265,0.00306440941436832
0.0453409833354632,0.0506801187398187,0.0196615356373334,0.0390867084636372,0.0204462859110067,0.0259300387494707,0.0081420836051921,-0.00259226199818282,-0.003303712578677,0.0196328370737072
0.0489735217864827,-0.044641636506989,0.0272062201544997,-0.0251802111642493,0.023198191627409,0.0184144756665219,-0.0618090346724622,0.0800662487638535,0.0722236508199124,0.0320591578182113
0.0417084448844436,-0.044641636506989,-0.00836157828357004,-0.0263278347173518,0.0245741444856101,0.0162224364339952,0.0707299262746723,-0.0394933828740919,-0.0483617248028919,-0.0300724459043093
-0.0236772472339084,-0.044641636506989,-0.0159062628007364,-0.0125563519424068,0.0204462859110067,0.0412743133771578,-0.0434008456520269,0.0343088588777263,0.0140724525157685,-0.0093619113301358
-0.0382074010379866,0.0506801187398187,0.00457216660300077,0.0356438377699009,-0.0112006298276192,0.00588853719494063,-0.0470824834561139,0.0343088588777263,0.0163049527999418,-0.00107769750046639
0.0489735217864827,-0.044641636506989,-0.0428515646477589,-0.0538708002672419,0.0452134373586271,0.0500424703072647,0.0339135482338016,-0.00259226199818282,-0.0259524244351894,-0.063209301222987
0.0453409833354632,0.0506801187398187,0.00564997867688165,0.0563010619323185,0.0644767773734429,0.0891860280309562,-0.0397192078479398,0.0712099797536354,0.0155668445407018,-0.0093619113301358
0.0453409833354632,0.0506801187398187,-0.0353068801305926,0.063186803319791,-0.00432086553661359,-0.00162702588800815,-0.0102661054152432,-0.00259226199818282,0.0155668445407018,0.0569117993072195
0.0162806757273067,-0.044641636506989,0.023972783932857,-0.0228849640236156,-0.0249601584096305,-0.0260526059075917,-0.0323559322397657,-0.00259226199818282,0.0372320112089689,0.0320591578182113
-0.0745327855481821,0.0506801187398187,-0.0180618869484982,0.0081008722200108,-0.019456346976826,-0.0248000120604336,-0.0654906724765493,0.0343088588777263,0.0673172179146849,-0.0176461251598052
-0.0817978624502212,0.0506801187398187,0.0422955891888323,-0.0194420933298793,0.0397096259258226,0.0575580333902134,-0.0691723102806364,0.108111100629544,0.0471861678860197,-0.0383566597339788
-0.067267708646143,-0.044641636506989,-0.0547074974604488,-0.0263278347173518,-0.0758704141630723,-0.082106180567918,0.0486400994501499,-0.076394503750001,-0.0868289932162924,-0.104630370371334
0.00538306037424807,-0.044641636506989,-0.00297251791416553,0.0494153205448459,0.0741084473808508,0.0707102687853738,0.0449584616460628,-0.00259226199818282,-0.00149858682029207,-0.0093619113301358
-0.00188201652779104,-0.044641636506989,-0.0665634302731387,0.00121513083253827,-0.00294491267841247,0.00307020103883484,0.0118237214092792,-0.00259226199818282,-0.0202887477516296,-0.0259303389894746
0.00901559882526763,-0.044641636506989,-0.0126728265790937,0.0287580963824284,-0.0180803941186249,-0.005071658967693,-0.0470824834561139,0.0343088588777263,0.0233748412798208,-0.0052198044153011
-0.00551455497881059,0.0506801187398187,-0.041773752573878,-0.0435421881860331,-0.0799982727376757,-0.0761563597939169,-0.0323559322397657,-0.0394933828740919,0.0102256424049578,-0.0093619113301358
0.0562385986885218,0.0506801187398187,-0.030995631835069,0.0081008722200108,0.0190703330528056,0.0212328118226277,0.0339135482338016,-0.0394933828740919,-0.0295276227417736,-0.0590671943081523
0.00901559882526763,0.0506801187398187,-0.00512814206192736,-0.0641994123484507,0.0699805888062474,0.0838625041805342,-0.0397192078479398,0.0712099797536354,0.0395398780720242,0.0196328370737072
-0.067267708646143,-0.044641636506989,-0.0590187457559724,0.0322009670761646,-0.051103262715452,-0.0495387405418066,-0.0102661054152432,-0.0394933828740919,0.00200784054982379,0.0237749439885419
0.0271782910803654,0.0506801187398187,0.0250505960067379,0.0149866136074833,0.0259500973438113,0.048476727998317,-0.0397192078479398,0.0343088588777263,0.00783714230182385,0.0237749439885419
-0.0236772472339084,-0.044641636506989,-0.0460850008694016,-0.0332135761048244,0.0328298616348169,0.0362639379885253,0.0375951860378887,-0.00259226199818282,-0.0332487872476258,0.0113486232440377
0.0489735217864827,0.0506801187398187,0.00349435452911985,0.0700725447072635,-0.00844872411121698,0.0134041002778894,-0.0544457590642881,0.0343088588777263,0.0133159679089277,0.036201264733046
-0.0527375548420648,-0.044641636506989,0.0541515220015222,-0.0263278347173518,-0.0552311212900554,-0.03388131745233,-0.0139477432193303,-0.0394933828740919,-0.0740888714915354,-0.0590671943081523
0.0417084448844436,-0.044641636506989,-0.0450071887955207,0.0344962143200845,0.0438374845004259,-0.0157187066685371,0.0375951860378887,-0.0144006206784737,0.089898693277671,0.00720651632920303
0.0562385986885218,-0.044641636506989,-0.0579409336820915,-0.00796585769556799,0.0520932016496327,0.0491030249218961,0.056003375058324,-0.0214118336448964,-0.0283202425479987,0.0444854785627154
-0.034574862586967,0.0506801187398187,-0.0557853095343297,-0.015999222636143,-0.00982467696941811,-0.00788999512379879,0.0375951860378887,-0.0394933828740919,-0.0529587932392004,0.0279170509033766
0.0816663678456587,0.0506801187398187,0.00133873038135806,0.0356438377699009,0.126394655992494,0.0910649188016934,0.0191869970174533,0.0343088588777263,0.0844952822124031,-0.0300724459043093
-0.00188201652779104,0.0506801187398187,0.0304396563761424,0.0528581912385822,0.0397096259258226,0.0566185880048449,-0.0397192078479398,0.0712099797536354,0.0253931349154494,0.0279170509033766
0.110726675453815,0.0506801187398187,0.00672779075076256,0.0287580963824284,-0.0277120641260328,-0.00726369820021974,-0.0470824834561139,0.0343088588777263,0.00200784054982379,0.0776223338813931
-0.0309423241359475,-0.044641636506989,0.0466068374843559,0.0149866136074833,-0.0167044412604238,-0.0470335528474903,0.000778807997017968,-0.00259226199818282,0.0634559213720654,-0.0259303389894746
0.00175052192322852,0.0506801187398187,0.0261284080806188,-0.00911348124867051,0.0245741444856101,0.038455977221052,-0.0213110188275045,0.0343088588777263,0.00943640914607987,0.00306440941436832
0.00901559882526763,-0.044641636506989,0.045529025410475,0.0287580963824284,0.0121905687618,-0.0138398158977999,0.0265502726256275,-0.0394933828740919,0.0461323310394148,0.036201264733046
0.030810829531385,-0.044641636506989,0.0401399650410705,0.076958286094736,0.0176943801946045,0.0378296802974729,-0.0286742944356786,0.0343088588777263,-0.00149858682029207,0.11904340302974
0.0380759064334241,0.0506801187398187,-0.0180618869484982,0.0666296740135272,-0.051103262715452,-0.0166581520539057,-0.0765355858888105,0.0343088588777263,-0.0119006848015081,-0.0135040182449705
0.00901559882526763,-0.044641636506989,0.0142724752679289,0.0149866136074833,0.054845107366035,0.0472241341511589,0.0707299262746723,-0.0394933828740919,-0.0332487872476258,-0.0590671943081523
0.0925639831987174,-0.044641636506989,0.0369065288194278,0.0218723549949558,-0.0249601584096305,-0.0166581520539057,0.000778807997017968,-0.0394933828740919,-0.0225121719296605,-0.0217882320746399
0.0671362140415805,-0.044641636506989,0.00349435452911985,0.0356438377699009,0.0493412959332305,0.0312535625998928,0.0707299262746723,-0.0394933828740919,-0.000609254186102297,0.0196328370737072
0.00175052192322852,-0.044641636506989,-0.0708746785686623,-0.0228849640236156,-0.00156895982021134,-0.00100072896442909,0.0265502726256275,-0.0394933828740919,-0.0225121719296605,0.00720651632920303
0.030810829531385,-0.044641636506989,-0.0331512559828308,-0.0228849640236156,-0.0469754041408486,-0.0811667351825494,0.103864666511456,-0.076394503750001,-0.0398095943643375,-0.0549250873933176
0.0271782910803654,0.0506801187398187,0.0940305687351156,0.0976155102571536,-0.0345918284170385,-0.0320024266815928,-0.0434008456520269,-0.00259226199818282,0.0366457977933988,0.106617082285236
0.0126481372762872,0.0506801187398187,0.0358287167455469,0.0494153205448459,0.0534691545078339,0.0741549018650587,-0.0691723102806364,0.145012221505454,0.0456008084141249,0.0486275854775501
0.0744012909436196,-0.044641636506989,0.0315174684500233,0.10105838095089,0.0465893902168282,0.0368902349121043,0.0155053592133662,-0.00259226199818282,0.0336568129023847,0.0444854785627154
-0.0418399394890061,-0.044641636506989,-0.0654856181992578,-0.0400993174922969,-0.00569681839481472,0.014343545663258,-0.0434008456520269,0.0343088588777263,0.00702686254915195,-0.0135040182449705
-0.0890629393522603,-0.044641636506989,-0.041773752573878,-0.0194420933298793,-0.0662387441556644,-0.0742774690231797,0.0081420836051921,-0.0394933828740919,0.00114379737951254,-0.0300724459043093
0.0235457526293458,0.0506801187398187,-0.0396181284261162,-0.00567061055493425,-0.0483513569990498,-0.0332550205287509,0.0118237214092792,-0.0394933828740919,-0.101643547945512,-0.0673514081378217
-0.0454724779400257,-0.044641636506989,-0.0385403163522353,-0.0263278347173518,-0.0153284884022226,0.000878161806308105,-0.0323559322397657,-0.00259226199818282,0.00114379737951254,-0.0383566597339788
-0.0236772472339084,0.0506801187398187,-0.0256065714656645,0.0425295791573734,-0.0538551684318543,-0.0476598497710694,-0.0213110188275045,-0.0394933828740919,0.00114379737951254,0.0196328370737072
-0.099960554705319,-0.044641636506989,-0.0234509473179027,-0.0641994123484507,-0.0579830270064577,-0.0601857882426507,0.0118237214092792,-0.0394933828740919,-0.0181182673078967,-0.0507829804784829
-0.0273097856849279,-0.044641636506989,-0.0665634302731387,-0.112399602060758,-0.0497273098572509,-0.0413968805352788,0.000778807997017968,-0.0394933828740919,-0.0358167281015492,-0.0093619113301358
0.030810829531385,0.0506801187398187,0.0325952805239042,0.0494153205448459,-0.040095639849843,-0.0435889197678055,-0.0691723102806364,0.0343088588777263,0.0630166151147464,0.00306440941436832
-0.103593093156339,0.0506801187398187,-0.0460850008694016,-0.0263278347173518,-0.0249601584096305,-0.0248000120604336,0.0302319104297145,-0.0394933828740919,-0.0398095943643375,-0.0549250873933176
0.0671362140415805,0.0506801187398187,-0.0299178197611881,0.0574486853821349,-0.000193006962010205,-0.0157187066685371,0.0744115640787594,-0.0505637191368646,-0.0384591123013538,0.00720651632920303
-0.0527375548420648,-0.044641636506989,-0.0126728265790937,-0.0607565416547144,-0.000193006962010205,0.00808057642746734,0.0118237214092792,-0.00259226199818282,-0.0271286455543265,-0.0507829804784829
-0.0273097856849279,0.0506801187398187,-0.0159062628007364,-0.0297707054110881,0.00393485161259318,-0.000687580502639557,0.0412768238419757,-0.0394933828740919,-0.0236445575721341,0.0113486232440377
-0.0382074010379866,0.0506801187398187,0.0713965151836166,-0.0573136709609782,0.153913713156516,0.155886650392127,0.000778807997017968,0.0719480021711535,0.0502764933899896,0.0693381200517237
0.00901559882526763,-0.044641636506989,-0.030995631835069,0.0218723549949558,0.00806271018719657,0.00870687335104641,0.00446044580110504,-0.00259226199818282,0.00943640914607987,0.0113486232440377
0.0126481372762872,0.0506801187398187,0.000260918307477141,-0.0114087283893043,0.0397096259258226,0.0572448849284239,-0.0397192078479398,0.0560805201945126,0.024052583226893,0.0320591578182113
0.0671362140415805,-0.044641636506989,0.0369065288194278,-0.0504279295735057,-0.0235842055514294,-0.034507614375909,0.0486400994501499,-0.0394933828740919,-0.0259524244351894,-0.0383566597339788
0.0453409833354632,-0.044641636506989,0.0390621529671896,0.0459724498511097,0.00668675732899544,-0.0241737151368545,0.0081420836051921,-0.0125555646346783,0.0643282330236709,0.0569117993072195
0.0671362140415805,0.0506801187398187,-0.0148284507268555,0.0585963091762383,-0.0593589798646588,-0.034507614375909,-0.0618090346724622,0.012906208769699,-0.00514530798026311,0.0486275854775501
0.0271782910803654,-0.044641636506989,0.00672779075076256,0.0356438377699009,0.0796122588136553,0.0707102687853738,0.0155053592133662,0.0343088588777263,0.0406722637144977,0.0113486232440377
0.0562385986885218,-0.044641636506989,-0.0687190544209005,-0.0687899065952895,-0.000193006962010205,-0.00100072896442909,0.0449584616460628,-0.0376483268302965,-0.0483617248028919,-0.00107769750046639
0.0344433679824045,0.0506801187398187,-0.00943939035745095,0.0597439326260547,-0.0359677812752396,-0.00757684666200928,-0.0765355858888105,0.0712099797536354,0.0110081010458725,-0.0217882320746399
0.0235457526293458,-0.044641636506989,0.0196615356373334,-0.0125563519424068,0.0837401173882587,0.0387691256828415,0.0633666506664982,-0.00259226199818282,0.0660482061630984,0.0486275854775501
0.0489735217864827,0.0506801187398187,0.0746299514052593,0.0666296740135272,-0.00982467696941811,-0.00225332281158722,-0.0434008456520269,0.0343088588777263,0.0336568129023847,0.0196328370737072
0.030810829531385,0.0506801187398187,-0.00836157828357004,0.00465800152627453,0.0149424744782022,0.0274957810584184,0.0081420836051921,-0.00812743012956918,-0.0295276227417736,0.0569117993072195
-0.103593093156339,0.0506801187398187,-0.0234509473179027,-0.0228849640236156,-0.0868780370286814,-0.0677013513255995,-0.0176293810234174,-0.0394933828740919,-0.0781409106690696,-0.0714935150526564
0.0162806757273067,0.0506801187398187,-0.0460850008694016,0.0115437429137471,-0.0332158755588373,-0.0160318551303266,-0.0102661054152432,-0.00259226199818282,-0.0439854025655911,-0.0424987666488135
-0.0600026317441039,0.0506801187398187,0.0541515220015222,-0.0194420933298793,-0.0497273098572509,-0.0489124436182275,0.0228686348215404,-0.0394933828740919,-0.0439854025655911,-0.0052198044153011
-0.0273097856849279,-0.044641636506989,-0.0353068801305926,-0.0297707054110881,-0.0566070741482565,-0.058620045933703,0.0302319104297145,-0.0394933828740919,-0.0498684677352306,-0.129483011860342
0.0417084448844436,-0.044641636506989,-0.0320734439089499,-0.061904165207817,0.0796122588136553,0.0509819156926333,0.056003375058324,-0.00997248617336464,0.0450661683362615,-0.0590671943081523
-0.0817978624502212,-0.044641636506989,-0.0816527993074713,-0.0400993174922969,0.00255889875439205,-0.0185370428246429,0.0707299262746723,-0.0394933828740919,-0.0109044358473771,-0.09220404962683
-0.0418399394890061,-0.044641636506989,0.0476846495582368,0.0597439326260547,0.127770608850695,0.128016437292858,-0.0249926566315915,0.108111100629544,0.0638931206368394,0.0403433716478807
-0.0127796318808497,-0.044641636506989,0.0606183944448076,0.0528581912385822,0.0479653430750293,0.0293746718291555,-0.0176293810234174,0.0343088588777263,0.0702112981933102,0.00720651632920303
0.0671362140415805,-0.044641636506989,0.056307146149284,0.0735154154009998,-0.0139525355440215,-0.039204841302752,-0.0323559322397657,-0.00259226199818282,0.0757375884575476,0.036201264733046
-0.0527375548420648,0.0506801187398187,0.098341817030639,0.0872868981759448,0.0603489187988395,0.0487898764601065,-0.0581273968683752,0.108111100629544,0.0844952822124031,0.0403433716478807
0.00538306037424807,-0.044641636506989,0.0595405823709267,-0.0561660474078757,0.0245741444856101,0.0528608064633705,-0.0434008456520269,0.0509143632718854,-0.00421985970694603,-0.0300724459043093
0.0816663678456587,-0.044641636506989,0.0336730925977851,0.0081008722200108,0.0520932016496327,0.0566185880048449,-0.0176293810234174,0.0343088588777263,0.0348641930961596,0.0693381200517237
0.030810829531385,0.0506801187398187,0.056307146149284,0.076958286094736,0.0493412959332305,-0.0122740735888523,-0.0360375700438527,0.0712099797536354,0.120053382001538,0.0900486546258972
0.00175052192322852,-0.044641636506989,-0.0654856181992578,-0.00567061055493425,-0.00707277125301585,-0.0194764882100115,0.0412768238419757,-0.0394933828740919,-0.003303712578677,0.00720651632920303
-0.0491050163910452,-0.044641636506989,0.160854917315731,-0.0469850588797694,-0.0290880169842339,-0.019789636671801,-0.0470824834561139,0.0343088588777263,0.028016506523264,0.0113486232440377
-0.0273097856849279,0.0506801187398187,-0.0557853095343297,0.0253152256886921,-0.00707277125301585,-0.0235474182132754,0.052321737254237,-0.0394933828740919,-0.00514530798026311,-0.0507829804784829
0.0780338293946392,0.0506801187398187,-0.0245287593917836,-0.0423945646329306,0.00668675732899544,0.0528608064633705,-0.0691723102806364,0.0808042711813717,-0.0371283460104736,0.0569117993072195
0.0126481372762872,-0.044641636506989,-0.0363846922044735,0.0425295791573734,-0.0139525355440215,0.0129343775852051,-0.0268334755336351,0.00515697338575809,-0.0439854025655911,0.00720651632920303
0.0417084448844436,-0.044641636506989,-0.00836157828357004,-0.0573136709609782,0.00806271018719657,-0.0313761297580137,0.151725957964588,-0.076394503750001,-0.0802365402489018,-0.0176461251598052
0.0489735217864827,-0.044641636506989,-0.041773752573878,0.104501251644626,0.0355817673512192,-0.0257394574458021,0.177497422593197,-0.076394503750001,-0.0129079422541688,0.0154907301588724
-0.0164121703318693,0.0506801187398187,0.127442743025423,0.0976155102571536,0.0163184273364034,0.0174750302811533,-0.0213110188275045,0.0343088588777263,0.0348641930961596,0.00306440941436832
-0.0745327855481821,0.0506801187398187,-0.0773415510119477,-0.0469850588797694,-0.0469754041408486,-0.0326287236051719,0.00446044580110504,-0.0394933828740919,-0.072128454601956,-0.0176461251598052
0.0344433679824045,0.0506801187398187,0.0282840322283806,-0.0332135761048244,-0.0455994512826475,-0.00976888589453599,-0.050764121260201,-0.00259226199818282,-0.0594726974107223,-0.0217882320746399
-0.034574862586967,0.0506801187398187,-0.0256065714656645,-0.0171468461892456,0.00118294589619092,-0.00287961973516629,0.0081420836051921,-0.015507654304751,0.0148227108412663,0.0403433716478807
-0.0527375548420648,0.0506801187398187,-0.0622521819776151,0.0115437429137471,-0.00844872411121698,-0.0366996536084358,0.122272855531891,-0.076394503750001,-0.0868289932162924,0.00306440941436832
0.0598711371395414,-0.044641636506989,-0.000816893766403737,-0.0848566365108683,0.075484400239052,0.0794784257154807,0.00446044580110504,0.0343088588777263,0.0233748412798208,0.0279170509033766
0.063503675590561,0.0506801187398187,0.088641508365711,0.0700725447072635,0.0204462859110067,0.0375165318356834,-0.050764121260201,0.0712099797536354,0.0293004132685869,0.0734802269665584
0.00901559882526763,-0.044641636506989,-0.0320734439089499,-0.0263278347173518,0.0424615316422248,-0.0103951828181151,0.159089233572762,-0.076394503750001,-0.0119006848015081,-0.0383566597339788
0.00538306037424807,0.0506801187398187,0.0304396563761424,0.0838440274822086,-0.0373437341334407,-0.0473467013092799,0.0155053592133662,-0.0394933828740919,0.00864028293306308,0.0154907301588724
0.0380759064334241,0.0506801187398187,0.00888341489852436,0.0425295791573734,-0.0428475455662452,-0.0210422305189592,-0.0397192078479398,-0.00259226199818282,-0.0181182673078967,0.00720651632920303
0.0126481372762872,-0.044641636506989,0.00672779075076256,-0.0561660474078757,-0.0758704141630723,-0.0664487574784414,-0.0213110188275045,-0.0376483268302965,-0.0181182673078967,-0.09220404962683
0.0744012909436196,0.0506801187398187,-0.02021751109626,0.0459724498511097,0.0741084473808508,0.0328193049088404,-0.0360375700438527,0.0712099797536354,0.106354276741726,0.036201264733046
0.0162806757273067,-0.044641636506989,-0.0245287593917836,0.0356438377699009,-0.00707277125301585,-0.00319276819695581,-0.0139477432193303,-0.00259226199818282,0.0155668445407018,0.0154907301588724
-0.00551455497881059,0.0506801187398187,-0.0115950145052127,0.0115437429137471,-0.0222082526932283,-0.0154055582067476,-0.0213110188275045,-0.00259226199818282,0.0110081010458725,0.0693381200517237
0.0126481372762872,-0.044641636506989,0.0261284080806188,0.063186803319791,0.125018703134293,0.0916912157252725,0.0633666506664982,-0.00259226199818282,0.057572856202426,-0.0217882320746399
-0.034574862586967,-0.044641636506989,-0.0590187457559724,0.00121513083253827,-0.0538551684318543,-0.078035250564654,0.0670482884705852,-0.076394503750001,-0.02139368094036,0.0154907301588724
0.0671362140415805,0.0506801187398187,-0.0363846922044735,-0.0848566365108683,-0.00707277125301585,0.01966706951368,-0.0544457590642881,0.0343088588777263,0.00114379737951254,0.0320591578182113
0.0380759064334241,0.0506801187398187,-0.0245287593917836,0.00465800152627453,-0.0263361112678317,-0.0263657543693812,0.0155053592133662,-0.0394933828740919,-0.0159982677581387,-0.0259303389894746
0.00901559882526763,0.0506801187398187,0.0185837235634525,0.0390867084636372,0.0176943801946045,0.0105857641217836,0.0191869970174533,-0.00259226199818282,0.0163049527999418,-0.0176461251598052
-0.0926954778032799,0.0506801187398187,-0.0902752958985185,-0.0573136709609782,-0.0249601584096305,-0.0304366843726451,-0.00658446761115617,-0.00259226199818282,0.024052583226893,0.00306440941436832
0.0707687524926,-0.044641636506989,-0.00512814206192736,-0.00567061055493425,0.0878679759628621,0.102964560349696,0.0118237214092792,0.0343088588777263,-0.0089440189577978,0.0279170509033766
-0.0164121703318693,-0.044641636506989,-0.052551873312687,-0.0332135761048244,-0.0442234984244464,-0.0363865051466462,0.0191869970174533,-0.0394933828740919,-0.0683297436244215,-0.0300724459043093
0.0417084448844436,0.0506801187398187,-0.0223731352440218,0.0287580963824284,-0.0662387441556644,-0.0451546620767532,-0.0618090346724622,-0.00259226199818282,0.00286377051894013,-0.0549250873933176
0.0126481372762872,-0.044641636506989,-0.02021751109626,-0.015999222636143,0.0121905687618,0.0212328118226277,-0.0765355858888105,0.108111100629544,0.0598807230654812,-0.0217882320746399
-0.0382074010379866,-0.044641636506989,-0.0547074974604488,-0.0779708951233958,-0.0332158755588373,-0.0864902590329714,0.140681044552327,-0.076394503750001,-0.0191970476139445,-0.0052198044153011
0.0453409833354632,-0.044641636506989,-0.00620595413580824,-0.015999222636143,0.125018703134293,0.125198101136752,0.0191869970174533,0.0343088588777263,0.0324332257796019,-0.0052198044153011
0.0707687524926,0.0506801187398187,-0.0169840748746173,0.0218723549949558,0.0438374845004259,0.0563054395430553,0.0375951860378887,-0.00259226199818282,-0.0702093127286876,-0.0176461251598052
-0.0745327855481821,0.0506801187398187,0.0552293340754031,-0.0400993174922969,0.0534691545078339,0.05317395492516,-0.0434008456520269,0.0712099797536354,0.061237907519701,-0.0342145528191441
0.0598711371395414,0.0506801187398187,0.0767855755530211,0.0253152256886921,0.00118294589619092,0.0168487333575743,-0.0544457590642881,0.0343088588777263,0.0299356483965325,0.0444854785627154
0.0744012909436196,-0.044641636506989,0.0185837235634525,0.063186803319791,0.0617248716570406,0.0428400556861055,0.0081420836051921,-0.00259226199818282,0.0580391276638951,-0.0590671943081523
0.00901559882526763,-0.044641636506989,-0.0223731352440218,-0.0320659525517218,-0.0497273098572509,-0.0686407967109681,0.0780932018828464,-0.0708593356186146,-0.0629129499162512,-0.0383566597339788
-0.0709002470971626,-0.044641636506989,0.0929527566612346,0.0126913664668496,0.0204462859110067,0.0425269072243159,0.000778807997017968,0.000359827671889909,-0.0545441527110952,-0.00107769750046639
0.0235457526293458,0.0506801187398187,-0.030995631835069,-0.00567061055493425,-0.0167044412604238,0.0177881787429428,-0.0323559322397657,-0.00259226199818282,-0.0740888714915354,-0.0342145528191441
-0.0527375548420648,0.0506801187398187,0.0390621529671896,-0.0400993174922969,-0.00569681839481472,-0.0129003705124313,0.0118237214092792,-0.0394933828740919,0.0163049527999418,0.00306440941436832
0.0671362140415805,-0.044641636506989,-0.0611743699037342,-0.0400993174922969,-0.0263361112678317,-0.024486863598644,0.0339135482338016,-0.0394933828740919,-0.0561575730950062,-0.0590671943081523
0.00175052192322852,-0.044641636506989,-0.00836157828357004,-0.0641994123484507,-0.0387196869916418,-0.024486863598644,0.00446044580110504,-0.0394933828740919,-0.0646830224644503,-0.0549250873933176
0.0235457526293458,0.0506801187398187,-0.0374625042783544,-0.0469850588797694,-0.0910058956032848,-0.0755300628703378,-0.0323559322397657,-0.0394933828740919,-0.0307512098645563,-0.0135040182449705
0.0380759064334241,0.0506801187398187,-0.0137506386529745,-0.015999222636143,-0.0359677812752396,-0.0219816759043277,-0.0139477432193303,-0.00259226199818282,-0.0259524244351894,-0.00107769750046639
0.0162806757273067,-0.044641636506989,0.0735521393313785,-0.0412469410453994,-0.00432086553661359,-0.0135266674360104,-0.0139477432193303,-0.00111621716314646,0.0428956878925287,0.0444854785627154
-0.00188201652779104,0.0506801187398187,-0.0245287593917836,0.0528581912385822,0.0273260502020124,0.0300009687527346,0.0302319104297145,-0.00259226199818282,-0.02139368094036,0.036201264733046
0.0126481372762872,-0.044641636506989,0.0336730925977851,0.0333485905259811,0.0300779559184146,0.0271826325966288,-0.0029028298070691,0.00884708547334898,0.0311929907028023,0.0279170509033766
0.0744012909436196,-0.044641636506989,0.034750904671666,0.0941726395634173,0.0575970130824372,0.0202933664372591,0.0228686348215404,-0.00259226199818282,0.0738021469200488,-0.0217882320746399
0.0417084448844436,0.0506801187398187,-0.0385403163522353,0.0528581912385822,0.0768603530972531,0.116429944206646,-0.0397192078479398,0.0712099797536354,-0.0225121719296605,-0.0135040182449705
-0.00914709342983014,0.0506801187398187,-0.0396181284261162,-0.0400993174922969,-0.00844872411121698,0.0162224364339952,-0.0654906724765493,0.0712099797536354,0.0177634778671173,-0.0673514081378217
0.00901559882526763,0.0506801187398187,-0.00189470584028465,0.0218723549949558,-0.0387196869916418,-0.0248000120604336,-0.00658446761115617,-0.0394933828740919,-0.0398095943643375,-0.0135040182449705
0.0671362140415805,0.0506801187398187,-0.030995631835069,0.00465800152627453,0.0245741444856101,0.0356376410649462,-0.0286742944356786,0.0343088588777263,0.0233748412798208,0.0817644407962278
0.00175052192322852,-0.044641636506989,-0.0460850008694016,-0.0332135761048244,-0.07311850844667,-0.0814798836443389,0.0449584616460628,-0.0693832907835783,-0.0611765950943345,-0.0797777288823259
-0.00914709342983014,0.0506801187398187,0.00133873038135806,-0.00222773986119799,0.0796122588136553,0.0700839718617947,0.0339135482338016,-0.00259226199818282,0.0267142576335128,0.0817644407962278
-0.00551455497881059,-0.044641636506989,0.0649296427403312,0.0356438377699009,-0.00156895982021134,0.0149698425868371,-0.0139477432193303,0.000728838880648992,-0.0181182673078967,0.0320591578182113
0.096196521649737,-0.044641636506989,0.0401399650410705,-0.0573136709609782,0.0452134373586271,0.0606895180081088,-0.0213110188275045,0.0361539149215217,0.0125531528133893,0.0237749439885419
-0.0745327855481821,-0.044641636506989,-0.0234509473179027,-0.00567061055493425,-0.0208322998350272,-0.0141529643595894,0.0155053592133662,-0.0394933828740919,-0.0384591123013538,-0.0300724459043093
0.0598711371395414,0.0506801187398187,0.0530737099276413,0.0528581912385822,0.0328298616348169,0.01966706951368,-0.0102661054152432,0.0343088588777263,0.0552050380896167,-0.00107769750046639
-0.0236772472339084,-0.044641636506989,0.0401399650410705,-0.0125563519424068,-0.00982467696941811,-0.00100072896442909,-0.0029028298070691,-0.00259226199818282,-0.0119006848015081,-0.0383566597339788
0.00901559882526763,-0.044641636506989,-0.02021751109626,-0.0538708002672419,0.0314539087766158,0.0206065148990486,0.056003375058324,-0.0394933828740919,-0.0109044358473771,-0.00107769750046639
0.0162806757273067,0.0506801187398187,0.0142724752679289,0.00121513083253827,0.00118294589619092,-0.0213553789807487,-0.0323559322397657,0.0343088588777263,0.0749683360277342,0.0403433716478807
0.0199132141783263,-0.044641636506989,-0.0342290680567117,0.055153438482502,0.0672286830898452,0.0741549018650587,-0.00658446761115617,0.0328328140426899,0.0247253233428045,0.0693381200517237
0.0889314447476978,-0.044641636506989,0.00672779075076256,0.0253152256886921,0.0300779559184146,0.00870687335104641,0.0633666506664982,-0.0394933828740919,0.00943640914607987,0.0320591578182113
0.0199132141783263,-0.044641636506989,0.00457216660300077,0.0459724498511097,-0.0180803941186249,-0.0545491159304391,0.0633666506664982,-0.0394933828740919,0.0286607203138089,0.0610539062220542
-0.0236772472339084,-0.044641636506989,0.0304396563761424,-0.00567061055493425,0.0823641645300576,0.092004364187062,-0.0176293810234174,0.0712099797536354,0.0330470723549341,0.00306440941436832
0.096196521649737,-0.044641636506989,0.0519958978537604,0.0792535333386559,0.054845107366035,0.0365770864503148,-0.0765355858888105,0.141322109417863,0.098646374304928,0.0610539062220542
0.0235457526293458,0.0506801187398187,0.0616962065186885,0.0620391798699746,0.0245741444856101,-0.0360733566848567,-0.0912621371051588,0.155344535350708,0.133395733837469,0.0817644407962278
0.0707687524926,0.0506801187398187,-0.00728376620968916,0.0494153205448459,0.0603489187988395,-0.00444536204411395,-0.0544457590642881,0.108111100629544,0.129019411600168,0.0569117993072195
0.030810829531385,-0.044641636506989,0.00564997867688165,0.0115437429137471,0.0782363059554542,0.077912683406533,-0.0434008456520269,0.108111100629544,0.0660482061630984,0.0196328370737072
-0.00188201652779104,-0.044641636506989,0.0541515220015222,-0.0664946594890845,0.0727324945226497,0.0566185880048449,-0.0434008456520269,0.0848633944777217,0.0844952822124031,0.0486275854775501
0.0453409833354632,0.0506801187398187,-0.00836157828357004,-0.0332135761048244,-0.00707277125301585,0.00119131026809764,-0.0397192078479398,0.0343088588777263,0.0299356483965325,0.0279170509033766
0.0744012909436196,-0.044641636506989,0.114508998138853,0.0287580963824284,0.0245741444856101,0.0249905933641021,0.0191869970174533,-0.00259226199818282,-0.000609254186102297,-0.0052198044153011
-0.0382074010379866,-0.044641636506989,0.067085266888093,-0.0607565416547144,-0.0290880169842339,-0.0232342697514859,-0.0102661054152432,-0.00259226199818282,-0.00149858682029207,0.0196328370737072
-0.0127796318808497,0.0506801187398187,-0.0557853095343297,-0.00222773986119799,-0.0277120641260328,-0.029184090525487,0.0191869970174533,-0.0394933828740919,-0.0170521046047435,0.0444854785627154
0.00901559882526763,0.0506801187398187,0.0304396563761424,0.0425295791573734,-0.00294491267841247,0.0368902349121043,-0.0654906724765493,0.0712099797536354,-0.0236445575721341,0.0154907301588724
0.0816663678456587,0.0506801187398187,-0.0256065714656645,-0.0366564467985606,-0.0703666027302678,-0.0464072559239113,-0.0397192078479398,-0.00259226199818282,-0.0411803851880079,-0.0052198044153011
0.030810829531385,-0.044641636506989,0.104808689473925,0.076958286094736,-0.0112006298276192,-0.0113346282034837,-0.0581273968683752,0.0343088588777263,0.0571041874478439,0.036201264733046
0.0271782910803654,0.0506801187398187,-0.00620595413580824,0.0287580963824284,-0.0167044412604238,-0.00162702588800815,-0.0581273968683752,0.0343088588777263,0.0293004132685869,0.0320591578182113
-0.0600026317441039,0.0506801187398187,-0.0471628129432825,-0.0228849640236156,-0.071742555588469,-0.0576806005483345,-0.00658446761115617,-0.0394933828740919,-0.0629129499162512,-0.0549250873933176
0.00538306037424807,-0.044641636506989,-0.0482406250171634,-0.0125563519424068,0.00118294589619092,-0.00663740127664067,0.0633666506664982,-0.0394933828740919,-0.0514005352605825,-0.0590671943081523
-0.0200447087828888,-0.044641636506989,0.0854080721440683,-0.0366564467985606,0.0919958345374655,0.0894991764927457,-0.0618090346724622,0.145012221505454,0.0809479135112756,0.0527696923923848
0.0199132141783263,0.0506801187398187,-0.0126728265790937,0.0700725447072635,-0.0112006298276192,0.00714113104209875,-0.0397192078479398,0.0343088588777263,0.00538436996854573,0.00306440941436832
-0.0636351701951234,-0.044641636506989,-0.0331512559828308,-0.0332135761048244,0.00118294589619092,0.0240511479787335,-0.0249926566315915,-0.00259226199818282,-0.0225121719296605,-0.0590671943081523
0.0271782910803654,-0.044641636506989,-0.00728376620968916,-0.0504279295735057,0.075484400239052,0.0566185880048449,0.0339135482338016,-0.00259226199818282,0.0434431722527813,0.0154907301588724
-0.0164121703318693,-0.044641636506989,-0.0137506386529745,0.132044217194516,-0.00982467696941811,-0.00381906512053488,0.0191869970174533,-0.0394933828740919,-0.0358167281015492,-0.0300724459043093
0.030810829531385,0.0506801187398187,0.0595405823709267,0.0563010619323185,-0.0222082526932283,0.00119131026809764,-0.0323559322397657,-0.00259226199818282,-0.0247911874324607,-0.0176461251598052
0.0562385986885218,0.0506801187398187,0.0218171597850952,0.0563010619323185,-0.00707277125301585,0.0181013272047324,-0.0323559322397657,-0.00259226199818282,-0.0236445575721341,0.0237749439885419
-0.0200447087828888,-0.044641636506989,0.0185837235634525,0.090729768869681,0.00393485161259318,0.00870687335104641,0.0375951860378887,-0.0394933828740919,-0.0578000656756125,0.00720651632920303
-0.107225631607358,-0.044641636506989,-0.0115950145052127,-0.0400993174922969,0.0493412959332305,0.0644472995495832,-0.0139477432193303,0.0343088588777263,0.00702686254915195,-0.0300724459043093
0.0816663678456587,0.0506801187398187,-0.00297251791416553,-0.0332135761048244,0.0424615316422248,0.057871181852003,-0.0102661054152432,0.0343088588777263,-0.000609254186102297,-0.00107769750046639
0.00538306037424807,0.0506801187398187,0.0175059114895716,0.0322009670761646,0.127770608850695,0.127390140369279,-0.0213110188275045,0.0712099797536354,0.062575181458056,0.0154907301588724
0.0380759064334241,0.0506801187398187,-0.0299178197611881,-0.0745280244296595,-0.0125765826858204,-0.0125872220506418,0.00446044580110504,-0.00259226199818282,0.00371173823343597,-0.0300724459043093
0.030810829531385,-0.044641636506989,-0.02021751109626,-0.00567061055493425,-0.00432086553661359,-0.0294972389872765,0.0780932018828464,-0.0394933828740919,-0.0109044358473771,-0.00107769750046639
0.00175052192322852,0.0506801187398187,-0.0579409336820915,-0.0435421881860331,-0.0965097070360893,-0.0470335528474903,-0.098625412713333,0.0343088588777263,-0.0611765950943345,-0.0714935150526564
-0.0273097856849279,0.0506801187398187,0.0606183944448076,0.107944122338362,0.0121905687618,-0.0175975974392743,-0.0029028298070691,-0.00259226199818282,0.0702112981933102,0.135611830689079
-0.0854304009012408,0.0506801187398187,-0.0406959404999971,-0.0332135761048244,-0.0813742255958769,-0.0695802420963367,-0.00658446761115617,-0.0394933828740919,-0.0578000656756125,-0.0424987666488135
0.0126481372762872,0.0506801187398187,-0.0719524906425432,-0.0469850588797694,-0.051103262715452,-0.0971373067338155,0.118591217727804,-0.076394503750001,-0.0202887477516296,-0.0383566597339788
-0.0527375548420648,-0.044641636506989,-0.0557853095343297,-0.0366564467985606,0.0892439288210632,-0.00319276819695581,0.0081420836051921,0.0343088588777263,0.132372649338676,0.00306440941436832
-0.0236772472339084,0.0506801187398187,0.045529025410475,0.0218723549949558,0.10988322169408,0.0888728795691667,0.000778807997017968,0.0343088588777263,0.0741925366900307,0.0610539062220542
-0.0745327855481821,0.0506801187398187,-0.00943939035745095,0.0149866136074833,-0.0373437341334407,-0.0216685274425382,-0.0139477432193303,-0.00259226199818282,-0.0332487872476258,0.0113486232440377
-0.00551455497881059,0.0506801187398187,-0.0331512559828308,-0.015999222636143,0.00806271018719657,0.0162224364339952,0.0155053592133662,-0.00259226199818282,-0.0283202425479987,-0.0756356219674911
-0.0600026317441039,0.0506801187398187,0.0498402737059986,0.0184294843012196,-0.0167044412604238,-0.0301235359108556,-0.0176293810234174,-0.00259226199818282,0.049768659920749,-0.0590671943081523
-0.0200447087828888,-0.044641636506989,-0.084886235529114,-0.0263278347173518,-0.0359677812752396,-0.0341944659141195,0.0412768238419757,-0.0516707527631419,-0.0823814832581028,-0.0466408735636482
0.0380759064334241,0.0506801187398187,0.00564997867688165,0.0322009670761646,0.00668675732899544,0.0174750302811533,-0.0249926566315915,0.0343088588777263,0.0148227108412663,0.0610539062220542
0.0162806757273067,-0.044641636506989,0.0207393477112143,0.0218723549949558,-0.0139525355440215,-0.0132135189742209,-0.00658446761115617,-0.00259226199818282,0.0133159679089277,0.0403433716478807
0.0417084448844436,-0.044641636506989,-0.00728376620968916,0.0287580963824284,-0.0428475455662452,-0.0482861466946485,0.052321737254237,-0.076394503750001,-0.072128454601956,0.0237749439885419
0.0199132141783263,0.0506801187398187,0.104808689473925,0.0700725447072635,-0.0359677812752396,-0.0266789028311707,-0.0249926566315915,-0.00259226199818282,0.00371173823343597,0.0403433716478807
-0.0491050163910452,0.0506801187398187,-0.0245287593917836,6.75072794357462e-05,-0.0469754041408486,-0.0282446451401184,-0.0654906724765493,0.0284046795375808,0.0191990330785671,0.0113486232440377
0.00175052192322852,0.0506801187398187,-0.00620595413580824,-0.0194420933298793,-0.00982467696941811,0.00494909180957202,-0.0397192078479398,0.0343088588777263,0.0148227108412663,0.0983328684555666
0.0344433679824045,-0.044641636506989,-0.0385403163522353,-0.0125563519424068,0.0094386630453977,0.00526224027136155,-0.00658446761115617,-0.00259226199818282,0.0311929907028023,0.0983328684555666
-0.0454724779400257,0.0506801187398187,0.137143051690352,-0.015999222636143,0.0410855787840237,0.0318798595234718,-0.0434008456520269,0.0712099797536354,0.0710215779459822,0.0486275854775501
-0.00914709342983014,0.0506801187398187,0.17055522598066,0.0149866136074833,0.0300779559184146,0.033758750294209,-0.0213110188275045,0.0343088588777263,0.0336568129023847,0.0320591578182113
-0.0164121703318693,0.0506801187398187,0.00241654245523897,0.0149866136074833,0.0218222387692079,-0.0100820343563255,-0.0249926566315915,0.0343088588777263,0.085533121187439,0.0817644407962278
-0.00914709342983014,-0.044641636506989,0.0379843408933087,-0.0400993174922969,-0.0249601584096305,-0.00381906512053488,-0.0434008456520269,0.0158582984397717,-0.00514530798026311,0.0279170509033766
0.0199132141783263,-0.044641636506989,-0.0579409336820915,-0.0573136709609782,-0.00156895982021134,-0.0125872220506418,0.0744115640787594,-0.0394933828740919,-0.0611765950943345,-0.0756356219674911
0.0526060602375023,0.0506801187398187,-0.00943939035745095,0.0494153205448459,0.0507172487914316,-0.019163339748222,-0.0139477432193303,0.0343088588777263,0.119343994203787,-0.0176461251598052
-0.0273097856849279,0.0506801187398187,-0.0234509473179027,-0.015999222636143,0.0135665216200011,0.0127778033543103,0.0265502726256275,-0.00259226199818282,-0.0109044358473771,-0.0217882320746399
-0.0745327855481821,-0.044641636506989,-0.0105172024313319,-0.00567061055493425,-0.0662387441556644,-0.0570543036247554,-0.0029028298070691,-0.0394933828740919,-0.0425721049227942,-0.00107769750046639
-0.107225631607358,-0.044641636506989,-0.0342290680567117,-0.067642283042187,-0.0634868384392622,-0.0705196874817053,0.0081420836051921,-0.0394933828740919,-0.000609254186102297,-0.0797777288823259
0.0453409833354632,0.0506801187398187,-0.00297251791416553,0.107944122338362,0.0355817673512192,0.0224854056697859,0.0265502726256275,-0.00259226199818282,0.028016506523264,0.0196328370737072
-0.00188201652779104,-0.044641636506989,0.068163078961974,-0.00567061055493425,0.119514891701488,0.130208476525385,-0.0249926566315915,0.0867084505215172,0.0461323310394148,-0.00107769750046639
0.0199132141783263,0.0506801187398187,0.00996122697240527,0.0184294843012196,0.0149424744782022,0.0447189464568426,-0.0618090346724622,0.0712099797536354,0.00943640914607987,-0.063209301222987
0.0162806757273067,0.0506801187398187,0.00241654245523897,-0.00567061055493425,-0.00569681839481472,0.0108989125835731,-0.050764121260201,0.0343088588777263,0.0226920225667445,-0.0383566597339788
-0.00188201652779104,-0.044641636506989,-0.0385403163522353,0.0218723549949558,-0.108893282759899,-0.115613065979398,0.0228686348215404,-0.076394503750001,-0.0468794828442166,0.0237749439885419
0.0162806757273067,-0.044641636506989,0.0261284080806188,0.0585963091762383,-0.0607349327228599,-0.0442152166913845,-0.0139477432193303,-0.0339582147427055,-0.0514005352605825,-0.0259303389894746
-0.0709002470971626,0.0506801187398187,-0.0891974838246376,-0.0745280244296595,-0.0428475455662452,-0.0257394574458021,-0.0323559322397657,-0.00259226199818282,-0.0129079422541688,-0.0549250873933176
0.0489735217864827,-0.044641636506989,0.0606183944448076,-0.0228849640236156,-0.0235842055514294,-0.072711726714232,-0.0434008456520269,-0.00259226199818282,0.104137611358979,0.036201264733046
0.00538306037424807,0.0506801187398187,-0.0288400076873072,-0.00911348124867051,-0.0318399227006362,-0.0288709420636975,0.0081420836051921,-0.0394933828740919,-0.0181182673078967,0.00720651632920303
0.0344433679824045,0.0506801187398187,-0.0299178197611881,0.00465800152627453,0.0933717873956666,0.0869939887984295,0.0339135482338016,-0.00259226199818282,0.024052583226893,-0.0383566597339788
0.0235457526293458,0.0506801187398187,-0.019139699022379,0.0494153205448459,-0.0634868384392622,-0.0611252336280193,0.00446044580110504,-0.0394933828740919,-0.0259524244351894,-0.0135040182449705
0.0199132141783263,-0.044641636506989,-0.0406959404999971,-0.015999222636143,-0.00844872411121698,-0.0175975974392743,0.052321737254237,-0.0394933828740919,-0.0307512098645563,0.00306440941436832
-0.0454724779400257,-0.044641636506989,0.0153502873418098,-0.0745280244296595,-0.0497273098572509,-0.0172844489774848,-0.0286742944356786,-0.00259226199818282,-0.104364820832166,-0.0756356219674911
0.0526060602375023,0.0506801187398187,-0.0245287593917836,0.0563010619323185,-0.00707277125301585,-0.005071658967693,-0.0213110188275045,-0.00259226199818282,0.0267142576335128,-0.0383566597339788
-0.00551455497881059,0.0506801187398187,0.00133873038135806,-0.0848566365108683,-0.0112006298276192,-0.0166581520539057,0.0486400994501499,-0.0394933828740919,-0.0411803851880079,-0.0880619427119953
0.00901559882526763,0.0506801187398187,0.0692408910358548,0.0597439326260547,0.0176943801946045,-0.0232342697514859,-0.0470824834561139,0.0343088588777263,0.103292264911524,0.0734802269665584
-0.0236772472339084,-0.044641636506989,-0.0697968664947814,-0.0641994123484507,-0.0593589798646588,-0.0504781859271752,0.0191869970174533,-0.0394933828740919,-0.0891368600793477,-0.0507829804784829
-0.0418399394890061,0.0506801187398187,-0.0299178197611881,-0.00222773986119799,0.0218222387692079,0.0365770864503148,0.0118237214092792,-0.00259226199818282,-0.0411803851880079,0.065196013136889
-0.0745327855481821,-0.044641636506989,-0.0460850008694016,-0.0435421881860331,-0.0290880169842339,-0.0232342697514859,0.0155053592133662,-0.0394933828740919,-0.0398095943643375,-0.0217882320746399
0.0344433679824045,-0.044641636506989,0.0185837235634525,0.0563010619323185,0.0121905687618,-0.0545491159304391,-0.0691723102806364,0.0712099797536354,0.130080609521753,0.00720651632920303
-0.0600026317441039,-0.044641636506989,0.00133873038135806,-0.0297707054110881,-0.00707277125301585,-0.0216685274425382,0.0118237214092792,-0.00259226199818282,0.0318152175007986,-0.0549250873933176
-0.0854304009012408,0.0506801187398187,-0.030995631835069,-0.0228849640236156,-0.0634868384392622,-0.0542359674686496,0.0191869970174533,-0.0394933828740919,-0.096433222891784,-0.0342145528191441
0.0526060602375023,-0.044641636506989,-0.00405032998804645,-0.0309183289641906,-0.0469754041408486,-0.0583068974719135,-0.0139477432193303,-0.0258399681500055,0.0360557900898319,0.0237749439885419
0.0126481372762872,-0.044641636506989,0.0153502873418098,-0.0332135761048244,0.0410855787840237,0.0321930079852613,-0.0029028298070691,-0.00259226199818282,0.0450661683362615,-0.0673514081378217
0.0598711371395414,0.0506801187398187,0.0228949718589761,0.0494153205448459,0.0163184273364034,0.0118383579689417,-0.0139477432193303,-0.00259226199818282,0.0395398780720242,0.0196328370737072
-0.0236772472339084,-0.044641636506989,0.045529025410475,0.090729768869681,-0.0180803941186249,-0.0354470597612776,0.0707299262746723,-0.0394933828740919,-0.0345237153303495,-0.0093619113301358
0.0162806757273067,-0.044641636506989,-0.0450071887955207,-0.0573136709609782,-0.0345918284170385,-0.05392281900686,0.0744115640787594,-0.076394503750001,-0.0425721049227942,0.0403433716478807
0.110726675453815,0.0506801187398187,-0.0331512559828308,-0.0228849640236156,-0.00432086553661359,0.0202933664372591,-0.0618090346724622,0.0712099797536354,0.0155668445407018,0.0444854785627154
-0.0200447087828888,-0.044641636506989,0.0972640049567582,-0.00567061055493425,-0.00569681839481472,-0.0238605666750649,-0.0213110188275045,-0.00259226199818282,0.0616858488238662,0.0403433716478807
-0.0164121703318693,-0.044641636506989,0.0541515220015222,0.0700725447072635,-0.0332158755588373,-0.0279314966783289,0.0081420836051921,-0.0394933828740919,-0.0271286455543265,-0.0093619113301358
0.0489735217864827,0.0506801187398187,0.1231314947299,0.0838440274822086,-0.104765424185296,-0.10089508827529,-0.0691723102806364,-0.00259226199818282,0.0366457977933988,-0.0300724459043093
-0.0563700932930843,-0.044641636506989,-0.0805749872335904,-0.0848566365108683,-0.0373437341334407,-0.0370128020702253,0.0339135482338016,-0.0394933828740919,-0.0561575730950062,-0.137767225690012
0.0271782910803654,-0.044641636506989,0.0929527566612346,-0.0527231767141394,0.00806271018719657,0.0397085710682101,-0.0286742944356786,0.021024455362399,-0.0483617248028919,0.0196328370737072
0.063503675590561,-0.044641636506989,-0.0503962491649252,0.107944122338362,0.0314539087766158,0.0193539210518905,-0.0176293810234174,0.0236075338237126,0.0580391276638951,0.0403433716478807
-0.0527375548420648,0.0506801187398187,-0.0115950145052127,0.0563010619323185,0.0562210602242361,0.0729023080179005,-0.0397192078479398,0.0712099797536354,0.0305664873984148,-0.0052198044153011
-0.00914709342983014,0.0506801187398187,-0.0277621956134263,0.0081008722200108,0.0479653430750293,0.0372033833738938,-0.0286742944356786,0.0343088588777263,0.0660482061630984,-0.0424987666488135
0.00538306037424807,-0.044641636506989,0.0584627702970458,-0.0435421881860331,-0.07311850844667,-0.0723985782524425,0.0191869970174533,-0.076394503750001,-0.0514005352605825,-0.0259303389894746
0.0744012909436196,-0.044641636506989,0.0854080721440683,0.063186803319791,0.0149424744782022,0.0130909518160999,0.0155053592133662,-0.00259226199818282,0.0062093156165054,0.0859065477110625
-0.0527375548420648,-0.044641636506989,-0.000816893766403737,-0.0263278347173518,0.0108146159035988,0.00714113104209875,0.0486400994501499,-0.0394933828740919,-0.0358167281015492,0.0196328370737072
0.0816663678456587,0.0506801187398187,0.00672779075076256,-0.00452298700183173,0.10988322169408,0.117056241130225,-0.0323559322397657,0.0918746074441444,0.0547240033481791,0.00720651632920303
-0.00551455497881059,-0.044641636506989,0.00888341489852436,-0.0504279295735057,0.0259500973438113,0.0472241341511589,-0.0434008456520269,0.0712099797536354,0.0148227108412663,0.00306440941436832
-0.0273097856849279,-0.044641636506989,0.0800190117746638,0.09876313370697,-0.00294491267841247,0.0181013272047324,-0.0176293810234174,0.00331191734196264,-0.0295276227417736,0.036201264733046
-0.0527375548420648,-0.044641636506989,0.0713965151836166,-0.0745280244296595,-0.0153284884022226,-0.00131387742621863,0.00446044580110504,-0.0214118336448964,-0.0468794828442166,0.00306440941436832
0.00901559882526763,-0.044641636506989,-0.0245287593917836,-0.0263278347173518,0.0988755988284711,0.0941964034195887,0.0707299262746723,-0.00259226199818282,-0.02139368094036,0.00720651632920303
-0.0200447087828888,-0.044641636506989,-0.0547074974604488,-0.0538708002672419,-0.0662387441556644,-0.0573674520865449,0.0118237214092792,-0.0394933828740919,-0.0740888714915354,-0.0052198044153011
0.0235457526293458,-0.044641636506989,-0.0363846922044735,6.75072794357462e-05,0.00118294589619092,0.0346981956795776,-0.0434008456520269,0.0343088588777263,-0.0332487872476258,0.0610539062220542
0.0380759064334241,0.0506801187398187,0.0164280994156907,0.0218723549949558,0.0397096259258226,0.0450320949186321,-0.0434008456520269,0.0712099797536354,0.049768659920749,0.0154907301588724
-0.0781653239992017,0.0506801187398187,0.077863387626902,0.0528581912385822,0.0782363059554542,0.0644472995495832,0.0265502726256275,-0.00259226199818282,0.0406722637144977,-0.0093619113301358
0.00901559882526763,0.0506801187398187,-0.0396181284261162,0.0287580963824284,0.0383336730676214,0.0735286049414796,-0.0728539480847234,0.108111100629544,0.0155668445407018,-0.0466408735636482
0.00175052192322852,0.0506801187398187,0.0110390390462862,-0.0194420933298793,-0.0167044412604238,-0.00381906512053488,-0.0470824834561139,0.0343088588777263,0.024052583226893,0.0237749439885419
-0.0781653239992017,-0.044641636506989,-0.0406959404999971,-0.081413765817132,-0.100637565610693,-0.112794729823292,0.0228686348215404,-0.076394503750001,-0.0202887477516296,-0.0507829804784829
0.030810829531385,0.0506801187398187,-0.0342290680567117,0.0436772026071898,0.0575970130824372,0.0688313780146366,-0.0323559322397657,0.057556565029549,0.0354619386607697,0.0859065477110625
-0.034574862586967,0.0506801187398187,0.00564997867688165,-0.00567061055493425,-0.07311850844667,-0.062690975936967,-0.00658446761115617,-0.0394933828740919,-0.045420957777041,0.0320591578182113
0.0489735217864827,0.0506801187398187,0.088641508365711,0.0872868981759448,0.0355817673512192,0.0215459602844172,-0.0249926566315915,0.0343088588777263,0.0660482061630984,0.131469723774244
-0.0418399394890061,-0.044641636506989,-0.0331512559828308,-0.0228849640236156,0.0465893902168282,0.0415874618389473,0.056003375058324,-0.0247329345237283,-0.0259524244351894,-0.0383566597339788
-0.00914709342983014,-0.044641636506989,-0.0568631216082106,-0.0504279295735057,0.0218222387692079,0.0453452433804217,-0.0286742944356786,0.0343088588777263,-0.00991895736315477,-0.0176461251598052
0.0707687524926,0.0506801187398187,-0.030995631835069,0.0218723549949558,-0.0373437341334407,-0.0470335528474903,0.0339135482338016,-0.0394933828740919,-0.0149564750249113,-0.00107769750046639
0.00901559882526763,-0.044641636506989,0.0552293340754031,-0.00567061055493425,0.0575970130824372,0.0447189464568426,-0.0029028298070691,0.0232385226149535,0.0556835477026737,0.106617082285236
-0.0273097856849279,-0.044641636506989,-0.0600965578298533,-0.0297707054110881,0.0465893902168282,0.0199802179754696,0.122272855531891,-0.0394933828740919,-0.0514005352605825,-0.0093619113301358
0.0162806757273067,-0.044641636506989,0.00133873038135806,0.0081008722200108,0.00531080447079431,0.0108989125835731,0.0302319104297145,-0.0394933828740919,-0.045420957777041,0.0320591578182113
-0.0127796318808497,-0.044641636506989,-0.0234509473179027,-0.0400993174922969,-0.0167044412604238,0.0046359433477825,-0.0176293810234174,-0.00259226199818282,-0.0384591123013538,-0.0383566597339788
-0.0563700932930843,-0.044641636506989,-0.074108114790305,-0.0504279295735057,-0.0249601584096305,-0.0470335528474903,0.0928197530991947,-0.076394503750001,-0.0611765950943345,-0.0466408735636482
0.0417084448844436,0.0506801187398187,0.0196615356373334,0.0597439326260547,-0.00569681839481472,-0.00256647127337676,-0.0286742944356786,-0.00259226199818282,0.0311929907028023,0.00720651632920303
-0.00551455497881059,0.0506801187398187,-0.0159062628007364,-0.067642283042187,0.0493412959332305,0.0791652772536912,-0.0286742944356786,0.0343088588777263,-0.0181182673078967,0.0444854785627154
0.0417084448844436,0.0506801187398187,-0.0159062628007364,0.0172818607481171,-0.0373437341334407,-0.0138398158977999,-0.0249926566315915,-0.0110795197996419,-0.0468794828442166,0.0154907301588724
-0.0454724779400257,-0.044641636506989,0.0390621529671896,0.00121513083253827,0.0163184273364034,0.0152829910486266,-0.0286742944356786,0.0265596234937854,0.0445283740214053,-0.0259303389894746
-0.0454724779400257,-0.044641636506989,-0.0730303027164241,-0.081413765817132,0.0837401173882587,0.0278089295202079,0.17381578478911,-0.0394933828740919,-0.00421985970694603,0.00306440941436832



y.csv
y
151.0
75.0
141.0
206.0
135.0
97.0
138.0
63.0
110.0
310.0
101.0
69.0
179.0
185.0
118.0
171.0
166.0
144.0
97.0
168.0
68.0
49.0
68.0
245.0
184.0
202.0
137.0
85.0
131.0
283.0
129.0
59.0
341.0
87.0
65.0
102.0
265.0
276.0
252.0
90.0
100.0
55.0
61.0
92.0
259.0
53.0
190.0
142.0
75.0
142.0
155.0
225.0
59.0
104.0
182.0
128.0
52.0
37.0
170.0
170.0
61.0
144.0
52.0
128.0
71.0
163.0
150.0
97.0
160.0
178.0
48.0
270.0
202.0
111.0
85.0
42.0
170.0
200.0
252.0
113.0
143.0
51.0
52.0
210.0
65.0
141.0
55.0
134.0
42.0
111.0
98.0
164.0
48.0
96.0
90.0
162.0
150.0
279.0
92.0
83.0
128.0
102.0
302.0
198.0
95.0
53.0
134.0
144.0
232.0
81.0
104.0
59.0
246.0
297.0
258.0
229.0
275.0
281.0
179.0
200.0
200.0
173.0
180.0
84.0
121.0
161.0
99.0
109.0
115.0
268.0
274.0
158.0
107.0
83.0
103.0
272.0
85.0
280.0
336.0
281.0
118.0
317.0
235.0
60.0
174.0
259.0
178.0
128.0
96.0
126.0
288.0
88.0
292.0
71.0
197.0
186.0
25.0
84.0
96.0
195.0
53.0
217.0
172.0
131.0
214.0
59.0
70.0
220.0
268.0
152.0
47.0
74.0
295.0
101.0
151.0
127.0
237.0
225.0
81.0
151.0
107.0
64.0
138.0
185.0
265.0
101.0
137.0
143.0
141.0
79.0
292.0
178.0
91.0
116.0
86.0
122.0
72.0
129.0
142.0
90.0
158.0
39.0
196.0
222.0
277.0
99.0
196.0
202.0
155.0
77.0
191.0
70.0
73.0
49.0
65.0
263.0
248.0
296.0
214.0
185.0
78.0
93.0
252.0
150.0
77.0
208.0
77.0
108.0
160.0
53.0
220.0
154.0
259.0
90.0
246.0
124.0
67.0
72.0
257.0
262.0
275.0
177.0
71.0
47.0
187.0
125.0
78.0
51.0
258.0
215.0
303.0
243.0
91.0
150.0
310.0
153.0
346.0
63.0
89.0
50.0
39.0
103.0
308.0
116.0
145.0
74.0
45.0
115.0
264.0
87.0
202.0
127.0
182.0
241.0
66.0
94.0
283.0
64.0
102.0
200.0
265.0
94.0
230.0
181.0
156.0
233.0
60.0
219.0
80.0
68.0
332.0
248.0
84.0
200.0
55.0
85.0
89.0
31.0
129.0
83.0
275.0
65.0
198.0
236.0
253.0
124.0
44.0
172.0
114.0
142.0
109.0
180.0
144.0
163.0
147.0
97.0
220.0
190.0
109.0
191.0
122.0
230.0
242.0
248.0
249.0
192.0
131.0
237.0
78.0
135.0
244.0
199.0
270.0
164.0
72.0
96.0
306.0
91.0
214.0
95.0
216.0
263.0
178.0
113.0
200.0
139.0
139.0
88.0
148.0
88.0
243.0
71.0
77.0
109.0
272.0
60.0
54.0
221.0
90.0
311.0
281.0
182.0
321.0
58.0
262.0
206.0
233.0
242.0
123.0
167.0
63.0
197.0
71.0
168.0
140.0
217.0
121.0
235.0
245.0
40.0
52.0
104.0
132.0
88.0
69.0
219.0
72.0
201.0
110.0
51.0
277.0
63.0
118.0
69.0
273.0
258.0
43.0
198.0
242.0
232.0
175.0
93.0
168.0
275.0
293.0
281.0
72.0
140.0
189.0
181.0
209.0
136.0
261.0
113.0
131.0
174.0
257.0
55.0
84.0
42.0
146.0
212.0
233.0
91.0
111.0
152.0
120.0
67.0
310.0
94.0
183.0
66.0
173.0
72.0
49.0
64.0
48.0
178.0
104.0
132.0
220.0
57.0




Python files

ttscv01.py
#################### Training/Test Split in Python ####################
#
#Input data files:
#y.csv    dependent variable (target) y
#X.csv    independent variables x1, x2, ... (before standardization)
#
#Reference
#https://towardsdatascience.com/train-test-split-and-cross-validation-in-python-80b61beca4b6



########## import
import pandas as pd
from sklearn import datasets, linear_model
from sklearn.model_selection import train_test_split
from matplotlib import pyplot as plt
import numpy as np
import statsmodels.api as sm
from sklearn.preprocessing import StandardScaler



########## Load the Diabetes dataset

##columns = 'age sex bmi map tc ldl hdl tch ltg glu'.split() # Declare the column names
#columns = ['age', 'sex', 'bmi', 'map', 'tc', 'ldl', 'hdl', 'tch', 'ltg', 'glu']
#print(columns)

#diabetes = datasets.load_diabetes() # Call the diabetes dataset from sklearn
#X = pd.DataFrame(diabetes.data, columns=columns) # load the dataset as a pandas data frame
#print(X)
'''
          age       sex       bmi  ...       tch       ltg       glu
0    0.038076  0.050680  0.061696  ... -0.002592  0.019908 -0.017646
1   -0.001882 -0.044642 -0.051474  ... -0.039493 -0.068330 -0.092204
2    0.085299  0.050680  0.044451  ... -0.002592  0.002864 -0.025930
3   -0.089063 -0.044642 -0.011595  ...  0.034309  0.022692 -0.009362
4    0.005383 -0.044642 -0.036385  ... -0.002592 -0.031991 -0.046641
..        ...       ...       ...  ...       ...       ...       ...
437  0.041708  0.050680  0.019662  ... -0.002592  0.031193  0.007207
438 -0.005515  0.050680 -0.015906  ...  0.034309 -0.018118  0.044485
439  0.041708  0.050680 -0.015906  ... -0.011080 -0.046879  0.015491
440 -0.045472 -0.044642  0.039062  ...  0.026560  0.044528 -0.025930
441 -0.045472 -0.044642 -0.073030  ... -0.039493 -0.004220  0.003064
'''
#print(type(X))
#<class 'pandas.core.frame.DataFrame'>
#
#X.to_csv('X.csv', header = True, index = False)


#y = diabetes.target # define the target variable (dependent variable) as y
#print(y)
#print(type(y))
#<class 'numpy.ndarray'>
#
#pd.DataFrame(y).to_csv('y.csv', header = True, index = False)


X = pd.read_csv('X.csv', header=0)
#print(type(X))
#print(X)

y = pd.read_csv('y.csv', header=0)
#y.rename(columns={'0': 'y'}, inplace = True)
#y = np.array(y)
#print(type(y))
#print(y)



########## Standardization of X
scaler = StandardScaler()
scaler.fit(X)

XSTD = pd.DataFrame(scaler.transform(X), columns = X.columns)
#print(XSTD.describe())
'''
                age           sex  ...           ltg           glu
count  4.420000e+02  4.420000e+02  ...  4.420000e+02  4.420000e+02
mean  -3.215126e-17  1.607563e-17  ...  8.037814e-18 -3.617016e-17
std    1.001133e+00  1.001133e+00  ...  1.001133e+00  1.001133e+00
min   -2.254290e+00 -9.385367e-01  ... -2.651046e+00 -2.896390e+00
25%   -7.841722e-01 -9.385367e-01  ... -6.990157e-01 -6.975491e-01
50%    1.131724e-01 -9.385367e-01  ... -4.094666e-02 -2.265729e-02
75%    8.005001e-01  1.065488e+00  ...  6.818695e-01  5.869224e-01
max    2.327895e+00  1.065488e+00  ...  2.808758e+00  2.851075e+00
'''
##### Note that the original 'sex' variable is a 0/1 dummy; standardizing a dummy variable together with the continuous features is questionable (see the commented sketch below).

XSTD.to_csv('XSTD.csv', header=True, index=False)
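
# A minimal, commented-out sketch (not in the original script), assuming you
# start from raw features where 'sex' is still a 0/1 dummy: sklearn's
# ColumnTransformer can standardize only the continuous columns and pass the
# dummy through unchanged.
#
#from sklearn.compose import ColumnTransformer
#
#num_cols = [c for c in X.columns if c != 'sex']   # continuous columns only
#ct = ColumnTransformer([('std', StandardScaler(), num_cols)],
#                       remainder='passthrough')   # 'sex' is left as-is
## transformed columns come first in the output, passthrough columns last
#XSTD2 = pd.DataFrame(ct.fit_transform(X), columns=num_cols + ['sex'])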

#y = pd.DataFrame(y)
#y.rename(columns={0: 'y'}, inplace = True)

yXSTD = pd.concat([y, XSTD], axis=1)
yXSTD.to_csv('yXSTD.csv', header=True, index=False)



########## Train/Test Split

# create training and testing vars
XSTD_train, XSTD_test, y_train, y_test = train_test_split(XSTD, y, test_size=0.2, random_state = 0)
#
#print(XSTD_train.shape, y_train.shape)
#(353, 10) (353, 1)
#
#print(XSTD_test.shape, y_test.shape)
#(89, 10) (89, 1)

#print(type(XSTD_train))
#<class 'pandas.core.frame.DataFrame'>

#print(type(y_train))
#<class 'pandas.core.frame.DataFrame'>



##########  multiple linear regression model fitting

lm = linear_model.LinearRegression()
model = lm.fit(XSTD_train, y_train)

#print(model.score(XSTD_train, y_train))
#0.5539285357415583

#print(model.score(XSTD_test, y_test))
#0.3322220326906514
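
# A hedged addition (not in the original): the same predictions can be scored
# with explicit metrics from sklearn.metrics; r2_score matches model.score.
#
#from sklearn.metrics import mean_squared_error, r2_score
#
#pred_test = model.predict(XSTD_test)
#print('test R^2 :', r2_score(y_test, pred_test))
#print('test RMSE:', np.sqrt(mean_squared_error(y_test, pred_test)))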


#print(model.intercept_[0])
#152.53813351954003

#print(model.coef_)
'''
[[ -1.69126625 -11.56638059  26.7674803   14.52982275 -31.52559699
   15.4242019    1.17875634   8.10179878  34.80237853   2.04665552]]
'''

#print(type(model.predict(XSTD_train)))
#<class 'numpy.ndarray'>
#
#print(model.predict(XSTD_train)[0:5])
'''
[[145.9502111 ]
 [100.12253043]
 [123.30242341]
 [ 77.82273702]
 [148.66482879]]
 '''

ypred_train = model.predict(XSTD_train)
#print(type(ypred_train))
#<class 'numpy.ndarray'>

ypred_test = model.predict(XSTD_test)


########## Output: Training Data & Predictions

#y_train = pd.DataFrame(y_train)
#y_train.rename(columns={0: 'y'}, inplace = True)
y_train.reset_index(drop=True, inplace = True)
#print(y_train)

#print(XSTD_train)
XSTD_train.reset_index(drop=True, inplace = True)
#print(XSTD_train)

ypred_train = pd.DataFrame(ypred_train)
ypred_train.rename(columns={0: 'ypred'}, inplace = True)
#print(ypred_train)


yXSTDypred_train = pd.concat([y_train, XSTD_train, ypred_train], axis=1)
#print(yXSTDypred_train)
'''
         y       age       sex       bmi  ...       tch       ltg       glu       ypred
0     85.0  0.265912  1.065488  0.050805  ... -0.830301  0.078035  1.544833  145.950211
1    137.0 -2.254290 -0.938537 -1.626013  ... -1.606102 -0.895027 -0.109740  100.122530
2     53.0  0.571391  1.065488 -0.742285  ... -0.054499 -0.314442 -1.067651  123.302423
3     51.0 -0.115937  1.065488  0.028145  ... -0.830301 -0.865768 -1.851396   77.822737
4    197.0  1.411458  1.065488  0.436020  ... -0.054499  0.181652  0.064426  148.664829
..     ...       ...       ...       ...  ...       ...       ...       ...         ...
348  248.0  1.487828  1.065488 -0.153132  ...  2.272906  2.712478  1.196502  221.168821
349   91.0  1.182349  1.065488 -0.651646  ... -0.830301 -0.620783 -1.241817   87.459215
350  281.0  1.258719 -0.938537 -0.447709  ...  1.497104  1.663425  2.851075  230.902621
351  142.0 -1.643332 -0.938537 -1.535374  ... -0.830301 -0.380915 -1.764314  115.024538
352  295.0  0.876870  1.065488  1.501026  ...  0.721302  1.543359  1.806082  223.952368

[353 rows x 12 columns]
'''

yXSTDypred_train.to_csv('yXSTDypred_train.csv', header = True, index = False)



########## Output: Test Data & Predictions (of a model built with Training Data)

#y_test = pd.DataFrame(y_test)
#y_test.rename(columns={0: 'y'}, inplace = True)
y_test.reset_index(drop=True, inplace = True)
#print(y_test)

#print(XSTD_test)
XSTD_test.reset_index(drop=True, inplace = True)
#print(XSTD_test)

ypred_test = pd.DataFrame(ypred_test)
ypred_test.rename(columns={0: 'ypred'}, inplace = True)
#print(ypred_test)


yXSTDypred_test = pd.concat([y_test, XSTD_test, ypred_test], axis=1)

yXSTDypred_test.to_csv('yXSTDypred_test.csv', header = True, index = False)



########## plot

plt.scatter(y_train, ypred_train)
plt.xlabel('y (Training Data)')
plt.ylabel('ypred (Training Data)')
plt.savefig('Fig_1.png')
plt.show()


plt.scatter(y_test, ypred_test)
plt.xlabel('y (Test Data)')
plt.ylabel('ypred (A model built by Training Data is applied to Test Data)')
plt.savefig('Fig_2.png')
plt.show()
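
# An optional touch (a sketch, not in the original post): a 45-degree
# reference line makes over/under-prediction easier to read off the scatter.
#
#lims = [y_test['y'].min(), y_test['y'].max()]
#plt.scatter(y_test, ypred_test)
#plt.plot(lims, lims, linestyle='--')   # points on this line are perfect predictions
#plt.xlabel('y (Test Data)')
#plt.ylabel('ypred (Test Data)')
#plt.show()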


##########  multiple linear regression model fitting (reprise)

smXSTD_train = sm.add_constant(XSTD_train)

model2 = sm.OLS(y_train, smXSTD_train)

model2fit = model2.fit()

#print(model2fit.summary())
'''
                            OLS Regression Results                          
==============================================================================
Dep. Variable:                      y   R-squared:                       0.554
Model:                            OLS   Adj. R-squared:                  0.541
Method:                 Least Squares   F-statistic:                     42.47
Date:                Tue, 14 Jul 2020   Prob (F-statistic):           4.05e-54
Time:                        11:05:50   Log-Likelihood:                -1897.7
No. Observations:                 353   AIC:                             3817.
Df Residuals:                     342   BIC:                             3860.
Df Model:                          10                                      
Covariance Type:            nonrobust                                      
==============================================================================
                 coef    std err          t      P>|t|      [0.025      0.975]
------------------------------------------------------------------------------
const        152.5381      2.836     53.789      0.000     146.960     158.116
age           -1.6913      3.121     -0.542      0.588      -7.829       4.447
sex          -11.5664      3.155     -3.666      0.000     -17.772      -5.361
bmi           26.7675      3.411      7.848      0.000      20.059      33.476
map           14.5298      3.529      4.117      0.000       7.588      21.471
tc           -31.5256     20.866     -1.511      0.132     -72.567       9.516
ldl           15.4242     16.828      0.917      0.360     -17.675      48.524
hdl            1.1788     11.105      0.106      0.916     -20.665      23.022
tch            8.1018      8.627      0.939      0.348      -8.867      25.071
ltg           34.8024      8.729      3.987      0.000      17.633      51.972
glu            2.0467      3.359      0.609      0.543      -4.561       8.654
==============================================================================
Omnibus:                        2.147   Durbin-Watson:                   1.890
Prob(Omnibus):                  0.342   Jarque-Bera (JB):                1.789
Skew:                           0.003   Prob(JB):                        0.409
Kurtosis:                       2.651   Cond. No.                         21.4
==============================================================================

Warnings:
[1] Standard Errors assume that the covariance matrix of the errors is correctly specified.
'''

##### For comparison, the sklearn fit above gives the same intercept and coefficients as the OLS table:
#print(model.intercept_[0])
#152.5381335195406

#print(model.coef_)
'''
[[ -1.69126625 -11.56638059  26.7674803   14.52982275 -31.52559699
   15.4242019    1.17875634   8.10179878  34.80237853   2.04665552]]
'''



with open('model2fit.summary.txt', 'w') as f:
  print(model2fit.summary(), file=f)
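
# A small follow-up (sketch, not in the original): the fitted results object
# exposes the summary table's numbers programmatically as well.
#
#print(model2fit.params)      # coefficients (const, age, sex, ...)
#print(model2fit.pvalues)     # the P>|t| column
#print(model2fit.conf_int())  # the [0.025, 0.975] bounds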




ttscv02.py
#################### K-Folds Cross Validation in Python (Example) ####################
#
# Run this script as follows:
# python3 ttscv02.py 2
# python3 ttscv02.py (number of subsets for K-Folds)
#
#Input data files:
#y.csv    dependent variable (target) y
#X.csv    independent variables x1, x2, ... (before standardization)
#
#Reference
#https://towardsdatascience.com/train-test-split-and-cross-validation-in-python-80b61beca4b6

##### import
import sys

#print(sys.argv[0])
#ttscv02.py
#
n = int(sys.argv[1])
#####


########## K-Folds Cross Validation

##### import
from sklearn.model_selection import KFold
import numpy as np
#####

#X = np.array([[1, 2], [3, 4], [1, 2], [3, 4]]) # create an array
X = np.array([[0, 10], [20, 30], [40, 50], [60, 70]]) # create an array
#y = np.array([1, 2, 3, 4]) # Create another array
y = np.array([0, 1, 2, 3]) # Create another array

#kf = KFold(n_splits=2) # Define the split - into 2 folds
#kf = KFold(n_splits=n) # Define the split - into n folds
kf = KFold(n_splits=n, random_state=None, shuffle=False) # Define the split - into n folds

#kf.get_n_splits(X) # returns the number of splitting iterations in the cross-validator
#print(kf.get_n_splits(X))
#2
#
#print(kf)

#KFold(n_splits=2, random_state=None, shuffle=False)
#KFold(n_splits=n, random_state=None, shuffle=False)

for train_index, test_index in kf.split(X):
    print('Training index:', train_index)
    print('Test index:', test_index)
    print('')
    #
    X_train, X_test = X[train_index], X[test_index]
    y_train, y_test = y[train_index], y[test_index]
    print('X Training: \n', X_train)
    print('')
    print('X Test: \n', X_test)
    print('')
    print('y Training: \n', y_train)
    print('')
    print('y Test: \n', y_test)
    print('')
 
#Expected index output for n = 2:
#Training index: [2 3] / Test index: [0 1]
#Training index: [0 1] / Test index: [2 3]
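
# A variant worth noting (hedged sketch, not in the original): with
# shuffle=True and a fixed random_state, the fold assignment is randomized
# but still reproducible across runs.
#
#kf_shuffled = KFold(n_splits=n, shuffle=True, random_state=0)
#for train_index, test_index in kf_shuffled.split(X):
#    print('Training index:', train_index, '/ Test index:', test_index)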





ttscv03.py
#################### K-Folds Cross Validation in Python ####################
#
# Run this script as follows:
# python3 ttscv03.py 2
# python3 ttscv03.py (number of subsets for K-Folds)
#
#Input data files:
#y.csv    dependent variable (target) y
#X.csv    independent variables x1, x2, ... (before standardization)
#
#Reference
#https://towardsdatascience.com/train-test-split-and-cross-validation-in-python-80b61beca4b6

##### import
import sys
import pandas as pd

#print(sys.argv[0])
#ttscv03.py
#
n = int(sys.argv[1])
#i = 0
i = 1
#####


########## loading data
XSTD = pd.read_csv('XSTD.csv', header=0)
X = XSTD
y = pd.read_csv('y.csv', header=0)



########## K-Folds Cross Validation

##### import
from sklearn.model_selection import KFold
import numpy as np
#####

#X = np.array([[1, 2], [3, 4], [1, 2], [3, 4]]) # create an array
#X = np.array([[0, 10], [20, 30], [40, 50], [60, 70]]) # create an array
#y = np.array([1, 2, 3, 4]) # Create another array
#y = np.array([0, 1, 2, 3]) # Create another array

#kf = KFold(n_splits=2) # Define the split - into 2 folds
#kf = KFold(n_splits=n) # Define the split - into n folds
kf = KFold(n_splits=n, random_state=None, shuffle=False) # Define the split - into n folds

#kf.get_n_splits(X) # returns the number of splitting iterations in the cross-validator
#print(kf.get_n_splits(X))
#2
#
#print(kf)

#KFold(n_splits=2, random_state=None, shuffle=False)
#KFold(n_splits=n, random_state=None, shuffle=False)

#Note: kf.split(X) works directly on the DataFrame as well; splitting on the
#list of row positions below is equivalent and makes the indexing explicit.
#
for train_index, test_index in kf.split(list(X.index)):
    print('Round: ', i)
    #
    #print('Training index:', train_index)
    #print('Test index:', test_index)
    #
    # train_index/test_index are positional NumPy integer arrays, so the
    # DataFrames must be indexed with .iloc; plain label indexing such as
    # X[train_index] raises a KeyError (it looks the integers up as columns).
    X_train, X_test = X.iloc[train_index], X.iloc[test_index]
    y_train, y_test = y.iloc[train_index], y.iloc[test_index]
    #
    #print('X Training: \n', X_train)
    #print('')
    #print('X Test: \n', X_test)
    #print('')
    #print('y Training: \n', y_train)
    #print('')
    #print('y Test: \n', y_test)
    #print('')
    #
    X_train.to_csv(str(i) +'_X_train_.csv', header = True, index = False)
    X_test.to_csv(str(i) +'_X_test_.csv', header = True, index = False)
    y_train.to_csv(str(i) +'_y_train_.csv', header = True, index = False)
    y_test.to_csv(str(i) + '_y_test_.csv', header = True, index = False)
    #
    i = i + 1
    #print(i)
    print('***************')
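
# A closing sketch (not in the original script): instead of writing per-fold
# CSV files, scikit-learn can fit and score a regression across the same
# unshuffled folds in one call; an integer cv reproduces KFold(n_splits=n).
#
#from sklearn.linear_model import LinearRegression
#from sklearn.model_selection import cross_val_score
#
#scores = cross_val_score(LinearRegression(), X, y, cv=n)  # R^2 per fold
#print('R^2 per fold:', scores)
#print('mean R^2    :', scores.mean())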





Figures
Fig_1.png: scatter of y vs. ypred (Training Data)

Fig_2.png: scatter of y vs. ypred (Test Data, predicted by the model built on Training Data)





References

https://towardsdatascience.com/train-test-split-and-cross-validation-in-python-80b61beca4b6

Deep Learning (Regression, Multiple Features/Explanatory Variables, Supervised Learning): Implementation and Showing Biases and Weights
