
Thursday, June 25, 2020

Principal Component Analysis (PCA) in Python



0_MacOS_Python_setup.txt
# Install via the Terminal on macOS

# 1. pandas
#pip3 install -U pandas

# 2. NumPy
#pip3 install -U numpy

# 3. matplotlib
#pip3 install -U matplotlib

# 4. scikit-learn (sklearn)
#pip3 install -U scikit-learn
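
# (Optional) verify the installs from the Terminal
#python3 -c "import pandas, numpy, matplotlib, sklearn; print(sklearn.__version__)"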


1_MacOS_Terminal.txt
########## Run Terminal on macOS and execute
### TO UPDATE: set your own working directory
cd "YOUR_WORKING_DIRECTORY"

python3 pca.py wine.csv

#python3 pca.py wine4data.csv
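
The pca.py script itself appears later in the post. For orientation, here is a minimal sketch of the pipeline that produces the dfclsspc.csv output shown below (standardize the 13 features, project onto all principal components, keep the class label), assuming scikit-learn's StandardScaler and PCA; the actual script may differ in its details.

# pca_sketch.py (hypothetical stand-in for pca.py)
import sys
import pandas as pd
from sklearn.preprocessing import StandardScaler
from sklearn.decomposition import PCA

# Load the CSV passed on the command line, e.g. wine.csv
df = pd.read_csv(sys.argv[1], index_col=0)
X = df.drop(columns=["class"])            # the 13 numeric features
Z = StandardScaler().fit_transform(X)     # zero mean, unit variance per column

pca = PCA()                               # keep all components (13 for wine.csv)
scores = pca.fit_transform(Z)             # (178, 13) array of PC scores

cols = ["PC%d" % (i + 1) for i in range(scores.shape[1])]
out = pd.DataFrame(scores, columns=cols)
out.insert(0, "class", df["class"].values)
out.to_csv("dfclsspc.csv")

# Fraction of total variance captured by each component
print(pca.explained_variance_ratio_)

Run it the same way as above, e.g. python3 pca_sketch.py wine.csv.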



Input data files
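
Both files below come from the classic UCI Wine recognition data: 178 samples from 3 wine cultivars (the class column) with 13 chemical measurements each; wine4data.csv keeps only 4 of those features (Alcohol, Flavanoids, Color intensity, Proline).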



wine.csv
,class,Alcohol,Malic acid,Ash,Alcalinity of ash,Magnesium,Total phenols,Flavanoids,Nonflavanoid phenols,Proanthocyanins,Color intensity,Hue,OD280/OD315 of diluted wines,Proline
0,1,14.23,1.71,2.43,15.6,127,2.8,3.06,0.28,2.29,5.64,1.04,3.92,1065
1,1,13.2,1.78,2.14,11.2,100,2.65,2.76,0.26,1.28,4.38,1.05,3.4,1050
2,1,13.16,2.36,2.67,18.6,101,2.8,3.24,0.3,2.81,5.68,1.03,3.17,1185
3,1,14.37,1.95,2.5,16.8,113,3.85,3.49,0.24,2.18,7.8,0.86,3.45,1480
4,1,13.24,2.59,2.87,21.0,118,2.8,2.69,0.39,1.82,4.32,1.04,2.93,735
5,1,14.2,1.76,2.45,15.2,112,3.27,3.39,0.34,1.97,6.75,1.05,2.85,1450
6,1,14.39,1.87,2.45,14.6,96,2.5,2.52,0.3,1.98,5.25,1.02,3.58,1290
7,1,14.06,2.15,2.61,17.6,121,2.6,2.51,0.31,1.25,5.05,1.06,3.58,1295
8,1,14.83,1.64,2.17,14.0,97,2.8,2.98,0.29,1.98,5.2,1.08,2.85,1045
9,1,13.86,1.35,2.27,16.0,98,2.98,3.15,0.22,1.85,7.22,1.01,3.55,1045
10,1,14.1,2.16,2.3,18.0,105,2.95,3.32,0.22,2.38,5.75,1.25,3.17,1510
11,1,14.12,1.48,2.32,16.8,95,2.2,2.43,0.26,1.57,5.0,1.17,2.82,1280
12,1,13.75,1.73,2.41,16.0,89,2.6,2.76,0.29,1.81,5.6,1.15,2.9,1320
13,1,14.75,1.73,2.39,11.4,91,3.1,3.69,0.43,2.81,5.4,1.25,2.73,1150
14,1,14.38,1.87,2.38,12.0,102,3.3,3.64,0.29,2.96,7.5,1.2,3.0,1547
15,1,13.63,1.81,2.7,17.2,112,2.85,2.91,0.3,1.46,7.3,1.28,2.88,1310
16,1,14.3,1.92,2.72,20.0,120,2.8,3.14,0.33,1.97,6.2,1.07,2.65,1280
17,1,13.83,1.57,2.62,20.0,115,2.95,3.4,0.4,1.72,6.6,1.13,2.57,1130
18,1,14.19,1.59,2.48,16.5,108,3.3,3.93,0.32,1.86,8.7,1.23,2.82,1680
19,1,13.64,3.1,2.56,15.2,116,2.7,3.03,0.17,1.66,5.1,0.96,3.36,845
20,1,14.06,1.63,2.28,16.0,126,3.0,3.17,0.24,2.1,5.65,1.09,3.71,780
21,1,12.93,3.8,2.65,18.6,102,2.41,2.41,0.25,1.98,4.5,1.03,3.52,770
22,1,13.71,1.86,2.36,16.6,101,2.61,2.88,0.27,1.69,3.8,1.11,4.0,1035
23,1,12.85,1.6,2.52,17.8,95,2.48,2.37,0.26,1.46,3.93,1.09,3.63,1015
24,1,13.5,1.81,2.61,20.0,96,2.53,2.61,0.28,1.66,3.52,1.12,3.82,845
25,1,13.05,2.05,3.22,25.0,124,2.63,2.68,0.47,1.92,3.58,1.13,3.2,830
26,1,13.39,1.77,2.62,16.1,93,2.85,2.94,0.34,1.45,4.8,0.92,3.22,1195
27,1,13.3,1.72,2.14,17.0,94,2.4,2.19,0.27,1.35,3.95,1.02,2.77,1285
28,1,13.87,1.9,2.8,19.4,107,2.95,2.97,0.37,1.76,4.5,1.25,3.4,915
29,1,14.02,1.68,2.21,16.0,96,2.65,2.33,0.26,1.98,4.7,1.04,3.59,1035
30,1,13.73,1.5,2.7,22.5,101,3.0,3.25,0.29,2.38,5.7,1.19,2.71,1285
31,1,13.58,1.66,2.36,19.1,106,2.86,3.19,0.22,1.95,6.9,1.09,2.88,1515
32,1,13.68,1.83,2.36,17.2,104,2.42,2.69,0.42,1.97,3.84,1.23,2.87,990
33,1,13.76,1.53,2.7,19.5,132,2.95,2.74,0.5,1.35,5.4,1.25,3.0,1235
34,1,13.51,1.8,2.65,19.0,110,2.35,2.53,0.29,1.54,4.2,1.1,2.87,1095
35,1,13.48,1.81,2.41,20.5,100,2.7,2.98,0.26,1.86,5.1,1.04,3.47,920
36,1,13.28,1.64,2.84,15.5,110,2.6,2.68,0.34,1.36,4.6,1.09,2.78,880
37,1,13.05,1.65,2.55,18.0,98,2.45,2.43,0.29,1.44,4.25,1.12,2.51,1105
38,1,13.07,1.5,2.1,15.5,98,2.4,2.64,0.28,1.37,3.7,1.18,2.69,1020
39,1,14.22,3.99,2.51,13.2,128,3.0,3.04,0.2,2.08,5.1,0.89,3.53,760
40,1,13.56,1.71,2.31,16.2,117,3.15,3.29,0.34,2.34,6.13,0.95,3.38,795
41,1,13.41,3.84,2.12,18.8,90,2.45,2.68,0.27,1.48,4.28,0.91,3.0,1035
42,1,13.88,1.89,2.59,15.0,101,3.25,3.56,0.17,1.7,5.43,0.88,3.56,1095
43,1,13.24,3.98,2.29,17.5,103,2.64,2.63,0.32,1.66,4.36,0.82,3.0,680
44,1,13.05,1.77,2.1,17.0,107,3.0,3.0,0.28,2.03,5.04,0.88,3.35,885
45,1,14.21,4.04,2.44,18.9,111,2.85,2.65,0.3,1.25,5.24,0.87,3.33,1080
46,1,14.38,3.59,2.28,16.0,102,3.25,3.17,0.27,2.19,4.9,1.04,3.44,1065
47,1,13.9,1.68,2.12,16.0,101,3.1,3.39,0.21,2.14,6.1,0.91,3.33,985
48,1,14.1,2.02,2.4,18.8,103,2.75,2.92,0.32,2.38,6.2,1.07,2.75,1060
49,1,13.94,1.73,2.27,17.4,108,2.88,3.54,0.32,2.08,8.9,1.12,3.1,1260
50,1,13.05,1.73,2.04,12.4,92,2.72,3.27,0.17,2.91,7.2,1.12,2.91,1150
51,1,13.83,1.65,2.6,17.2,94,2.45,2.99,0.22,2.29,5.6,1.24,3.37,1265
52,1,13.82,1.75,2.42,14.0,111,3.88,3.74,0.32,1.87,7.05,1.01,3.26,1190
53,1,13.77,1.9,2.68,17.1,115,3.0,2.79,0.39,1.68,6.3,1.13,2.93,1375
54,1,13.74,1.67,2.25,16.4,118,2.6,2.9,0.21,1.62,5.85,0.92,3.2,1060
55,1,13.56,1.73,2.46,20.5,116,2.96,2.78,0.2,2.45,6.25,0.98,3.03,1120
56,1,14.22,1.7,2.3,16.3,118,3.2,3.0,0.26,2.03,6.38,0.94,3.31,970
57,1,13.29,1.97,2.68,16.8,102,3.0,3.23,0.31,1.66,6.0,1.07,2.84,1270
58,1,13.72,1.43,2.5,16.7,108,3.4,3.67,0.19,2.04,6.8,0.89,2.87,1285
59,2,12.37,0.94,1.36,10.6,88,1.98,0.57,0.28,0.42,1.95,1.05,1.82,520
60,2,12.33,1.1,2.28,16.0,101,2.05,1.09,0.63,0.41,3.27,1.25,1.67,680
61,2,12.64,1.36,2.02,16.8,100,2.02,1.41,0.53,0.62,5.75,0.98,1.59,450
62,2,13.67,1.25,1.92,18.0,94,2.1,1.79,0.32,0.73,3.8,1.23,2.46,630
63,2,12.37,1.13,2.16,19.0,87,3.5,3.1,0.19,1.87,4.45,1.22,2.87,420
64,2,12.17,1.45,2.53,19.0,104,1.89,1.75,0.45,1.03,2.95,1.45,2.23,355
65,2,12.37,1.21,2.56,18.1,98,2.42,2.65,0.37,2.08,4.6,1.19,2.3,678
66,2,13.11,1.01,1.7,15.0,78,2.98,3.18,0.26,2.28,5.3,1.12,3.18,502
67,2,12.37,1.17,1.92,19.6,78,2.11,2.0,0.27,1.04,4.68,1.12,3.48,510
68,2,13.34,0.94,2.36,17.0,110,2.53,1.3,0.55,0.42,3.17,1.02,1.93,750
69,2,12.21,1.19,1.75,16.8,151,1.85,1.28,0.14,2.5,2.85,1.28,3.07,718
70,2,12.29,1.61,2.21,20.4,103,1.1,1.02,0.37,1.46,3.05,0.906,1.82,870
71,2,13.86,1.51,2.67,25.0,86,2.95,2.86,0.21,1.87,3.38,1.36,3.16,410
72,2,13.49,1.66,2.24,24.0,87,1.88,1.84,0.27,1.03,3.74,0.98,2.78,472
73,2,12.99,1.67,2.6,30.0,139,3.3,2.89,0.21,1.96,3.35,1.31,3.5,985
74,2,11.96,1.09,2.3,21.0,101,3.38,2.14,0.13,1.65,3.21,0.99,3.13,886
75,2,11.66,1.88,1.92,16.0,97,1.61,1.57,0.34,1.15,3.8,1.23,2.14,428
76,2,13.03,0.9,1.71,16.0,86,1.95,2.03,0.24,1.46,4.6,1.19,2.48,392
77,2,11.84,2.89,2.23,18.0,112,1.72,1.32,0.43,0.95,2.65,0.96,2.52,500
78,2,12.33,0.99,1.95,14.8,136,1.9,1.85,0.35,2.76,3.4,1.06,2.31,750
79,2,12.7,3.87,2.4,23.0,101,2.83,2.55,0.43,1.95,2.57,1.19,3.13,463
80,2,12.0,0.92,2.0,19.0,86,2.42,2.26,0.3,1.43,2.5,1.38,3.12,278
81,2,12.72,1.81,2.2,18.8,86,2.2,2.53,0.26,1.77,3.9,1.16,3.14,714
82,2,12.08,1.13,2.51,24.0,78,2.0,1.58,0.4,1.4,2.2,1.31,2.72,630
83,2,13.05,3.86,2.32,22.5,85,1.65,1.59,0.61,1.62,4.8,0.84,2.01,515
84,2,11.84,0.89,2.58,18.0,94,2.2,2.21,0.22,2.35,3.05,0.79,3.08,520
85,2,12.67,0.98,2.24,18.0,99,2.2,1.94,0.3,1.46,2.62,1.23,3.16,450
86,2,12.16,1.61,2.31,22.8,90,1.78,1.69,0.43,1.56,2.45,1.33,2.26,495
87,2,11.65,1.67,2.62,26.0,88,1.92,1.61,0.4,1.34,2.6,1.36,3.21,562
88,2,11.64,2.06,2.46,21.6,84,1.95,1.69,0.48,1.35,2.8,1.0,2.75,680
89,2,12.08,1.33,2.3,23.6,70,2.2,1.59,0.42,1.38,1.74,1.07,3.21,625
90,2,12.08,1.83,2.32,18.5,81,1.6,1.5,0.52,1.64,2.4,1.08,2.27,480
91,2,12.0,1.51,2.42,22.0,86,1.45,1.25,0.5,1.63,3.6,1.05,2.65,450
92,2,12.69,1.53,2.26,20.7,80,1.38,1.46,0.58,1.62,3.05,0.96,2.06,495
93,2,12.29,2.83,2.22,18.0,88,2.45,2.25,0.25,1.99,2.15,1.15,3.3,290
94,2,11.62,1.99,2.28,18.0,98,3.02,2.26,0.17,1.35,3.25,1.16,2.96,345
95,2,12.47,1.52,2.2,19.0,162,2.5,2.27,0.32,3.28,2.6,1.16,2.63,937
96,2,11.81,2.12,2.74,21.5,134,1.6,0.99,0.14,1.56,2.5,0.95,2.26,625
97,2,12.29,1.41,1.98,16.0,85,2.55,2.5,0.29,1.77,2.9,1.23,2.74,428
98,2,12.37,1.07,2.1,18.5,88,3.52,3.75,0.24,1.95,4.5,1.04,2.77,660
99,2,12.29,3.17,2.21,18.0,88,2.85,2.99,0.45,2.81,2.3,1.42,2.83,406
100,2,12.08,2.08,1.7,17.5,97,2.23,2.17,0.26,1.4,3.3,1.27,2.96,710
101,2,12.6,1.34,1.9,18.5,88,1.45,1.36,0.29,1.35,2.45,1.04,2.77,562
102,2,12.34,2.45,2.46,21.0,98,2.56,2.11,0.34,1.31,2.8,0.8,3.38,438
103,2,11.82,1.72,1.88,19.5,86,2.5,1.64,0.37,1.42,2.06,0.94,2.44,415
104,2,12.51,1.73,1.98,20.5,85,2.2,1.92,0.32,1.48,2.94,1.04,3.57,672
105,2,12.42,2.55,2.27,22.0,90,1.68,1.84,0.66,1.42,2.7,0.86,3.3,315
106,2,12.25,1.73,2.12,19.0,80,1.65,2.03,0.37,1.63,3.4,1.0,3.17,510
107,2,12.72,1.75,2.28,22.5,84,1.38,1.76,0.48,1.63,3.3,0.88,2.42,488
108,2,12.22,1.29,1.94,19.0,92,2.36,2.04,0.39,2.08,2.7,0.86,3.02,312
109,2,11.61,1.35,2.7,20.0,94,2.74,2.92,0.29,2.49,2.65,0.96,3.26,680
110,2,11.46,3.74,1.82,19.5,107,3.18,2.58,0.24,3.58,2.9,0.75,2.81,562
111,2,12.52,2.43,2.17,21.0,88,2.55,2.27,0.26,1.22,2.0,0.9,2.78,325
112,2,11.76,2.68,2.92,20.0,103,1.75,2.03,0.6,1.05,3.8,1.23,2.5,607
113,2,11.41,0.74,2.5,21.0,88,2.48,2.01,0.42,1.44,3.08,1.1,2.31,434
114,2,12.08,1.39,2.5,22.5,84,2.56,2.29,0.43,1.04,2.9,0.93,3.19,385
115,2,11.03,1.51,2.2,21.5,85,2.46,2.17,0.52,2.01,1.9,1.71,2.87,407
116,2,11.82,1.47,1.99,20.8,86,1.98,1.6,0.3,1.53,1.95,0.95,3.33,495
117,2,12.42,1.61,2.19,22.5,108,2.0,2.09,0.34,1.61,2.06,1.06,2.96,345
118,2,12.77,3.43,1.98,16.0,80,1.63,1.25,0.43,0.83,3.4,0.7,2.12,372
119,2,12.0,3.43,2.0,19.0,87,2.0,1.64,0.37,1.87,1.28,0.93,3.05,564
120,2,11.45,2.4,2.42,20.0,96,2.9,2.79,0.32,1.83,3.25,0.8,3.39,625
121,2,11.56,2.05,3.23,28.5,119,3.18,5.08,0.47,1.87,6.0,0.93,3.69,465
122,2,12.42,4.43,2.73,26.5,102,2.2,2.13,0.43,1.71,2.08,0.92,3.12,365
123,2,13.05,5.8,2.13,21.5,86,2.62,2.65,0.3,2.01,2.6,0.73,3.1,380
124,2,11.87,4.31,2.39,21.0,82,2.86,3.03,0.21,2.91,2.8,0.75,3.64,380
125,2,12.07,2.16,2.17,21.0,85,2.6,2.65,0.37,1.35,2.76,0.86,3.28,378
126,2,12.43,1.53,2.29,21.5,86,2.74,3.15,0.39,1.77,3.94,0.69,2.84,352
127,2,11.79,2.13,2.78,28.5,92,2.13,2.24,0.58,1.76,3.0,0.97,2.44,466
128,2,12.37,1.63,2.3,24.5,88,2.22,2.45,0.4,1.9,2.12,0.89,2.78,342
129,2,12.04,4.3,2.38,22.0,80,2.1,1.75,0.42,1.35,2.6,0.79,2.57,580
130,3,12.86,1.35,2.32,18.0,122,1.51,1.25,0.21,0.94,4.1,0.76,1.29,630
131,3,12.88,2.99,2.4,20.0,104,1.3,1.22,0.24,0.83,5.4,0.74,1.42,530
132,3,12.81,2.31,2.4,24.0,98,1.15,1.09,0.27,0.83,5.7,0.66,1.36,560
133,3,12.7,3.55,2.36,21.5,106,1.7,1.2,0.17,0.84,5.0,0.78,1.29,600
134,3,12.51,1.24,2.25,17.5,85,2.0,0.58,0.6,1.25,5.45,0.75,1.51,650
135,3,12.6,2.46,2.2,18.5,94,1.62,0.66,0.63,0.94,7.1,0.73,1.58,695
136,3,12.25,4.72,2.54,21.0,89,1.38,0.47,0.53,0.8,3.85,0.75,1.27,720
137,3,12.53,5.51,2.64,25.0,96,1.79,0.6,0.63,1.1,5.0,0.82,1.69,515
138,3,13.49,3.59,2.19,19.5,88,1.62,0.48,0.58,0.88,5.7,0.81,1.82,580
139,3,12.84,2.96,2.61,24.0,101,2.32,0.6,0.53,0.81,4.92,0.89,2.15,590
140,3,12.93,2.81,2.7,21.0,96,1.54,0.5,0.53,0.75,4.6,0.77,2.31,600
141,3,13.36,2.56,2.35,20.0,89,1.4,0.5,0.37,0.64,5.6,0.7,2.47,780
142,3,13.52,3.17,2.72,23.5,97,1.55,0.52,0.5,0.55,4.35,0.89,2.06,520
143,3,13.62,4.95,2.35,20.0,92,2.0,0.8,0.47,1.02,4.4,0.91,2.05,550
144,3,12.25,3.88,2.2,18.5,112,1.38,0.78,0.29,1.14,8.21,0.65,2.0,855
145,3,13.16,3.57,2.15,21.0,102,1.5,0.55,0.43,1.3,4.0,0.6,1.68,830
146,3,13.88,5.04,2.23,20.0,80,0.98,0.34,0.4,0.68,4.9,0.58,1.33,415
147,3,12.87,4.61,2.48,21.5,86,1.7,0.65,0.47,0.86,7.65,0.54,1.86,625
148,3,13.32,3.24,2.38,21.5,92,1.93,0.76,0.45,1.25,8.42,0.55,1.62,650
149,3,13.08,3.9,2.36,21.5,113,1.41,1.39,0.34,1.14,9.4,0.57,1.33,550
150,3,13.5,3.12,2.62,24.0,123,1.4,1.57,0.22,1.25,8.6,0.59,1.3,500
151,3,12.79,2.67,2.48,22.0,112,1.48,1.36,0.24,1.26,10.8,0.48,1.47,480
152,3,13.11,1.9,2.75,25.5,116,2.2,1.28,0.26,1.56,7.1,0.61,1.33,425
153,3,13.23,3.3,2.28,18.5,98,1.8,0.83,0.61,1.87,10.52,0.56,1.51,675
154,3,12.58,1.29,2.1,20.0,103,1.48,0.58,0.53,1.4,7.6,0.58,1.55,640
155,3,13.17,5.19,2.32,22.0,93,1.74,0.63,0.61,1.55,7.9,0.6,1.48,725
156,3,13.84,4.12,2.38,19.5,89,1.8,0.83,0.48,1.56,9.01,0.57,1.64,480
157,3,12.45,3.03,2.64,27.0,97,1.9,0.58,0.63,1.14,7.5,0.67,1.73,880
158,3,14.34,1.68,2.7,25.0,98,2.8,1.31,0.53,2.7,13.0,0.57,1.96,660
159,3,13.48,1.67,2.64,22.5,89,2.6,1.1,0.52,2.29,11.75,0.57,1.78,620
160,3,12.36,3.83,2.38,21.0,88,2.3,0.92,0.5,1.04,7.65,0.56,1.58,520
161,3,13.69,3.26,2.54,20.0,107,1.83,0.56,0.5,0.8,5.88,0.96,1.82,680
162,3,12.85,3.27,2.58,22.0,106,1.65,0.6,0.6,0.96,5.58,0.87,2.11,570
163,3,12.96,3.45,2.35,18.5,106,1.39,0.7,0.4,0.94,5.28,0.68,1.75,675
164,3,13.78,2.76,2.3,22.0,90,1.35,0.68,0.41,1.03,9.58,0.7,1.68,615
165,3,13.73,4.36,2.26,22.5,88,1.28,0.47,0.52,1.15,6.62,0.78,1.75,520
166,3,13.45,3.7,2.6,23.0,111,1.7,0.92,0.43,1.46,10.68,0.85,1.56,695
167,3,12.82,3.37,2.3,19.5,88,1.48,0.66,0.4,0.97,10.26,0.72,1.75,685
168,3,13.58,2.58,2.69,24.5,105,1.55,0.84,0.39,1.54,8.66,0.74,1.8,750
169,3,13.4,4.6,2.86,25.0,112,1.98,0.96,0.27,1.11,8.5,0.67,1.92,630
170,3,12.2,3.03,2.32,19.0,96,1.25,0.49,0.4,0.73,5.5,0.66,1.83,510
171,3,12.77,2.39,2.28,19.5,86,1.39,0.51,0.48,0.64,9.899999,0.57,1.63,470
172,3,14.16,2.51,2.48,20.0,91,1.68,0.7,0.44,1.24,9.7,0.62,1.71,660
173,3,13.71,5.65,2.45,20.5,95,1.68,0.61,0.52,1.06,7.7,0.64,1.74,740
174,3,13.4,3.91,2.48,23.0,102,1.8,0.75,0.43,1.41,7.3,0.7,1.56,750
175,3,13.27,4.28,2.26,20.0,120,1.59,0.69,0.43,1.35,10.2,0.59,1.56,835
176,3,13.17,2.59,2.37,20.0,120,1.65,0.68,0.53,1.46,9.3,0.6,1.62,840
177,3,14.13,4.1,2.74,24.5,96,2.05,0.76,0.56,1.35,9.2,0.61,1.6,560
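
A quick way to sanity-check the file before running PCA (an illustrative snippet, not one of the post's scripts):

import pandas as pd

df = pd.read_csv("wine.csv", index_col=0)
print(df.shape)                     # (178, 14): the class label plus 13 features
print(df["class"].value_counts())  # classes 1/2/3 hold 59/71/48 samples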



wine4data.csv
,class,Alcohol,Flavanoids,Color intensity,Proline
0,1,14.23,3.06,5.64,1065
1,1,13.2,2.76,4.38,1050
2,1,13.16,3.24,5.68,1185
3,1,14.37,3.49,7.8,1480
4,1,13.24,2.69,4.32,735
5,1,14.2,3.39,6.75,1450
6,1,14.39,2.52,5.25,1290
7,1,14.06,2.51,5.05,1295
8,1,14.83,2.98,5.2,1045
9,1,13.86,3.15,7.22,1045
10,1,14.1,3.32,5.75,1510
11,1,14.12,2.43,5.0,1280
12,1,13.75,2.76,5.6,1320
13,1,14.75,3.69,5.4,1150
14,1,14.38,3.64,7.5,1547
15,1,13.63,2.91,7.3,1310
16,1,14.3,3.14,6.2,1280
17,1,13.83,3.4,6.6,1130
18,1,14.19,3.93,8.7,1680
19,1,13.64,3.03,5.1,845
20,1,14.06,3.17,5.65,780
21,1,12.93,2.41,4.5,770
22,1,13.71,2.88,3.8,1035
23,1,12.85,2.37,3.93,1015
24,1,13.5,2.61,3.52,845
25,1,13.05,2.68,3.58,830
26,1,13.39,2.94,4.8,1195
27,1,13.3,2.19,3.95,1285
28,1,13.87,2.97,4.5,915
29,1,14.02,2.33,4.7,1035
30,1,13.73,3.25,5.7,1285
31,1,13.58,3.19,6.9,1515
32,1,13.68,2.69,3.84,990
33,1,13.76,2.74,5.4,1235
34,1,13.51,2.53,4.2,1095
35,1,13.48,2.98,5.1,920
36,1,13.28,2.68,4.6,880
37,1,13.05,2.43,4.25,1105
38,1,13.07,2.64,3.7,1020
39,1,14.22,3.04,5.1,760
40,1,13.56,3.29,6.13,795
41,1,13.41,2.68,4.28,1035
42,1,13.88,3.56,5.43,1095
43,1,13.24,2.63,4.36,680
44,1,13.05,3.0,5.04,885
45,1,14.21,2.65,5.24,1080
46,1,14.38,3.17,4.9,1065
47,1,13.9,3.39,6.1,985
48,1,14.1,2.92,6.2,1060
49,1,13.94,3.54,8.9,1260
50,1,13.05,3.27,7.2,1150
51,1,13.83,2.99,5.6,1265
52,1,13.82,3.74,7.05,1190
53,1,13.77,2.79,6.3,1375
54,1,13.74,2.9,5.85,1060
55,1,13.56,2.78,6.25,1120
56,1,14.22,3.0,6.38,970
57,1,13.29,3.23,6.0,1270
58,1,13.72,3.67,6.8,1285
59,2,12.37,0.57,1.95,520
60,2,12.33,1.09,3.27,680
61,2,12.64,1.41,5.75,450
62,2,13.67,1.79,3.8,630
63,2,12.37,3.1,4.45,420
64,2,12.17,1.75,2.95,355
65,2,12.37,2.65,4.6,678
66,2,13.11,3.18,5.3,502
67,2,12.37,2.0,4.68,510
68,2,13.34,1.3,3.17,750
69,2,12.21,1.28,2.85,718
70,2,12.29,1.02,3.05,870
71,2,13.86,2.86,3.38,410
72,2,13.49,1.84,3.74,472
73,2,12.99,2.89,3.35,985
74,2,11.96,2.14,3.21,886
75,2,11.66,1.57,3.8,428
76,2,13.03,2.03,4.6,392
77,2,11.84,1.32,2.65,500
78,2,12.33,1.85,3.4,750
79,2,12.7,2.55,2.57,463
80,2,12.0,2.26,2.5,278
81,2,12.72,2.53,3.9,714
82,2,12.08,1.58,2.2,630
83,2,13.05,1.59,4.8,515
84,2,11.84,2.21,3.05,520
85,2,12.67,1.94,2.62,450
86,2,12.16,1.69,2.45,495
87,2,11.65,1.61,2.6,562
88,2,11.64,1.69,2.8,680
89,2,12.08,1.59,1.74,625
90,2,12.08,1.5,2.4,480
91,2,12.0,1.25,3.6,450
92,2,12.69,1.46,3.05,495
93,2,12.29,2.25,2.15,290
94,2,11.62,2.26,3.25,345
95,2,12.47,2.27,2.6,937
96,2,11.81,0.99,2.5,625
97,2,12.29,2.5,2.9,428
98,2,12.37,3.75,4.5,660
99,2,12.29,2.99,2.3,406
100,2,12.08,2.17,3.3,710
101,2,12.6,1.36,2.45,562
102,2,12.34,2.11,2.8,438
103,2,11.82,1.64,2.06,415
104,2,12.51,1.92,2.94,672
105,2,12.42,1.84,2.7,315
106,2,12.25,2.03,3.4,510
107,2,12.72,1.76,3.3,488
108,2,12.22,2.04,2.7,312
109,2,11.61,2.92,2.65,680
110,2,11.46,2.58,2.9,562
111,2,12.52,2.27,2.0,325
112,2,11.76,2.03,3.8,607
113,2,11.41,2.01,3.08,434
114,2,12.08,2.29,2.9,385
115,2,11.03,2.17,1.9,407
116,2,11.82,1.6,1.95,495
117,2,12.42,2.09,2.06,345
118,2,12.77,1.25,3.4,372
119,2,12.0,1.64,1.28,564
120,2,11.45,2.79,3.25,625
121,2,11.56,5.08,6.0,465
122,2,12.42,2.13,2.08,365
123,2,13.05,2.65,2.6,380
124,2,11.87,3.03,2.8,380
125,2,12.07,2.65,2.76,378
126,2,12.43,3.15,3.94,352
127,2,11.79,2.24,3.0,466
128,2,12.37,2.45,2.12,342
129,2,12.04,1.75,2.6,580
130,3,12.86,1.25,4.1,630
131,3,12.88,1.22,5.4,530
132,3,12.81,1.09,5.7,560
133,3,12.7,1.2,5.0,600
134,3,12.51,0.58,5.45,650
135,3,12.6,0.66,7.1,695
136,3,12.25,0.47,3.85,720
137,3,12.53,0.6,5.0,515
138,3,13.49,0.48,5.7,580
139,3,12.84,0.6,4.92,590
140,3,12.93,0.5,4.6,600
141,3,13.36,0.5,5.6,780
142,3,13.52,0.52,4.35,520
143,3,13.62,0.8,4.4,550
144,3,12.25,0.78,8.21,855
145,3,13.16,0.55,4.0,830
146,3,13.88,0.34,4.9,415
147,3,12.87,0.65,7.65,625
148,3,13.32,0.76,8.42,650
149,3,13.08,1.39,9.4,550
150,3,13.5,1.57,8.6,500
151,3,12.79,1.36,10.8,480
152,3,13.11,1.28,7.1,425
153,3,13.23,0.83,10.52,675
154,3,12.58,0.58,7.6,640
155,3,13.17,0.63,7.9,725
156,3,13.84,0.83,9.01,480
157,3,12.45,0.58,7.5,880
158,3,14.34,1.31,13.0,660
159,3,13.48,1.1,11.75,620
160,3,12.36,0.92,7.65,520
161,3,13.69,0.56,5.88,680
162,3,12.85,0.6,5.58,570
163,3,12.96,0.7,5.28,675
164,3,13.78,0.68,9.58,615
165,3,13.73,0.47,6.62,520
166,3,13.45,0.92,10.68,695
167,3,12.82,0.66,10.26,685
168,3,13.58,0.84,8.66,750
169,3,13.4,0.96,8.5,630
170,3,12.2,0.49,5.5,510
171,3,12.77,0.51,9.899999,470
172,3,14.16,0.7,9.7,660
173,3,13.71,0.61,7.7,740
174,3,13.4,0.75,7.3,750
175,3,13.27,0.69,10.2,835
176,3,13.17,0.68,9.3,840
177,3,14.13,0.76,9.2,560




Output files
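
dfclsspc.csv holds, for each sample, its class label together with its scores on the 13 principal components (PC1 is the direction of greatest variance in the standardized data, PC2 the next, and so on).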

dfclsspc.csv
,class,PC1,PC2,PC3,PC4,PC5,PC6,PC7,PC8,PC9,PC10,PC11,PC12,PC13
0,1,3.3074209742892218,-1.4394022531822968,-0.1652728297819703,-0.21502462886791013,0.6910933491309144,-0.22325036575091023,0.5947488306815286,0.06495586200824276,0.6396383626544324,1.0180839601389415,-0.4502931724404516,0.5392891438984798,-0.06605230525465954
1,1,2.203249813420225,0.3324550711941807,-2.020757060479493,-0.2905387432289206,-0.25692986617101377,-0.9245123057914416,0.05362434498856938,1.0215343244190644,-0.3079779836123777,0.15925214078798267,-0.1422560187907293,0.3871456498926666,0.0036262731908069533
2,1,2.5096606947618634,-1.0282507242941965,0.9800540552553032,0.7228631990496799,-0.25032697580580315,0.5477309633081178,0.4230121843493222,-0.34324787002566237,-1.1745212875879927,0.11304197887868297,-0.28586645250265047,0.000581931623216335,0.021655423052880457
3,1,3.746497190498001,-2.748618390858541,-0.17569622422135825,0.5663856019167641,-0.31096439791833425,0.11410911207014857,-0.38225899027920607,0.6417831052414534,0.05239661692354329,0.2387391504379256,0.7574476431290621,-0.24133877573986928,-0.3684441939604296
4,1,1.0060704895977026,-0.8673840350754072,2.0209872565518503,-0.4086131396933835,0.29761795846868305,-0.4053760832003349,0.4428253058240363,0.41552831458168393,0.32589984041222253,-0.07814604136464899,-0.5244656285832899,-0.21605469342071176,-0.07914032015451253
5,1,3.0416737256112696,-2.116430916831081,-0.6276253711399857,-0.5141870345752622,-0.6302408997615397,0.1230833540519225,0.40052392817214855,0.3937826075026363,-0.15171809772553935,-0.10170890759159312,0.4044444267393124,-0.37836536058787584,0.14474701715254223
6,1,2.4422005148251396,-1.171545342735355,-0.9743463764348986,-0.06564532699526031,-1.0248708712061014,-0.618376376956238,0.05274194975480953,-0.370887632301127,-0.4557302943391878,1.0137039217067525,-0.4411888701049246,0.14083257227219292,-0.27101368729615755
7,1,2.053643788593804,-1.6044371435937752,0.14587040015624617,-1.1892532694404772,0.0766871685043793,-1.4357561211356402,0.03228452156232402,0.23232359673626163,0.12302328241458269,0.73353084461641,0.29272910497317384,0.37859505482382616,-0.10985390195718889
8,1,2.5038113458622306,-0.9154884741383152,-1.7659873885920068,0.05611207579344695,-0.8897471068353372,-0.12881766870639613,0.12493265131002868,-0.49817261903938187,0.6048828958113518,0.17361686082084915,-0.5075012931685446,-0.6334624142350219,0.14168386288991672
9,1,2.745882382732543,-0.7872170290019044,-0.9814788553712221,0.3483987771430787,-0.46723506146572896,0.1629320385996138,-0.8718927400564324,0.1501559309777843,0.22984079886401712,0.17891540399574188,0.012443070584149108,0.5487787840836071,-0.04233543013419706
10,1,3.4699483745007793,-1.298669845233353,-0.42154608562726487,0.026766255576549346,-0.3374229485548122,-0.18238713921172017,0.2474643166009073,-1.2032168355528527,-0.5230983760329564,-0.21393410285960543,0.7304540896421186,-0.08108353968059738,0.1222571082078051
11,1,1.7498168777295426,-0.610255770402334,-1.1875284443033078,-0.8876603544925262,-0.7364950947296743,-0.5514995363688643,-0.4330446734643984,-0.9823554302145003,-0.47269705832608166,0.2196631370414422,0.041319595137458406,-0.1621036320152582,0.14236819168525308
12,1,2.107517286793351,-0.6738056143108792,-0.8626529849997499,-0.3554353694580963,-1.206525260365546,-0.21447112037911437,-0.2419147637129789,-0.46020817231746297,-0.8763413358192799,-0.09623389994997246,0.053894423602011014,-0.08949405724647415,-0.004921359024082772
13,1,3.448429214077157,-1.1274494764095218,-1.2008887892803866,0.16200107724821255,-2.0174358633910314,0.7436829854158363,1.471622149613517,-0.37931575174243415,-0.025629848511892567,-0.2439644181301538,-1.228335907770018,-0.770160959221526,0.2251860836430323
14,1,4.300652282433133,-2.090079711129265,-1.2603574351818916,0.30491306651046135,-1.0267961441431415,0.7934048131292087,0.9971580839472901,-0.4037524704055742,-0.8379787009012962,-0.36340777660023316,-0.3148579452256333,-0.14241798751992002,-0.0944575025650891
15,1,2.2987038329874876,-1.657875063256505,0.21728966750242665,-1.4365377266781327,-0.46822928494251104,-0.4210257396075808,-0.1804584505882065,0.08388008384447057,-0.40332019733664,-0.7974057420100701,0.10239527155432811,0.496395591301705,-0.06581411051361986
16,1,2.1658456828096475,-2.32075875406352,0.8293902555398711,-0.9100341775815219,-0.00011461709257406044,-0.06634216266229445,0.10917979866593315,-0.39831120973572426,0.06088354042464173,0.0194589645139306,0.0781459967699548,-0.4993884916803938,0.3371517167514269
17,1,1.8936294749078209,-1.6267799275590016,0.7926777435285638,-1.0793357115710438,-0.4374711601650541,0.3639042823579474,0.09138877248609027,0.11252284383803621,0.3809586363156592,-0.400192040489664,0.30821248171070925,-0.26308323130129213,0.5577202570204506
18,1,3.5320216723838582,-2.5112597068368268,-0.4840929395387666,-0.907762118959413,-1.1498357894573201,0.3030222314279165,-0.03337008030637092,-0.03549997731606875,-0.44032390819620665,-0.7823700003528729,0.9159245944564347,0.1646236980558128,0.5130957702205607
19,1,2.0786585582209844,-1.0581530706706517,-0.16428325513842096,0.48363314814167424,0.8800288984114187,-1.3890992216187479,-0.1021839323471412,0.5782839954550154,0.05858753222730578,-0.14962722141782256,-0.8333756983105735,0.28706483106453895,0.05854416728645179
20,1,3.1156137623839237,-0.784683607297999,-0.36386067565885344,-0.025489786983200084,0.9696782561931889,-0.10662099654256281,0.26401772864278217,0.18507932891860365,1.3147535460636144,0.361072678370261,-0.4599227273757287,0.47212668687598763,-0.027701208997739344
21,1,1.0835136063891346,-0.24106353999608904,0.9343259780756646,1.027012761151012,0.3150828841781848,-1.207608377792764,0.2960964456899515,-0.10615183924029503,-0.5710882401159311,-0.09217259996793538,-0.6569765422030359,0.6807690080652379,-0.30378804722445985
22,1,2.5280926337657177,0.09158227809738445,-0.3110552099041699,-0.04825511364548911,-0.42837316874357745,-1.0120881281178324,-0.1274102375766483,-0.0763985397015914,0.10795820024989748,0.8231959971323428,-0.08883189118523681,0.48074585946183995,-0.04480734162237901
23,1,1.6403610793700942,0.5148266657837177,0.14348035379962631,-0.41255625272774066,-0.37466308512797625,-0.7822987503101085,-0.6665220049334892,0.1947161702732049,-0.690454666883279,0.4703556297230623,-0.03241437683196368,0.48248557640744244,-0.3427830882036842
24,1,1.756620656061524,0.316256810150089,0.8877813227614748,-0.11479179737973463,-0.5551025444062897,-0.8962207392039387,-0.6217968483093256,-0.34131104539725116,0.09471675139619257,0.6645556770303804,-0.37844149754573597,0.31319242506024947,-0.22690655904202958
25,1,0.9872940634572199,-0.9380212926164957,3.8101600037581993,-1.3178437193098371,0.15855743712771916,-0.26438245046216347,0.48055175254882465,0.09462456001476269,0.11008284628873495,0.4646479965771064,-0.43827491734288576,-0.1092096192061262,-0.05793804847985151
26,1,1.7702838696187102,-0.6842449613073472,-0.08645652300567644,-0.23225162615194442,-1.1397276527607676,-0.5698669628573323,-0.45673986407245715,0.906164864308151,-0.7401750715569274,0.4412991048963981,-0.03300611102727713,-0.20824765429536918,0.0021594068651640897
27,1,1.2319487845303723,0.08955441908422898,-1.382995281064454,-0.49428839204418795,-0.37488389601603217,-0.6063776626563157,-0.3619891845291195,-0.23582298942501034,-0.7327268560094734,0.2985895645781169,0.8634241336219153,-0.3761562953604829,0.00856754802090968
28,1,2.182250467589578,-0.6876298991801476,1.3906440407844838,-0.7753047693138667,-0.8083039404215187,-0.6003788455681809,0.11760100890786178,-0.07458528540577207,0.4584504007233105,-0.028027557394114493,-0.5353370766374111,0.049439015160360755,-0.19814020897982706
29,1,2.249762670593061,-0.1909233649705948,-1.0895836726362598,0.28534736854705495,-0.4817143981931104,-0.33429054680439735,-0.15790488987377044,-0.4687420345631156,0.1371152280338577,0.8001022331833102,-0.1215735713528071,0.13534940104875146,-0.4453531132588418
30,1,2.493187044133723,-1.2373434374484842,1.3821190625909283,-0.36583324846976856,-0.6205822759416292,0.580816767537243,-0.41482756937395976,-1.0564552255595354,-0.5368809345332655,-0.31983451667787954,0.3754783945565923,-0.6076575043735635,0.08566163713314369
31,1,2.669879637173231,-1.4677333463513165,-0.3313270943763054,-0.34837116214492225,-0.08635074375669875,0.16436614107677178,-0.5327391371025523,-0.5303100989422633,-0.7853027410336859,-0.21836593345032498,1.0717787842468531,0.0027784771207941586,0.2717838068437982
32,1,1.6239980127950082,-0.052556195849641114,-0.1666585823710191,-0.7472064658409209,-0.6342735392504593,-0.04419383614561607,0.9775719807371694,-0.49766686758976064,0.13115908614474558,0.16510415174154597,-0.21989075867410193,-0.20352133225509988,0.24508396851363604
33,1,1.8973386973116355,-1.6284667254316378,1.1687851165283358,-2.333845540289786,-0.1862885542722457,-0.3270651524288018,0.9811655160257884,0.5112019938757084,0.7097196855459511,0.09932851800844822,0.6231038549223441,0.10841917216745585,0.05438415381203361
34,1,1.4064211811087515,-0.6959710736769414,0.47839353437958726,-1.0571042518965548,0.09888181066019497,-0.7082520579816178,-0.2290926894429825,-0.2268349971138301,-0.395617175267514,0.19939496808119422,-0.17502293396398036,-0.21905640540404786,0.10660953851632171
35,1,1.8984708735346252,-0.1762138728925004,0.4495668658230892,0.28331435651213255,-0.16249107741801475,-0.17415046745002996,-0.655976907177617,-0.353731127276279,0.2075761150514321,0.335996604284234,0.19795139739633327,0.23229121221858554,0.13238084835183042
36,1,1.3809666889579084,-0.6567871359160178,0.45714901829511007,-1.2810444786022082,-0.2044349041261064,-0.6187462210450853,-0.07018003490128058,0.9580447329286421,-0.30442601630070387,-0.20278051598462588,-1.096826563873629,-0.1468095403158068,-0.06111848675607615
37,1,1.119050702611552,-0.11378877580647277,-0.03899727006764189,-0.9537109191467315,-0.3412486136528005,-0.4079754592431164,-0.40745413134915226,-0.029258520819144356,-0.851462290917055,-0.3125910908067634,0.0037966146281084873,-0.40109969115134153,0.005741893436263629
38,1,1.497968909967301,0.7672676364432787,-1.4221655873220307,-0.7553772638646288,-0.16537762445167264,-0.4142836165922483,-0.026823286858989857,-0.026281114883573573,-0.27733523601659527,-0.28249566963810063,0.2830593031267619,-0.17884943003517423,0.3184168070718743
39,1,2.522684900799988,-1.797930230498623,-0.3421871201505835,1.1836354020937956,1.2942447997362065,-1.537932445078131,0.9629718580243375,0.7667706217150407,0.7193014784445417,0.022617923081025657,-1.1901643071738848,0.1684138709622455,-0.17778591046166153
40,1,2.5808152555231954,-0.7774232863343548,-0.11814419550667028,0.4746325582302913,0.40218749325962383,0.7231431257565663,0.5475872612739148,0.7040422599393478,0.8152039707257841,0.3232348295554939,-0.28156939298558853,0.19608159020103444,0.07672004676988974
41,1,0.6666015906093644,-0.16948284982577075,-0.7811589922246801,1.3093901444353093,-0.37071766782718385,-1.3209653565080965,0.027115198909729615,-0.3251205326112539,-0.2842835743836267,-0.07201729078912822,0.825148162413623,-0.07319633456476206,0.42764627066013927
42,1,3.0621689789534154,-1.1526674200517177,-0.31187831261631505,0.5487367150016296,-0.303270163810577,-0.8056447910596479,-1.0085725134062486,0.9272066340562984,-0.2176283934871774,0.22944230031473092,-0.4412790640674128,-0.18586058482059148,-0.02288917812756132
43,1,0.46090896859018554,-0.3298117727341145,-0.2009097539350513,1.4324530849101924,0.28242818123653074,-1.0257100418339755,0.5697869806117043,0.5641789109456607,0.292474332301798,-0.07797227229003006,-0.07052267604866498,-0.04944706574649849,0.20148853183729126
44,1,2.0954409429340415,0.07080917675823027,-0.654004546648923,0.7556133440756121,0.47021131661949966,0.3707621014344715,0.02049032322337044,0.6225777810094425,0.29816197629125823,0.4280041824723777,0.4697757008530699,0.056031564270241016,0.08083833921783147
45,1,1.1329702044857708,-1.7721084874548823,0.02862498812610334,0.6898851285614771,-0.06075282930630004,-1.9084843071292972,0.17221136143917853,0.2559916712410326,0.5518592625595473,0.18682540238535605,0.5928099092388228,0.046614245635118096,0.061515246915304536
46,1,2.718931184836575,-1.1879835328800186,-0.5382549086008053,1.3856419443788857,-0.4954968511330634,-1.00815469759938,0.7373499566098313,-0.2425697998286818,0.44534377991020224,-0.1107552605128333,0.04849746100373086,-0.20265624011510713,-0.15163954267164179
47,1,2.813403001722132,-0.6444407091857332,-1.1523019052426833,0.9848846707803504,-0.009948952887740456,0.2899475175274655,-0.4725809269094898,0.14181256965192673,0.4054588785283,0.2530761197410418,0.12264301091701654,-0.08498487526933482,0.18875649446092163
48,1,2.004197250821421,-1.2435216352751537,-0.05713282309762617,0.22999432064387018,-0.3835690681530925,0.5244747548739693,0.22787579557395235,-0.7988747536001481,0.11495834676584969,-0.02763811727885379,-0.10303562176780824,-0.31705166682844677,0.1286614139939428
49,1,2.6998752798847527,-1.7470392190832238,-0.6413045687209715,-0.09994957430041805,-0.4933214313470488,0.7302185575966701,0.007598470443184706,-0.25056454251178295,0.17706487173552557,-0.30452471706595635,0.5455590676600995,0.7390721033606447,0.5471264055483989
50,1,3.205874088435237,-0.1665222556837136,-1.9680201311265506,1.117531516655094,0.012243659417974024,1.227356703636638,0.260077529840789,-0.46673008361894996,-1.2289919950755133,-0.5916490700865153,-0.3662784735099781,0.4098683788116398,0.08056253782995086
51,1,2.850917729698514,-0.7431823756341741,0.0047062264642350506,-0.21467099585967478,-0.7566740076926165,-0.19158263838572048,-0.35275168949074714,-1.0691076151736254,-0.9841835042939064,0.11383364053299355,-0.5481385506010691,0.31569140751186686,-0.051318447526751305
52,1,3.4957432833483915,-1.608197324246599,-0.5193096199166687,0.10623674255434105,-0.5999919386244159,0.18302806568711835,0.1514454199254622,1.4171049790804988,0.40167551343370506,-0.42365254343764464,0.2912557802932078,-0.08961885987011037,-0.1306544786285282
53,1,2.21853316453292,-1.869893254938051,0.33859471526845425,-1.2035714486237812,-0.575022180192874,-0.24526282437367694,0.463852190134525,0.39815366577388767,-0.31440735229476435,-0.08225067430242156,0.3083875379413572,4.517517889077533e-05,-0.15919986642811818
54,1,2.14094846469849,-1.0138914715076128,-0.9550686278877919,-0.23964218834466247,0.8776288187556921,-0.3607573432253308,-0.41078908947462445,0.2887533221523879,0.27974744212291525,0.4534968528069617,0.17194205574630242,0.17814253504753816,0.29528258227127613
55,1,2.4623833955406345,-1.325269883242171,0.5119931816713286,0.2546303390667216,0.9095743021841345,0.5533086478671215,-0.3573925646785929,-0.5360108046608262,-0.13526474578281494,0.1554481250848168,0.3319026170817106,-0.16975040570688926,-0.24105952400843178
56,1,2.733806174910112,-1.4325078506595068,-0.6107505416832127,0.20516868477134767,0.43304230303530944,0.1156714730387753,-0.039119002463002574,0.3503989857564746,0.9304625499321768,0.34587671320010077,-0.04255883464266493,-0.0538491974438759,-0.16202899318222075
57,1,2.167626307105844,-1.2087899891028415,0.26104322114957057,-0.5030071631954668,-0.6934656296587212,-0.25626085578808605,-0.2184473626059731,0.6101237638245087,-0.8366270634265777,-0.45403817168779276,0.08348552294273211,-0.12523066569000058,0.11125097010702825
58,1,3.1305492478324086,-1.7267082790131467,-0.2848578627472581,0.22991863957406958,0.0748297503205185,0.35057324303533727,-0.8407037675895143,0.6245159753615217,-0.413469281224405,-0.20213874376257274,0.2591404564089225,-0.5561413046778272,0.16363643647318915
59,2,-0.9259699188279119,3.064840615364419,-4.572166473843188,-1.0488856100276647,0.4558382584440355,-0.387160528537968,-0.09201200414567849,0.675902573278413,0.37491519823376523,-0.4141407262136847,0.3647158846912128,-0.6006007957832995,-0.49094991402867344
60,2,-1.5381412259602447,1.3775575833553673,-0.8722226768757676,-2.8819899979961154,-0.9752572409888745,0.034912671818695944,1.0558745246379948,1.0723508166333218,0.27522103677648074,-0.5805196706023145,0.14807756976197387,-0.301964127781274,-0.10250455647178205
61,2,-1.8310844937032627,0.8276494232885928,-1.6011854342330314,-1.4480201619659623,-0.2932485782861571,0.6821923265710889,0.2534286521815966,0.9796897296315416,0.9047201126689395,-0.5703136497046722,0.15013235796669783,-0.08382021110810935,0.3158692170457201
62,2,0.03052073874288906,1.2592339961221526,-1.7793885686600854,-1.2031820383251959,-0.39249713478280623,-0.6053793580732025,-0.5693772318659042,-0.6181201431251813,1.198577374846033,-0.30434370836003777,0.251909057858663,-0.14043435759752598,0.16328767726037324
63,2,2.044494334245672,1.9196175936288766,-0.007348049383722965,0.7192928638291637,-0.08089844366657982,0.7623755844368711,-1.0728110652643927,0.29770876732503737,0.6093381895655223,-1.4035007349592772,0.008235090824586419,-0.2302245197208359,-0.3851804256903097
64,2,-0.6079658284858496,1.9026915404301799,0.6774469413987676,-2.1470196060089553,-0.04976217809444739,-0.19082721124262342,0.5017058138927584,0.015770819037716342,0.32821559088728375,-1.0456355292980946,-0.8798119930135831,0.29035849534615016,0.029689070044232598
65,2,0.8976955472246841,0.7617626325775957,0.5717484682353131,-0.6774506273105886,-0.14223182957686967,1.0146699823838343,0.19276896415376862,0.09289823867057588,-0.589112288184419,-0.6781135552219018,-0.8063548709188705,-0.20344580186677616,0.12611435205768495
66,2,2.2421822635269995,1.8792912293663486,-2.026124737965077,1.403667656194062,-0.7299478606701855,1.2997321398258554,-0.3437490933521367,-0.17629064634906919,0.8053562465441454,-0.3748601965586203,-0.15451430481396727,0.179013199271669,0.11962858648328333
67,2,0.18286817748655626,2.4203186859475636,-1.0667364247221707,0.12705955222571322,-0.533069832983599,-0.06980397361321142,-1.350954475990899,-0.22567076430647548,0.40126416397709747,0.10650225928031236,0.47840064264259013,1.0964388037503068,-0.015744893570951417
68,2,-0.8105186514379262,0.21989369316463414,-0.7050166284413504,-2.4819881865990236,-0.5819183093481428,-0.316546704313857,0.3660591416575704,1.2430922412129188,0.9940478077008594,0.12766841195501902,0.2318237015461732,-0.9980892562195552,-0.28493239398568576
69,2,1.9700631868086649,1.3993358666453517,-1.234793016667563,-1.1490651139815151,4.174879906676839,0.49161077027076305,1.452448404931541,-1.0395448639048945,0.3733260011509063,0.4149429214896191,0.2308208814893417,0.72079455866375,-0.49537004857020883
70,2,-1.567793661724446,0.8824937279348999,-0.6272286135332928,-1.1685500675369433,0.9626291084973583,0.26865226568082357,0.1503975452650694,-0.7177820778962192,-1.0862736525434897,0.6694551329548812,0.22775554145211901,-0.45324781660493024,0.3785424746520656
71,2,1.6530188409215603,0.9540210185768454,1.947091704413286,0.15178075628994517,-0.7849469419260328,-0.315335432024712,-1.4874346880627005,-1.52664308019699,1.155464136446271,-0.7024076285092726,-0.7361670092838548,-0.4622716431080946,-0.33860119996554927
72,2,-0.7233319553350407,1.0606534245943224,0.0801062584670545,-0.07580559269367462,-0.17965897394884908,-0.5427855570507514,-1.5944036906549126,-1.0493396300256277,0.8955731525559633,0.38696753491744673,0.09721905630452617,-0.1761362614656385,0.2600525749645636
73,2,2.5550197688136347,-0.2594666264180438,3.3649019772981346,-0.9766444355337481,1.9752267229240112,-0.3111618840625928,-0.448128905378356,-1.127452117507918,1.108799864716895,-0.13360217592083945,1.7754471645010153,-0.1073558410235495,-0.24861954535988698
74,2,1.827412658093779,1.2842554672020607,0.45699091084476323,0.17086875325547202,0.9984058502533563,0.07763881659557788,-1.3717693741675092,0.527523314520388,-0.45036156196103894,-0.2394242410782638,1.0697517202822284,-0.4634538268313182,-1.0152929536236002
75,2,-0.8655512868166597,2.4372260631414138,-1.5589356085542145,-0.8291210084522862,0.7059884717216235,0.11204767877944678,0.4219903674921447,0.06471227096381829,-0.2156312253426638,-0.940274208542688,-0.1725429979605963,0.5980769572489727,0.2716864007777533
76,2,0.368973574091476,2.1478481530980833,-2.4424963589917366,-0.12580416600983738,0.09503176903637749,0.5553569371208854,-0.6459915322347928,-0.7377308588350916,0.7790094842620015,-0.4697738831996392,-0.383038305145555,0.22139311725787222,0.2497309563587562
77,2,-1.453277523297423,1.3794604765040297,-0.22666750049154827,-0.6943980567315426,1.064359751348741,-0.816466625419427,0.9316327303633561,0.8528823152570735,-0.04867074741173509,0.07169069991344872,0.06163227522417351,0.343859119829722,0.11848080377577622
78,2,1.2593782945905727,0.7686811725665003,-1.1808933588287809,-0.9232777877963567,2.6062411509804857,1.593301206625196,2.0080936829819906,-0.17199396078680332,-0.15606269948036464,0.5745496226179538,-0.3836014200713057,-0.1866931055442296,0.1147932171940048
79,2,0.37509228159086616,1.02415438665293,1.7894185593829062,0.9757919352287227,-0.13618502668417518,-0.6592933989111452,1.1898564336733548,-0.3323137711635276,0.7974350809329698,-0.5842839877618476,0.263448330837935,0.01330275102923922,-0.06543783632433108
80,2,0.759920260189146,3.3655599689064912,-0.3564645119004504,-0.3657013149659154,-0.15400613982521588,0.24455213290643102,-0.4187352558897184,-0.24075666039721386,0.6711658376970566,-0.6635789142014301,-0.12246801469718802,0.4848350428039591,-0.14170788592172395
81,2,1.03166775614799,1.4466289714029417,-0.36199064097754186,0.2957614728043514,-0.3050267933588881,-0.10005581789584614,-0.45800756197048675,-0.6038924202088877,-0.29024870273275105,-0.09979526392610182,-0.1330780452100996,0.3600220716302087,0.1606109270726141
82,2,-0.4934846945382556,2.374545219470906,1.3319858040701635,-1.0380506279143709,-1.0391951890581457,0.11245746786966497,-0.6445918797884597,-0.9459611718037992,-0.5289037891332584,-0.13438576524148937,0.12718993686841482,-0.04181222218276463,-0.3163825088783423
83,2,-2.531835081161833,0.08719738394204175,0.47291734964048937,0.7564675499973582,-1.124643989429972,0.18017840641760152,1.1530460312534934,-0.5008033345182336,0.20055727893997022,0.14929434981891396,0.036231533095640235,-0.19577424425713874,0.587483712660505
84,2,0.8329704356893115,1.4695251970529912,0.6083774159614244,0.5813120661008785,0.8966554261661,0.8800453715462215,-0.8307534544190774,0.5592807376014933,-1.2088581326502295,0.8402465905982196,-1.256459905845086,-0.2117867604330143,-0.3302736022156662
85,2,0.7856882825683588,2.0209257333285535,-0.25400688102000313,-0.8071412318131438,0.25812440435934836,-0.1573620642873449,-0.2692648173596125,-0.14666054447640153,0.5171534557937559,0.134352440051573,-0.6004544385740098,0.268731186797142,-0.27952629152797376
86,2,-0.8045625814106576,2.227546748604948,0.7706817962750929,-1.0108642201946911,-0.29104130611633167,0.3486104876045016,0.25620698682697474,-1.0547359247470272,-0.06573869226666103,-0.49034874684908103,-0.021025688110075352,-0.08916763068109713,0.14967251401235296
87,2,-0.5564728816117427,2.366310351129015,2.3011202198340097,-1.0324080066486097,-0.4755979033483898,-0.23689882651203364,-0.49031762085305136,-0.8640362910533956,-0.41362986757694975,-0.14366532821532324,0.3165805432820208,0.7887666647616159,-0.3101848187986733
88,2,-1.1119742957847085,1.797177569269313,0.9565549808027968,-0.3303519080379427,-0.7294523642295798,0.05778307683731034,0.08752725553883954,0.3116384719018634,-0.9021306868726943,0.30468658395953585,0.31962726236501604,0.1517950974934612,0.007532142470951685
89,2,-0.554159612143513,2.650064521187376,0.8467383504019524,-0.0031886465375461485,-1.3424700549102735,0.040500040869833644,-0.7590597791440754,-0.4348815432252014,-0.3496784453273201,0.6777924954900185,0.6171322072176756,-0.07366349816771028,-0.3755495517997116
90,2,-1.3454898170279466,2.1120436479411024,-0.04751827748653969,-0.46493902970919915,-0.8880409160608248,0.49519284306522016,0.7541626646799844,-0.12142221102292393,-0.6162983515156276,0.19121850414432914,-0.6682354554653186,-0.17967266920018277,0.13125768066516572
91,2,-1.5600818020493494,1.84700434401334,0.7788699322618047,-0.6033773359227952,-0.5150125764659572,0.7299581954321912,0.13725448901315523,-0.44243310579704376,-0.41451327929302983,0.5915943414770164,-0.4072296285128524,0.44336154207423273,-0.006230548048380047
92,2,-1.927119437807473,1.5551086818038762,-0.08902355134649248,-0.5210993979488705,-1.1703960239662432,0.9034129827743402,0.5373556618537969,-0.5464923323605226,-0.131852777451877,0.7330222563189103,-0.43380462128059505,-0.4585764858949852,0.5025225858785444
93,2,0.7445656116900051,2.3064255593163105,0.11435718085253053,1.2450266331682174,0.26219724751175877,-0.5152344754081475,0.2081948039520098,-0.19516652739420007,0.10341896968557174,-0.4049917381492846,-0.7941813802151679,0.25602395173406556,-0.36241601343972996
94,2,0.9547620944352145,2.2172737712655812,0.14204408437136987,0.3131595614713462,0.9414868900692571,-0.38174962555722397,-0.7559767653265896,0.8999133006451712,0.0963402831097209,-1.2707278154338777,-0.17224588331091267,0.21580577766017595,-0.674226299339812
95,2,2.536709431109619,-0.1687978642677518,0.786478429414637,-1.0299317288080005,3.7205838842896903,1.3478763849955588,2.6366328740389684,-0.531610576970713,0.1577123266650587,0.5281866128074082,0.23808454159930822,-0.4547191709241536,-0.10609020496784292
96,2,-0.5424224800102099,0.3678887757600091,1.3052140794666653,-1.2744036190701307,3.337376376732807,-0.7074981204699181,-0.3508692641104919,-0.11334224725357762,-0.9953180038653363,0.05665155743921606,-0.6757643296070344,-0.21512013816209183,-0.487466340438638
97,2,1.0281494640510254,2.558352543020442,-1.083334217908027,0.26311875832166665,-0.18930999418935326,0.40155258803138355,0.013416035695016568,0.03517303384451388,0.18023802870594183,-0.7274923841164124,-0.3930297805203765,-0.06648874424817279,-0.02834007100618189
98,2,2.245574924586746,1.4287111575172788,-0.22956068064780477,0.918579892826997,-0.2392480417868987,1.0394156756940467,-0.8691743742631992,0.7582036889230422,0.23585297115030834,-0.9402642590312434,0.4963215993732369,-0.5834300168506396,0.29440594625669736
99,2,1.4062491551155014,2.1600983908598645,0.7467898062862153,1.1922479994339221,-0.7078604231133887,0.608901016089344,2.08619382889341,-0.5853862467808673,0.12788941137932083,-1.2077966494558723,-0.6164293031362709,-0.08174612483683484,-0.01141887221495761
100,2,0.7954758507157222,2.3702625766073173,-1.5637015168236865,-0.03471842371141368,0.6153555528519862,-0.2868367381240344,0.28129945502024156,-0.3779260487374203,0.11269062548178123,-0.6144250070933889,0.9679390005709513,0.6391003711760381,0.173276809536184
101,2,-0.5479859240749373,2.2866781982784805,-1.4947189004373143,-0.30617498812804417,0.32509388339510287,-0.150333741098233,-0.41063250245701055,-0.797512447111971,-0.0513538480574286,0.6665710583272892,-0.030961167135778723,0.12204638096603346,0.1253669188847253
102,2,-0.1607203670674601,1.1612076939134783,1.0008897129888508,0.6598249368633862,0.30033792446975177,-0.6586620402646549,-0.49662044291676605,0.958527249016428,0.3236436777097677,0.6010409878349311,-0.024825749205245867,0.04794746166997587,-0.22515396306842284
103,2,-0.6579389747400411,2.672422602319141,-0.7627691880751541,0.4686328165802475,0.14406399737800732,0.4523707135586575,-0.047534646932549704,0.44176041235673125,0.23067169232605791,-0.10307943773599658,0.6739478614667038,-0.5661242495738223,-0.22894484643635696
104,2,0.39125073677743083,2.092828091701543,-0.4705227194456849,0.43582715859082155,-0.3029558524524864,-0.24861284461431396,-0.45192163492634363,-0.39881108986885977,0.17486375564781811,0.7372634624211198,0.7194262187787541,0.5105416113597041,-0.12790273236674105
105,2,-1.7675131353743996,1.712457831201066,0.9443692221388208,0.2587825914122184,-1.0019989721146851,0.10572252275162716,1.0760055879344859,0.40962759192135456,0.7645615355076267,1.4181035488525404,0.012799223159603332,0.5501176275242351,0.5591197499102967
106,2,-0.36523706738607475,2.1632510261127957,-0.47997029609750447,0.3972797830485991,-0.46773189813002713,0.19393953944776585,-0.2396000265917612,-0.28560806828312235,-0.3372417152889503,0.6485757380492955,-0.2773005223245178,0.6048618137981666,0.3429063212866539
107,2,-1.616113709824723,1.351770206384974,0.2863512381933844,-0.02595134036921258,-0.5677509908063006,0.5795329761649178,-0.07739754245576287,-0.6874718261840388,-0.05371294756406771,0.9190452167697706,-0.25906634399751965,-0.228358493732503,0.6698805529295843
108,2,0.082303614866062,2.299747276478885,-0.4622709783818495,0.8878465149144911,0.33961852849753016,1.1093752097689664,0.2097749263895447,0.30118214818992367,0.5589911597109024,0.8078317411659525,-0.06377413145683151,-0.18013692742564413,-0.03635976091838382
109,2,1.5738354698144696,1.4579216680207845,1.774639908375691,0.6118584446957042,0.28717165944787854,0.7950753571093649,-0.2804463499885867,0.6153174951404998,-1.261277425102942,0.31142710290393316,-0.5956073481169245,-0.23301253177129652,-0.20101246401745476
110,2,1.4165732645275273,1.4142173002300027,0.13888405422690925,3.4208895418279863,2.190704621795702,1.4530172194800697,1.6772946741325403,0.03488410271974072,-0.5489503662189008,-0.19269585914902593,0.742466550436624,-0.6214695047891213,-0.3600071682582254
111,2,-0.2779187781052679,1.9251375066366263,0.07844925710098904,0.9259088629339516,0.2607602049219674,-0.6780842105922832,-0.8452094953652757,0.28824506908462605,0.5923749124432093,-0.1749766683422901,0.08933369649548151,-0.59306552785156,0.06509808716655012
112,2,-1.2994792910085773,0.7610255518421406,1.9939717552925118,-1.7209811985987717,-0.7328390219216642,-0.4853507665272528,1.1300887866409677,0.8425817256314069,-0.7002715570258742,-0.5048789599217169,-0.6873065881388379,0.7079421148752832,0.37796205881827327
113,2,-0.45578614759048813,2.2630318660898427,1.0583534793506029,-0.8350764039913726,-0.322540991810149,1.0002949712875957,-0.5030120578962615,0.7389496895821299,-0.4220802267788902,-0.5055404434920212,-0.15706852680752265,-0.32689965091322054,-0.20308388328263957
114,2,-0.4927957293903609,1.9335906222276498,1.3202139072711139,-0.06142397862369463,-0.8465195986171479,-0.033084232234943146,-0.9793704755081214,0.9557604828686401,0.37171653482369943,0.3658506218042369,0.11029868289380824,0.01850187570443315,-0.01417842132675145
115,2,0.48071836092713316,3.8608927330254668,1.3404898622500956,-0.897379093403772,-0.9297350550845533,0.9666431434936592,1.3991610514283277,-0.676882069302124,-0.09692066220170474,-1.3503777477879266,0.29272308574224887,0.7212221268233416,-0.2712693741385356
116,2,-0.252177515125455,2.813555671568149,-0.30178847543346715,0.4566788651099308,0.37004340218710857,0.048128758660364805,-0.566567453784162,-0.14188223644114936,-0.16079059503463783,0.8414559510342573,0.4718268486606884,0.2928895101958143,-0.18632786966461518
117,2,-0.10692601227162284,1.9234960907704275,0.6882068933945534,-0.2729525177659504,1.0680839938618585,0.037116515143342556,-0.012666328241186168,-0.3595101038458795,0.8152498918854125,0.4531815319991168,0.0034551879479837824,-0.00981664939746948,0.303883014663496
118,2,-2.4261686716860034,1.253604771771951,-1.8976742928101842,0.8672853971794844,-0.46682577460849073,-0.8256269697856448,0.2412624162299436,0.6361604217574296,0.1263882239217863,0.17428110874865746,-0.3249431616920232,-0.30210685559975337,0.3084463570611815
119,2,-0.5495393545590227,2.2159107323440788,-0.3552267780722141,1.359484943306307,0.21610598873698922,-0.570806650491302,0.9632105145170972,-0.19309950733267717,-0.4875426363985792,0.4643149320210722,0.31270902694288993,-0.016120666602748163,-0.0985491059482606
120,2,0.7375414126927708,1.4049933496204523,1.122179956923341,1.2017215552019982,0.42871188805297633,0.10554846956140769,-0.20444946088358162,1.3895468315702135,-0.6451058098636404,0.2110250772768042,0.37910030772768716,0.1403313326006754,-0.0634867577905009
121,2,1.3325627337541783,-0.2526243082906243,5.330351895741045,0.23836511320781134,0.23583727971795793,0.798726191416697,-0.5987130127570963,1.7737746801226777,0.4148620018047754,-0.25110261226951247,0.14443556349281556,0.8464352774431565,1.5291092081203943
122,2,-1.1737759151563543,0.6620991375369105,3.001754299054594,1.1628566772514968,0.33179273880826265,-1.1763072164169404,0.6108274563027806,-0.24954931532151192,0.28614623179804777,0.1705629380728146,-0.02710068988116464,-0.0012636114954196927,0.14509822120950297
123,2,-0.4610344850270418,0.616548968783823,0.4820824695114952,3.566663227848726,0.12845907992922145,-1.5109587387645484,0.7606159983465968,-0.24864609012263242,0.471212189685689,-0.23261026953115535,0.2004849515120396,-0.3576277120154898,0.3152100072457089
124,2,0.9757216854954225,1.4415041884527886,1.477070336713939,3.7796730867633337,0.49840431968313653,-0.11895335309486774,0.1802605760340242,0.09988871025780446,-0.7902574715329386,0.017256090358669242,-0.45450414590121746,0.029152427979506585,-0.16032931890572438
125,2,-0.09653740587128484,2.1040626846318853,0.43360297362807887,1.051728478965351,-0.31056835035424485,-0.10982499624592806,-0.5317645575348352,0.8394100964814842,0.44178167935331764,0.2937038346282562,0.38737697431328055,0.04976102531002549,0.2962612353084895
126,2,0.03837888372064135,1.263198776897543,0.6856437939559844,1.26930363029728,-0.35205906349368665,0.9711685591773062,-0.8144955845153399,0.9774745301748647,0.5182308418086667,0.44504515231698993,-0.01871944964638958,-0.5665258537512267,0.6462319381754056
127,2,-1.5926657820370078,1.2047451325886789,3.351720753097512,-0.21866073710058576,-0.6389610008666169,0.8636983920508202,0.13822547575946378,-0.11254046549836474,-0.16380803751274675,0.23193078979243115,0.3508697944981968,-0.36375101673713855,0.470529941981484
128,2,-0.4782159263051303,1.9333868080701602,1.2928605143651206,0.6850437986787203,-0.06031507775016224,0.6822365721176896,-0.40942099570076385,-0.3002508090783195,0.41421535964799727,0.6075764002930362,0.05959731787552425,-0.6106771513435478,0.4609693701448744
129,2,-1.7877903309086522,1.1470524071619204,0.7805981992754542,1.4746192417971207,-0.50279963543327,-0.9926946553355452,0.31761164133684183,0.252378301776558,-0.7231242536156393,-0.08521767182362255,0.4181160581284557,-0.25827743703511846,0.10581957148072982
130,3,-1.3233685920485925,-0.1699099364946492,-1.1766940425306993,-1.4974855784438388,2.298805671074657,-0.15242296183921147,-0.8234944537227757,0.27427751988199445,-0.17352483062646692,-0.027375424494621717,-0.5398529518235398,-1.022194843895916,0.3413263123852222
131,3,-2.377793364818573,-0.3735289252221484,-0.721786521594591,-0.3073280478837252,1.3358636736178173,-0.7180066835337198,-0.9272044497252808,-0.14108922194378462,-0.36139737133069344,-0.4721742589560529,-0.5495752677551531,-0.39315116784535675,0.47046784970219757
132,3,-2.928678650894629,-0.26311960122796935,-0.1671682543857734,-0.4076235646003552,1.0847461227160142,-0.09401052705001788,-1.5991420855640033,-0.5714267140032897,-0.3504491434089569,-0.0012410446124436713,-0.005471418736203657,-0.5194313399376302,0.6142253707046779
133,3,-2.1407722719692472,-0.36721907092617495,-0.4520261897970701,0.04839105967847737,1.7217751254799463,-1.0287377514115252,-1.0057068780744571,-0.2865828449180438,-0.4185973893821422,-1.058180055385553,0.05941173536872758,-0.6876248510872711,0.18878252329267062
134,3,-2.36320317558219,0.4583418817494654,-1.0983016114601012,-0.8543672676919823,-1.0640051087033153,1.4267161783644564,0.33858876324494647,0.8463026591361479,-0.31499250927264927,0.3680179825340957,-0.016630032669152167,-0.6660466131495678,-0.470567632291498
135,3,-3.055223150610537,-0.3524187045567082,-1.0960323283588675,-0.73831543279361,-0.7356329018468011,0.8386907512546452,0.7145543015378172,0.7468526525072745,-0.09924400450245582,0.1916051065268591,0.3995979062340573,0.1671955047017255,0.07349906113535276
136,3,-3.9047389833163133,-0.1541476873020331,0.22120381041975087,-0.009042818745782352,-0.36170840036442253,-1.1216471760100841,0.8948169842386635,0.20248024172244936,-1.1990876380753719,-0.44882286207306055,0.19354764741674718,-0.5758570848677765,0.02839004037419811
137,3,-3.9253903353329624,-0.6578315694429221,1.7073990512510235,0.5439493847992597,-0.5358133499578563,-0.8881179062308411,1.4618477613524716,-0.06247904742305007,-0.06447091319340856,-0.5951864774709611,0.43996443087913484,-0.0704101095625457,-0.16275336534394266
138,3,-3.0855720889370413,-0.34786148374548376,-1.0239429930356019,-0.0711485065606948,-1.056369964111501,-0.38586885330085613,0.7702159210219863,-0.1459202687369528,0.5266190705146743,0.1385634656286851,0.12625362426702152,-0.08394656971059132,-0.14108445382793455
139,3,-2.367792369689852,-0.2911590268624995,1.2384208960007421,-0.7466991907304387,-0.41441011158712715,-0.42564678422472935,0.07326954804310908,0.3308079006707629,0.5374551847192763,-0.08227013990114343,0.5417923302667039,-0.21995306679837479,-0.758900946227189
140,3,-2.770996299384063,-0.28599810652203594,0.6079551553881278,-0.8768733313434667,-0.5528643720015415,-0.7336892298809362,0.010183486792775724,0.4335914686666111,-0.1517753276035473,0.7414111977326212,-0.41599904632548285,0.05265909747948664,-0.3881970571264233
141,3,-2.280129313390217,-0.3714600005710776,-0.9689098538597637,-0.37039290200024305,-0.3371413329390814,-0.9102510732039005,-0.9620838025054934,-0.14985747474177907,-0.2413787734959448,0.9221599905599008,0.07943326403226487,0.23706957997556785,-0.28626764878734595
142,3,-2.9772350643781795,-0.487841765231782,0.9442892051961244,-1.0333107399973118,-0.5835007573389869,-1.224220128963269,-0.23096954746741516,-0.31974990374119777,0.5087047479793511,0.2621770750708436,-0.3510641806555167,-0.24568204198206822,-0.2784057225301421
143,3,-2.36851340595321,-0.4809769389812348,-0.25217264526288674,0.6692613577843305,-0.6263251169991383,-1.5559512853405055,0.9693313604468707,-0.37351874406299157,0.5139222740087506,-0.5083390921821158,-0.07602890661965264,-0.2805287206187857,-0.3889978990043653
144,3,-2.2036492952070366,-1.1567893401571794,-1.241622757025274,0.3179413138105941,1.6839431962976839,-0.3301432491975633,0.00933142174996603,0.2860874469569177,-0.9975773687895988,-0.2099508326262409,0.495547412704524,0.9911227392102062,0.03742886654107179
145,3,-2.6182352769686186,-0.5615766239019596,-0.8535533102775806,0.2749085420714298,0.6492806653363102,-0.44606133175890117,0.5750300920992982,-0.3979081690177992,-0.3225698337704129,0.7595276632421266,0.6392828136888493,-0.863363524222538,0.027569339910878083
146,3,-4.268597576898806,-0.6478434752726763,-1.4540951336491028,1.1249908381987286,-0.35854284366379413,-1.6120963636941972,-0.07510884909595271,-0.761918633715405,0.09846655006338835,-0.023777567627215838,-0.5946986167471129,-0.6579358573596606,0.2899834701112059
147,3,-3.5725635985298316,-1.269122706771212,-0.11047240820958684,1.046955708065866,-0.5220791768414436,-0.5984853453318119,-0.18793567941534142,0.42016073318833386,-0.42290248763578414,-0.15050948410649836,0.22145341373486,0.2802910176559667,-0.1561670434439966
148,3,-2.7991676039614424,-1.5661159617298281,-0.47119873943670904,0.625593725471392,-0.25944114457359363,0.5516866707094517,-0.44819985382484634,0.12252734307527831,0.044501503867076256,0.021806545645107457,0.1782316725337948,-0.15806298656506235,-0.23500304103152217
149,3,-2.8915027505285344,-2.0353156266412147,-0.49456470252002216,0.4698302805618495,1.4348297265127201,0.17804117842932102,-0.3924129113752749,0.045929094404176904,0.09022286242751346,-0.6088096540786301,-0.07636390867982476,0.35186800024157366,0.7482138983759281
150,3,-2.314208870856335,-2.3497377471645446,0.4364505689052776,-0.05211277166127583,2.228141873039456,0.07656256835140424,-1.2698565858363986,-0.4651562765524699,0.3037924593530298,-0.42175213887647495,-0.581149246210182,-0.17674483124103735,0.7379429754606636
151,3,-2.5426584127723646,-2.0395298219826046,-0.3113896059160807,0.38588315213564317,1.8423256365184257,0.9772723793531616,-1.6226144630363522,0.26890470146445156,-0.14738643955885414,-0.43143804423015214,-0.41262290106339927,0.5908003972569961,0.4632703669328207
152,3,-1.8074427057272562,-1.5233487620986803,1.3587568913464911,-0.18886314575017082,1.7444662671839892,0.9675621409339417,-1.5375136561991418,-0.07926711079269495,0.36773704718435196,-0.30221623135829334,-0.45790103700277063,-0.9988591539595003,-0.22263010957232132
153,3,-2.752380506594457,-2.132915648388708,-0.9619152406645815,0.6665059784932824,-0.4756998004855312,1.7970441950578306,1.0155413409513112,0.370713380357914,-0.013269080707583378,0.09570921225642422,-0.12097096074858481,0.44261118923050646,-0.057022536746749695
154,3,-2.729451046595337,-0.4087332833069762,-1.187056140951231,-0.6611800945682843,0.45774553145335656,1.8765966020087232,0.023272553411034243,0.39208974881394176,-0.0022830667087978553,0.8354692889256904,0.35868921124655445,-0.012170831695774882,0.08639028677951947
155,3,-3.594728569985811,-1.7973142097414374,-0.09377234080821274,1.2652712949285638,-0.607750939415251,0.19139289939489104,1.4144328227648275,-0.19731873060530708,-0.17417245209012586,-0.18606633978600498,0.5777154870156036,-0.0036100220234496377,-0.008642082870876146
156,3,-2.8816970750128132,-1.9198030788774758,-0.7801219251139306,1.3209990209597458,-0.5696360796934196,0.4293944045606278,0.2172429241162613,-0.13661678879319275,0.34835884039622766,-0.1447803442261172,-0.6595787284226692,0.04236773563435934,-0.1760742558402158
157,3,-3.3826141280409807,-1.308186152222578,1.5975195578101928,-0.4814839356892369,-0.6689842600937349,0.8013051016944336,0.0955669334040979,0.10746404020791399,-0.42910199057174203,0.3095172722445282,1.3155346871141063,0.02071445364960373,-0.21538335686676655
158,3,-1.0452334187292036,-3.5052019369175107,1.1567754411202809,0.9326983641895057,-0.8969184352720134,3.2750428051619154,-0.6406561432704916,-0.5208667777190912,0.9406135499531312,0.3508944914574697,-0.28104587359404465,0.08801441899536908,-0.8128865687113619
159,3,-1.6053836879899748,-2.399868418570091,0.5470166289315384,0.7521878522557115,-0.9924073839586093,2.9112647299253154,-0.7090559519103415,0.13714079482392416,0.1890156711154343,0.0797222139335353,-0.36573123575461464,0.08549783453563893,-0.8102755316136631
160,3,-3.1342895119109735,-0.7360846376879293,-0.09074274950819741,0.9778893369396732,-0.4086607359503837,0.39780018687446833,-0.07382297865279404,1.0901152493077628,-0.1553595724735175,-0.6193075201549261,0.46598978568755517,-0.11285950042100841,-0.31793760075633914
161,3,-2.2338554579181786,-1.1721587688033697,-0.1010917647512017,-1.1620008512831224,-0.26370541085404337,-0.7987854835170894,0.5383640323408417,-0.13223123426356176,0.5540274382441771,-0.24612673983999012,-0.2214369726943366,-0.10183950994874129,-0.4372759353295997
162,3,-2.839663426141401,-0.5544798447367412,0.8019530048486757,-0.8953623564285083,-0.25408457461053774,-0.2877563089850199,0.8653214292195699,0.25127528252482473,0.31771435802564085,0.2519116416802223,0.006822215625907938,0.31574879411415674,-0.22860320337287995
163,3,-2.590190440046338,-0.6960021984672011,-0.8824502348460967,-0.2734573572720806,0.7700632411353519,-0.7178175530162474,0.2717337243387851,0.25980866643458134,-0.33853713184261597,0.22416992037028127,-0.21206121268351796,-0.16832729977374392,0.057330492697687536
164,3,-2.941003155932168,-1.5509339655630971,-0.9806344749737644,0.015436801559930956,-0.3630579674984461,0.48982425824297104,-0.9831617089390744,-0.7835869144812827,0.3553261363099026,0.007632985954423196,-0.07348335371915626,0.476401025543271,0.10099127406780126
165,3,-3.5201024794036875,-0.8800442967824436,-0.46471821342984976,0.5791558952457233,-0.6670779730305852,-0.45752368802543814,0.5207516900423493,-1.0721033405964806,0.4850711105060768,0.03418022803801199,-0.006251149872253342,0.12013643561646878,0.0808598445101382
166,3,-2.3993422836378624,-2.5850640194576635,0.4270216339807479,-0.1838165671141081,0.4464018776088038,0.5679042785199714,0.03570135985046454,-0.6713512455205429,0.16753442104876787,-0.8634197497442598,-0.09717099283062482,0.797403434028297,-0.09023869118588064
167,3,-2.920845372454543,-1.270861998614688,-1.2099451613079386,0.2944853391210953,-0.2665978869204795,0.38014077871934315,-0.6425082972495753,0.0005513563769947904,-0.5011174917631666,-0.5983364995434163,0.07542555294965854,1.076619203596665,-0.11267349066001367
168,3,-2.1752765844382633,-2.0716933143428586,0.7616340731767599,-0.38849741727592696,0.35886129735695155,0.6277968486837119,-0.7512032931580398,-0.9234686973835253,-0.10952458169969097,0.21602496557385267,-0.22458042115449448,0.1526518324636478,-0.148077986367758
169,3,-2.3742303679974204,-2.581385653064324,1.414055149500831,0.5868465118875059,1.124823815914325,-0.9808780254197587,-0.9278552742007292,-0.2931778502391408,-0.0025185675210148075,-0.7067957958330001,-0.15196218887752325,0.2508945834204587,-0.3949136553997779
170,3,-3.202583112152086,0.25054235448990353,-0.8447462243359557,-0.21645476174096895,0.6073818633142757,-0.3942655555844868,-0.29102834430369373,0.581266587675988,-0.5954435617735772,0.1544897002229716,-0.19861548450945934,0.24475448303018793,-0.02383056369191414
171,3,-3.667572937551173,-0.8453631760105307,-1.3356525155175534,-0.1248237523169363,-0.4847445265156029,0.855545817902423,-1.0227549486074867,0.6741852287255892,0.020565243973596086,-0.027260063870291985,-0.1286271900170442,0.8023283579467756,0.025895626736334453
172,3,-2.4586203249541345,-2.1876272694783943,-0.9161964800313112,0.017974291678793372,-0.6992370576869062,0.678939794989935,-0.8268666283914551,-0.2956820473786353,0.3259168604122431,0.27338753493323636,-0.607010545439211,0.0983633056155488,-0.2584692014684034
173,3,-3.3610430460834086,-2.210054840198065,-0.3416058831849903,1.055549260853208,-0.5725488077111138,-1.1056694591750207,0.9557198622526608,-0.14568650334331204,-0.02243437027920649,-0.3032618956042976,0.13883600020102219,0.17030549798726619,-0.1141047343497259
174,3,-2.594636692277983,-1.7522863612972366,0.20699744002713222,0.34851330730216523,0.25434569901934656,-0.026390931823997973,0.14648034334317991,-0.550873093739413,-0.09769379277486212,-0.20548114909273332,0.25747145220928386,-0.2786450292607044,-0.1868442132508434
175,3,-2.6703068454902885,-2.7531328740068095,-0.9382950588411649,0.3111570096562606,1.2677787511428322,0.2722997984371898,0.6773247538308075,0.04689152892663511,0.0012187090359794914,-0.24729970903264356,0.5110504128298131,0.696800859788708,0.07187494421355198
176,3,-2.3803025432632827,-2.2908843696406844,-0.5491471188453654,-0.6863484409670154,0.8116656077061654,1.1754671255597529,0.6321919328789716,0.3897293941550381,0.05728607539943062,0.49010696866426384,0.2989785852585675,0.33886475686132467,-0.021804253316769034
177,3,-3.199732103661903,-2.7611307473383158,1.0110615806458112,0.5952241301754289,-0.8926744603528465,0.29525929307068594,0.005725107256543195,-0.2920897840143633,0.7395741728677374,-0.1176371785149067,-0.2293174540813724,-0.18825691270517114,-0.3230534254805805




Python files

pca.py
#################### Principal Component Analysis (PCA) in Python
#
#
########## Run this script on Terminal of MacOS as follows:
#
#python3 pca.py (csv data file)
#(A second argument for the classification column name is reserved but currently unused; see the arguments section below.)
#
#For instance,
#python3 pca.py wine.csv
#python3 pca.py wine4data.csv
#
#
########## Reference
#https://qiita.com/maskot1977/items/082557fcda78c4cdb41f
#
#
########## Data Source
#https://raw.githubusercontent.com/maskot1977/ipython_notebook/master/toydata/wine.txt


########## import
import numpy as np
import pandas as pd
from pandas import plotting
import matplotlib.pyplot as plt
import sklearn
from sklearn.decomposition import PCA
#from sklearn.preprocessing import StandardScaler
import matplotlib.ticker as ticker
import sys


########## arguments
dfname = str(sys.argv[1])    # e.g., wine.csv
#tgname = str(sys.argv[2])    # e.g., class (classification column name); reserved, currently unused


########## load data
#df = pd.read_csv("wine.txt", sep="\t", index_col=0)
df = pd.read_csv(dfname, sep=",", index_col=0)

#print(df.head())
'''
   class  Alcohol  Malic acid   Ash  ...  Color intensity   Hue  OD280/OD315 of diluted wines  Proline
0      1    14.23        1.71  2.43  ...             5.64  1.04                          3.92     1065
1      1    13.20        1.78  2.14  ...             4.38  1.05                          3.40     1050
2      1    13.16        2.36  2.67  ...             5.68  1.03                          3.17     1185
3      1    14.37        1.95  2.50  ...             7.80  0.86                          3.45     1480
4      1    13.24        2.59  2.87  ...             4.32  1.04                          2.93      735
'''


########## show data
#print(df.iloc[:, 1:].head())
'''
   Alcohol  Malic acid   Ash  ...   Hue  OD280/OD315 of diluted wines  Proline
0    14.23        1.71  2.43  ...  1.04                          3.92     1065
1    13.20        1.78  2.14  ...  1.05                          3.40     1050
2    13.16        2.36  2.67  ...  1.03                          3.17     1185
3    14.37        1.95  2.50  ...  0.86                          3.45     1480
4    13.24        2.59  2.87  ...  1.04                          2.93      735

[5 rows x 13 columns]
'''
#
#print(df.iloc[:, 0:].head())
'''
   class  Alcohol  Malic acid   Ash  ...  Color intensity   Hue  OD280/OD315 of diluted wines  Proline
0      1    14.23        1.71  2.43  ...             5.64  1.04                          3.92     1065
1      1    13.20        1.78  2.14  ...             4.38  1.05                          3.40     1050
2      1    13.16        2.36  2.67  ...             5.68  1.03                          3.17     1185
3      1    14.37        1.95  2.50  ...             7.80  0.86                          3.45     1480
4      1    13.24        2.59  2.87  ...             4.32  1.04                          2.93      735

[5 rows x 14 columns]
'''
#"class" is the column of class labels (1, 2, and 3); it is used here only to color the points
plotting.scatter_matrix(df.iloc[:, 1:], figsize=(8, 8), c=list(df.iloc[:, 0]), alpha=0.5)
plt.savefig("Figure_1_scatter_matrix_raw_data.png")
plt.show()


########## Standardization (every column excluding class is scaled to mean = 0 & standard deviation = 1)
dfs = df.iloc[:, 1:].apply(lambda x: (x-x.mean())/x.std(), axis=0)
#dfs = StandardScaler().fit_transform(df)    # see the note after the printed head() below

#dfs.head()
#print(dfs.head())
'''
    Alcohol  Malic acid       Ash  ...       Hue  OD280/OD315 of diluted wines   Proline
0  1.514341   -0.560668  0.231400  ...  0.361158                      1.842721  1.010159
1  0.245597   -0.498009 -0.825667  ...  0.404908                      1.110317  0.962526
2  0.196325    0.021172  1.106214  ...  0.317409                      0.786369  1.391224
3  1.686791   -0.345835  0.486554  ... -0.426341                      1.180741  2.328007
4  0.294868    0.227053  1.835226  ...  0.361158                      0.448336 -0.037767

[5 rows x 13 columns]
'''
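#note (a sketch, not part of this script's flow): pandas .std() uses the
#sample standard deviation (ddof=1), while StandardScaler uses the population
#one (ddof=0), so the commented-out StandardScaler line above would give
#slightly different values; it would also need df.iloc[:, 1:] rather than df
#to skip the class column, e.g.:
#dfs = pd.DataFrame(StandardScaler().fit_transform(df.iloc[:, 1:]), columns=df.columns[1:])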


########## PCA
pca = PCA()
pca.fit(dfs)
feature = pca.transform(dfs)
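#fit() learns the principal axes and transform() projects the standardized
#data onto them; the two calls can be combined into one (a sketch -- the pca
#object stays fitted, and explained_variance_ etc. are used below):
#feature = pca.fit_transform(dfs)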


########## PC score
#print(pd.DataFrame(feature, columns=["PC{}".format(x + 1) for x in range(len(dfs.columns))]).head())
'''
        PC1       PC2       PC3       PC4       PC5  ...       PC9      PC10      PC11      PC12      PC13
0  3.307421 -1.439402 -0.165273 -0.215025  0.691093  ...  0.639638  1.018084 -0.450293  0.539289 -0.066052
1  2.203250  0.332455 -2.020757 -0.290539 -0.256930  ... -0.307978  0.159252 -0.142256  0.387146  0.003626
2  2.509661 -1.028251  0.980054  0.722863 -0.250327  ... -1.174521  0.113042 -0.285866  0.000582  0.021655
3  3.746497 -2.748618 -0.175696  0.566386 -0.310964  ...  0.052397  0.238739  0.757448 -0.241339 -0.368444
4  1.006070 -0.867384  2.020987 -0.408613  0.297618  ...  0.325900 -0.078146 -0.524466 -0.216055 -0.079140

[5 rows x 13 columns]
'''
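#a quick check of what transform() does (a sketch): it centers the data by
#pca.mean_ and projects it onto the eigenvectors, so the following should
#reproduce "feature" up to floating-point error
#feature_manual = (dfs.values - pca.mean_) @ pca.components_.T
#print(np.allclose(feature, feature_manual))    #expected: True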

dfcls = df.iloc[:, 0]
dfspc = pd.DataFrame(feature, columns=["PC{}".format(x + 1) for x in range(len(dfs.columns))])
dfclsspc = pd.concat([dfcls, dfspc], axis=1)
#print(dfclsspc.head())
'''
   class       PC1       PC2       PC3       PC4       PC5  ...       PC8       PC9      PC10      PC11      PC12      PC13
0      1  3.307421 -1.439402 -0.165273 -0.215025  0.691093  ...  0.064956  0.639638  1.018084 -0.450293  0.539289 -0.066052
1      1  2.203250  0.332455 -2.020757 -0.290539 -0.256930  ...  1.021534 -0.307978  0.159252 -0.142256  0.387146  0.003626
2      1  2.509661 -1.028251  0.980054  0.722863 -0.250327  ... -0.343248 -1.174521  0.113042 -0.285866  0.000582  0.021655
3      1  3.746497 -2.748618 -0.175696  0.566386 -0.310964  ...  0.641783  0.052397  0.238739  0.757448 -0.241339 -0.368444
4      1  1.006070 -0.867384  2.020987 -0.408613  0.297618  ...  0.415528  0.325900 -0.078146 -0.524466 -0.216055 -0.079140

[5 rows x 14 columns]
'''

dfclsspc.to_csv('dfclsspc.csv', header=True, index=True)


########## PCA plot (PC1 and PC2)
plt.figure(figsize=(6, 6))
#plt.scatter(feature[:, 0], feature[:, 1], alpha=0.8, c=list(df.iloc[:, 0]))
plt.scatter(dfclsspc.iloc[:, 1], dfclsspc.iloc[:, 2], alpha=0.8, c=list(dfclsspc.iloc[:, 0]))
plt.grid()
plt.xlabel("PC1")
plt.ylabel("PC2")
#plt.legend()
#plt.legend(list(dfclsspc.iloc[:, 0]))
#print(dfclsspc.iloc[:, 0])
#

### comment out the following two lines if you do not want to label each data point with its class
for x, y, name in zip(dfclsspc.iloc[:, 1], dfclsspc.iloc[:, 2], dfclsspc.iloc[:, 0]):
    plt.text(x, y, name)

plt.savefig("Figure_2_PCA_plot_PC1_and_PC2.png")
plt.show()



########## show all PCs
plotting.scatter_matrix(pd.DataFrame(feature,
                        columns=["PC{}".format(x + 1) for x in range(len(dfs.columns))]),
                        figsize=(8, 8), c=list(df.iloc[:, 0]), alpha=0.5)
plt.savefig("Figure_3_PCA_plot_all_PCs.png")
plt.show()


########## Contribution ratios
# per-PC contribution (explained variance) ratios; their cumulative sum is plotted below
#pd.DataFrame(pca.explained_variance_ratio_, index=["PC{}".format(x + 1) for x in range(len(dfs.columns))])
#print(pd.DataFrame(pca.explained_variance_ratio_, index=["PC{}".format(x + 1) for x in range(len(dfs.columns))]))
'''
             0
PC1   0.361988
PC2   0.192075
PC3   0.111236
PC4   0.070690
PC5   0.065633
PC6   0.049358
PC7   0.042387
PC8   0.026807
PC9   0.022222
PC10  0.019300
PC11  0.017368
PC12  0.012982
PC13  0.007952
'''
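#a quick way to count how many PCs reach a target cumulative contribution
#(a sketch; with the ratios above, the first 5 PCs give about 0.80):
#n80 = int(np.argmax(np.cumsum(pca.explained_variance_ratio_) >= 0.8)) + 1
#print(n80)    #expected: 5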


##### show cumulative contribution
plt.gca().get_xaxis().set_major_locator(ticker.MaxNLocator(integer=True))
plt.plot([0] + list( np.cumsum(pca.explained_variance_ratio_)), "-o")
plt.xlabel("Number of principal components")
plt.ylabel("Cumulative contribution rate")
plt.grid()
plt.savefig("Figure_4_PCA_plot_cum_contb.png")
plt.show()


########## PCA eigenvalues
#pd.DataFrame(pca.explained_variance_, index=["PC{}".format(x + 1) for x in range(len(dfs.columns))])
#print(pd.DataFrame(pca.explained_variance_, index=["PC{}".format(x + 1) for x in range(len(dfs.columns))]))
'''
             0
PC1   4.705850
PC2   2.496974
PC3   1.446072
PC4   0.918974
PC5   0.853228
PC6   0.641657
PC7   0.551028
PC8   0.348497
PC9   0.288880
PC10  0.250902
PC11  0.225789
PC12  0.168770
PC13  0.103378
'''
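#these eigenvalues are the variances of the data along each PC. A quick
#cross-check (a sketch): they should match the eigenvalues of the covariance
#matrix of the standardized data (np.cov uses ddof=1, like pandas .std())
#eigvals = np.sort(np.linalg.eigvalsh(np.cov(dfs.values.T)))[::-1]
#print(eigvals)    #expected to match pca.explained_variance_ above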


########## PCA eigenvectors
#pd.DataFrame(pca.components_, columns=df.columns[1:], index=["PC{}".format(x + 1) for x in range(len(dfs.columns))])
#print(pd.DataFrame(pca.components_, columns=df.columns[1:], index=["PC{}".format(x + 1) for x in range(len(dfs.columns))]))
'''
       Alcohol  Malic acid       Ash  Alcalinity of ash  ...  Color intensity       Hue  OD280/OD315 of diluted wines   Proline
PC1   0.144329   -0.245188 -0.002051          -0.239320  ...        -0.088617  0.296715                      0.376167  0.286752
PC2  -0.483652   -0.224931 -0.316069           0.010591  ...        -0.529996  0.279235                      0.164496 -0.364903
PC3  -0.207383    0.089013  0.626224           0.612080  ...        -0.137306  0.085222                      0.166005 -0.126746
PC4  -0.017856    0.536890 -0.214176           0.060859  ...         0.065926 -0.427771                      0.184121 -0.232071
PC5  -0.265664    0.035214 -0.143025           0.066103  ...        -0.076437 -0.173615                     -0.101161 -0.157869
PC6  -0.213539   -0.536814 -0.154475           0.100825  ...         0.418644 -0.105983                     -0.265851 -0.119726
PC7  -0.056396    0.420524 -0.149171          -0.286969  ...        -0.227712  0.232076                     -0.044764  0.076805
PC8  -0.396139   -0.065827  0.170260          -0.427970  ...         0.033797 -0.436624                      0.078108 -0.120023
PC9   0.508619   -0.075283 -0.307694           0.200449  ...         0.056218  0.085828                      0.137227 -0.575786
PC10  0.211605   -0.309080 -0.027125           0.052799  ...        -0.290775 -0.522399                      0.523706  0.162116
PC11 -0.225917    0.076486 -0.498691           0.479314  ...         0.031839 -0.048212                      0.046423  0.539270
PC12 -0.266286    0.121696 -0.049622          -0.055743  ...         0.604222  0.259214                      0.600959 -0.079402
PC13  0.014970    0.025964 -0.141218           0.091683  ...        -0.011993 -0.089889                     -0.156718  0.014447

[13 rows x 13 columns]
'''
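#the rows of pca.components_ are unit-length eigenvectors; for standardized
#data they are often rescaled to "loadings" (correlations between the original
#variables and the PCs) -- a sketch:
#loadings = pca.components_.T * np.sqrt(pca.explained_variance_)
#print(pd.DataFrame(loadings, index=df.columns[1:], columns=["PC{}".format(x + 1) for x in range(len(dfs.columns))]))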


########## PC1 & PC2 components of each original variable

plt.figure(figsize=(6, 6))
for x, y, name in zip(pca.components_[0], pca.components_[1], df.columns[1:]):
    plt.text(x, y, name)
plt.scatter(pca.components_[0], pca.components_[1], alpha=0.8)
plt.grid()
plt.xlabel("PC1")
plt.ylabel("PC2")
plt.savefig("Figure_5_PCA_plot_PC1_PC2_and_observations.png")
plt.show()




Figures
Figure_1_scatter_matrix_raw_data.png


Figure_2_PCA_plot_PC1_and_PC2.png


Figure_3_PCA_plot_all_PCs.png


Figure_4_PCA_plot_cum_contb.png


Figure_5_PCA_plot_PC1_PC2_and_observations.png


Monday, June 15, 2020

Multinomial Logistic Regression with L2 (or L1) Regularization of Standardized Training Data (plus Test Data) in Python

Multinomial Logistic Regression with L2 (or L1) Regularization of Standardized Training Data (plus Test Data) in Python



0_MacOS_Python_setup.txt
# Install on Terminal of MacOS

#pip3 install -U scikit-learn

#pip3 install -U matplotlib

#pip3 install -U numpy

#pip3 install -U pandas

#pip3 install -U seaborn


1_MacOS_Terminal.txt
########## Run Terminal on MacOS and execute
### TO UPDATE
cd "YOUR_WORKING_DIRECTORY"


python3 mlgstcreg.py l2 1
#python3 mlgstcreg.py l2 10
#python3 mlgstcreg.py l2 100
#python3 mlgstcreg.py l2 1000





Data files



X.csv
4.494746752403546,2.565907751154284
3.3841445311320393,5.169101554140335
4.641125838188323,0.617920368071555
3.3436279255182804,1.7859485832624662
1.854026499900972,2.580673970131322
2.203708371908368,4.056653316946923
3.076269872380446,2.172074458526128
2.6277174035873467,2.471886759188408
4.11294688851731,1.7098624009494876
2.4427445896156783,0.7921262219144254
-1.6104728221529445,2.9243562822910114
3.2224973962333587,0.9504201626223048
5.20991777250232,-0.05678366166746862
2.064712315761789,1.7352820606362547
4.167677153069308,4.077987100384831
2.2191287508753765,2.5348025640025753
0.7444813552199567,-0.8012692296630051
1.5079779199085814,2.2211108325699005
3.7398937663463703,3.7004218892750256
1.4522371617508665,1.5724793501936696
0.5171221759355953,-0.008208625571572536
-0.4130304446548192,4.758813021080532
1.279242972473774,1.3804693813343385
0.2282798109393096,3.0995374058317706
-0.28239622430120104,1.6991398104583528
0.7336190445083068,2.5471627597885957
1.2776124467202452,0.33033395303991564
1.960144310465579,2.6057527405007535
2.094069558025663,2.427759860020327
1.1029330922034843,1.4870065234299592
1.0489973145957656,1.4915150425552421
0.8500374997394751,-0.4413322687061778
2.2509184566947953,1.4317959509113165
-0.30544861163758075,2.654472942190161
0.7168863479702925,2.073461883237738
3.0310897612296968,2.1824093817074885
3.6113559010583245,0.2536925776873813
2.568997005660739,1.0315322817422565
0.7685068615511554,1.181382953515044
1.5593981836737902,2.079429788716632
0.35222929296732475,3.2739610351955215
2.65854613775456,-0.172576656243288
4.104706436697249,4.681192185498803
3.6670460565823193,1.745547856986421
0.4857271207132452,3.491219933093745
1.4298216935543513,3.728798397790962
2.294545298699255,3.3811761709382586
2.503978192058076,2.999245357265979
2.0148492717085826,4.52560227312357
2.1794808027306107,2.5684988097132244
9.26171268561018,4.665612829739954
3.7994554323686414,8.679044351219694
3.9680906587940683,10.366450644211836
5.283590910349264,5.705370290222793
9.330633289873484,9.564326840121662
9.234707005589334,8.569315382059349
4.508313356701943,10.308329544524192
5.53580454491248,8.389895248377957
7.640688535753728,6.731514643080693
7.063616669319316,8.597308810232208
6.6519881452276515,5.095781972826784
6.5165636704814665,9.29736776350474
4.7969731776409015,6.74082537358526
5.246291939324073,10.203018734320477
7.1644486768077105,7.705744602514913
4.66646624149673,7.934006997257856
4.8320215705383225,7.0551321441644586
4.898682286454355,8.171616834778561
6.998684589554707,6.639215972185542
6.685903746468299,5.106761931080872
3.4170660822610364,7.761048751014304
6.288686962260316,8.099906713100598
10.12772783185664,8.635886458173985
4.4189455272526255,8.934728963864012
3.7207815069376498,6.200512012469322
5.881802072387676,9.967596644675044
4.710046809030184,5.568566461709612
5.829475225561735,5.850821898302097
7.9513906587206815,5.129503758820057
4.012525993931379,6.241673437930561
5.137382491582886,10.342047552043745
7.644445075358459,7.151643198344877
3.8774834199866217,8.46247957515756
4.267576199811119,4.324377974317278
8.05772796125969,7.548960706937425
7.594974269386494,7.552052488674493
7.484074153295792,5.872390595376473
4.2086388506648875,8.180556335824212
4.608453642359058,5.805664750588825
5.210994559408913,7.030274791504974
5.386864560138938,4.618514501867448
4.885220225607101,3.1489527746384947
7.082932639669101,4.22515474383443
4.087151945077339,7.090352567660865
4.719039314852367,9.672579676064801
3.760706145442498,7.462545673918223
5.931960162965485,4.976802713921405
6.906341762455969,6.702873038471027
7.336780447503662,8.426351034909207
9.746834572913645,9.314934314159679



y.csv
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
0.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0
1.0





Python files


mlgstcreg.py
#################### Multinomial Logistic Regression with L2 (or L1) Regularization of Standardized Training Data (plus Test Data) in Python ####################
#
#Run this script on Terminal of MacOS as follows:
#python3 mlgstcreg.py l2 1
##python3 mlgstcreg.py (penalty: l2 or l1 for LogisticRegression()) (regularization parameter C; 1 by default)
#
#Reference
#http://ailaby.com/logistic_reg/



########## Overview
'''
We have to avoid overfitting, i.e., letting a model learn the outliers and noise of the training data too well.
Typical causes of overfitting are
(1) too many variables,
(2) parameter values that are too large/impactful, and/or
(3) not enough data.

We can use L1 regularization (e.g., linear Lasso Regression) to eliminate redundant/unnecessary parameters, addressing (1).
L2 regularization (e.g., linear Ridge Regression) shrinks parameters toward zero, addressing (2). A minimal illustration follows below.
'''
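
#A minimal illustration of the L1-vs-L2 difference (a sketch, not part of this
#script's flow; the make_classification toy dataset here is hypothetical):
'''
from sklearn.linear_model import LogisticRegression
from sklearn.datasets import make_classification
Xd, yd = make_classification(n_samples=200, n_features=10, n_informative=3, random_state=0)
print(LogisticRegression(penalty='l1', solver='liblinear').fit(Xd, yd).coef_)    #typically sparse (exact zeros)
print(LogisticRegression(penalty='l2', solver='liblinear').fit(Xd, yd).coef_)    #small but nonzero weights
'''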



########## import
'''
from sklearn import datasets
import matplotlib.pyplot as plt
import seaborn as sns
'''

#from sklearn.cross_validation import train_test_split
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import confusion_matrix
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
import sys
import math


########## arguments
pnlty = str(sys.argv[1])    #l2 or l1
c = int(sys.argv[2])    #C = 1 by default. The larger C is, the weaker the regularization.


########## Multinomial Logistic Regression with Regularization

''' 
##### generate dataset with TWO classes as the mixture of training data and test data
np.random.seed(seed=0)

X_0 = np.random.multivariate_normal( [2,2],  [[2,0],[0,2]],  50 )

y_0 = np.zeros(len(X_0))

X_1 = np.random.multivariate_normal( [6,7],  [[3,0],[0,3]],  50 )
y_1 = np.ones(len(X_1))
 
X = np.vstack((X_0, X_1))
#print(X)
#print(type(X))
#<class 'numpy.ndarray'>

y = np.append(y_0, y_1)
#print(y)
#print(type(y))
#<class 'numpy.ndarray'>


##### save dataset
pd.DataFrame(data=X).to_csv("X.csv", header=False, index=False)
pd.DataFrame(data=y).to_csv("y.csv", header=False, index=False)
'''



##### load raw dataset
X = pd.read_csv('X.csv', header=None).values
y = pd.read_csv('y.csv', header=None).values.ravel()

#print(type(X))
#<class 'numpy.ndarray'>

#print(type(y))
#<class 'numpy.ndarray'> 



##### plot raw data
plt.scatter(X[y==0, 0], X[y==0, 1], c='blue', marker='*', label='raw data 0')
plt.scatter(X[y==1, 0], X[y==1, 1], c='red', marker='*', label='raw data 1')

plt.legend(loc='upper left')
plt.title('Raw Data')
plt.xlabel('X1: Raw Data')
plt.ylabel('X2: Raw Data')

plt.savefig('Figure_1_Raw_Data.png')
plt.show()
plt.close()


##### splitting Training Data and Test Data
#X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=3)
#random_state=3 fixes the split for reproducibility. You can change it to, say, 1, 2, or any other integer you like.



##### Standardization of Training and Test Data (Average=0, SD=1)
sc = StandardScaler()
X_train_std = sc.fit_transform(X_train)
X_test_std = sc.transform(X_test)
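#fit_transform() learns the mean and SD from the training data only;
#transform() then applies those same statistics to the test data, so no
#information from the test set leaks into the preprocessing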


#####  max and min of the standardized features (plot ranges for the x- and y-axes)
xmax = max(
    max(X_train_std[y_train==0, 0]),
    max(X_train_std[y_train==1, 0]),
    max(X_test_std[y_test==0, 0]), 
    max(X_test_std[y_test==1, 0])
)
#
xmin = min(
    min(X_train_std[y_train==0, 0]),
    min(X_train_std[y_train==1, 0]),
    min(X_test_std[y_test==0, 0]), 
    min(X_test_std[y_test==1, 0])
)
#
#
ymax = max(
    max(X_train_std[y_train==0, 1]),
    max(X_train_std[y_train==1, 1]),
    max(X_test_std[y_test==0, 1]),
    max(X_test_std[y_test==1, 1])
)
#
ymin = min(
    min(X_train_std[y_train==0, 1]),
    min(X_train_std[y_train==1, 1]),
    min(X_test_std[y_test==0, 1]),
    min(X_test_std[y_test==1, 1])
)


##### plot training and test data
plt.xlim([math.floor(xmin), math.ceil(xmax)])
plt.ylim([math.floor(ymin), math.ceil(ymax)])

plt.scatter(X_train_std[y_train==0, 0], X_train_std[y_train==0, 1], c='blue', marker='x', label='train 0')
plt.scatter(X_train_std[y_train==1, 0], X_train_std[y_train==1, 1], c='red', marker='x', label='train 1')
plt.scatter(X_test_std[y_test==0, 0], X_test_std[y_test==0, 1], c='blue', marker='o', s=60, label='test 0')
plt.scatter(X_test_std[y_test==1, 0], X_test_std[y_test==1, 1], c='red', marker='o', s=60, label='test 1')
 
plt.legend(loc='upper left')
plt.title('Training Data and Test Data')
plt.xlabel('X1: Training Data and Test Data')
plt.ylabel('X2: Training Data and Test Data')

plt.savefig('Figure_2_Training_Data_and_Test_Data.png')
plt.show()
plt.close()

 
########## Logistic Regression built by Standardized Training Data

#lr = LogisticRegression()
lr = LogisticRegression(C=c, penalty=pnlty, solver='liblinear')    #liblinear supports both l1 and l2; the default lbfgs solver rejects penalty='l1'

lr.fit(X_train_std, y_train)


#check predict and score
#print(lr.predict(X_test_std))
#[1. 1. 0. 1. 1. 0. 1. 1. 0. 0. 1. 0. 1. 1. 0. 1. 1. 0. 1. 0.]
#
#print(y_test)
#[1. 1. 0. 1. 1. 1. 1. 0. 0. 0. 1. 0. 1. 1. 0. 1. 1. 0. 1. 0.]
#
#print(lr.predict(X_test_std))
#[1. 1. 0. 1. 1. 0. 1. 1. 0. 0. 1. 0. 1. 1. 0. 1. 1. 0. 1. 0.]
#
#[T  T  T  T  T  F  T  F  T  T  T  T  T  T  T  T  T  T  T  T ]
#Number of T (  correct answers) = 18
#Number of F (incorrect answers) = 2 
#18/(18+2) = 0.9

#print(lr.score(X_test_std, y_test))
#0.9


'''
The fitted Logistic Regression gives the weights w0, w1, and w2 of the decision boundary line:
w0 + w1*x + w2*y = 0
where x is the first standardized feature (x1) and y is the second (x2).

w0 is stored in intercept_, while w1 and w2 are stored in coef_.
'''

#print (lr.intercept_)
#[-0.09150732]
 
#print (lr.coef_)
#[[1.99471124 2.32334603]]
 
w_0 = lr.intercept_[0]
w_1 = lr.coef_[0,0]
w_2 = lr.coef_[0,1]


# a boundary line
#   w_0 + (w_1 * x) + (w_2 * y) = 0
#   solving for y (assuming w_2 != 0):
#   y = ((-w_1 * x) - w_0) / w_2
#
print("y = ((-w_1 * x) - w_0) / w_2")
print("y = (-w_1/w_2) * x - w_0/w_2")
print("y: x2")
print("x: x1")
print("w_0 = ", w_0)
print("w_1 = ", w_1)
print("w_2 = ", w_2)
print("(-w_1/w_2) = ", (-w_1/w_2))
print("(-w_0/w_2) = ", (-w_0/w_2))

'''
y = ((-w_1 * x) - w_0) / w_2
y = (-w_1/w_2) * x - w_0/w_2
y: x2
x: x1
w_0 =  -0.09150731939635004
w_1 =  1.9947112354879184
w_2 =  2.3233460327656656
(-w_1/w_2) =  -0.8585510756283915
(-w_0/w_2) =  0.03938600540162393
'''


# plotting a boundary line
#plt.plot([-2,2], map(lambda x: (-w_1 * x - w_0)/w_2, [-2,2]))
#plt.plot([-2,2], list(map(lambda x: (-w_1 * x - w_0)/w_2, [-2,2])))
plt.plot([math.floor(xmin) - 1, math.ceil(xmax) + 1], list(map(lambda x: (-w_1 * x - w_0)/w_2, [math.floor(xmin) - 1, math.ceil(xmax) + 1])))


# plotting Training Data and Test Data
plt.xlim([math.floor(xmin), math.ceil(xmax)])
plt.ylim([math.floor(ymin), math.ceil(ymax)])

plt.scatter(X_train_std[y_train==0, 0], X_train_std[y_train==0, 1], c='blue', marker='x', label='train 0')
plt.scatter(X_train_std[y_train==1, 0], X_train_std[y_train==1, 1], c='red', marker='x', label='train 1')
plt.scatter(X_test_std[y_test==0, 0], X_test_std[y_test==0, 1], c='blue', marker='o', s=60, label='test 0')
plt.scatter(X_test_std[y_test==1, 0], X_test_std[y_test==1, 1], c='red', marker='o', s=60, label='test 1')

plt.legend(loc='upper left')
plt.title('Training Data and Test Data plus a Boundary Line')
plt.xlabel('X1: Training Data and Test Data')
plt.ylabel('X2: Training Data and Test Data')

#plt.text(-2,-2, 'Boundary Line: x2 = (-w_1/w_2) * x1 - w_0/w_2 = ' + str(-w_1/w_2) + ' * x1 + ' + str(-w_0/w_2), size=10)
plt.text(math.floor(xmin) + (math.ceil(xmax) - math.floor(xmin)) * 0.05, math.floor(ymin) + (math.ceil(ymax) - math.floor(ymin)) * 0.20, 'Regularization: ' + str(pnlty), size=9)
plt.text(math.floor(xmin) + (math.ceil(xmax) - math.floor(xmin)) * 0.05, math.floor(ymin) + (math.ceil(ymax) - math.floor(ymin)) * 0.15, 'c = ' + str(c), size=9)
plt.text(math.floor(xmin) + (math.ceil(xmax) - math.floor(xmin)) * 0.05, math.floor(ymin) + (math.ceil(ymax) - math.floor(ymin)) * 0.10, 'Test score = ' + str(round(lr.score(X_test_std, y_test),3)*100) + '%', size=9)
plt.text(math.floor(xmin) + (math.ceil(xmax) - math.floor(xmin)) * 0.05, math.floor(ymin) + (math.ceil(ymax) - math.floor(ymin)) * 0.05, 'Boundary Line: x2 = ' + str(-w_1/w_2) + ' * x1 + ' + str(-w_0/w_2), size=9)

plt.savefig('Figure_3_Training_Data_and_Test_Data_plus_Boundary_Line.png')
plt.show()
plt.close()


##### Confusion Matrix

cm = confusion_matrix(y_test, lr.predict(X_test_std))

print ("Confusion Matrix : \n", cm)
'''
Confusion Matrix : 
 [[ 7  1]
 [ 0 12]]
'''
'''
Out of 20 :
TruePositive + TrueNegative = 12 + 7
FalsePositive + FalseNegative = 1 + 0

 [[TrueNegative(TN)  FalsePositive(FP)]
 [ FalseNegative(FN) TruePositive(TP)]]


                            Predicted Labels:
                            Negative              Positive
Actual Results: Negative    TrueNegative(TN)     FalsePositive(FP)
                Positive    FalseNegative(FN)    TruePositive(TP)
'''
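#with the matrix shown above: accuracy = (TN + TP) / total = (7 + 12) / 20 = 0.95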
#two tick labels because y has two classes (0 and 1)
class_names=[0,1]
#fig, ax = plt.subplots()
fig, ax = plt.subplots(figsize=(6,5))
tick_marks = np.arange(len(class_names))
plt.xticks(tick_marks, class_names)
plt.yticks(tick_marks, class_names)
#sns.heatmap(pd.DataFrame(cm), annot=True, cmap="Blues" ,fmt='g')
sns.heatmap(pd.DataFrame(cm), annot=True, cmap="coolwarm", fmt='g')
ax.xaxis.set_label_position("top")
#plt.tight_layout()
plt.tight_layout(pad=3.00)
plt.title('Confusion Matrix (Test Data)')
plt.xlabel('Predicted Labels')
plt.ylabel('Actual Results')
#print("Accuracy:",metrics.accuracy_score(y_test, y_pred))
plt.savefig("Figure_4_Confusion_Matrix_Test_Data.png")
plt.show()
plt.close()


########## Logistic Regression built by Standardized Training Data (+ Regularization parameter C)

'''
The larger C is, the weaker the regularization.

C is 1.0 by default. We try 1, 10, 100, and 1000 here. (See the note on the objective below.)
'''
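#Note (from scikit-learn's formulation): C multiplies the data-fit (log-loss)
#term rather than the penalty, i.e., roughly
#    minimize  penalty(w) + C * sum_i logloss_i(w),
#so a large C emphasizes fitting the training data (weak regularization) and
#a small C emphasizes the penalty (strong regularization).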


fig, axs = plt.subplots(2, 2, sharex=True, sharey=True)

plt.xlim([math.floor(xmin), math.ceil(xmax)])
plt.ylim([math.floor(ymin), math.ceil(ymax)])

plt.subplots_adjust(wspace=0.1, hspace=0.6)
c_params = [1.0, 10.0, 100.0, 1000.0]

#print(c_params)
#[1.0, 10.0, 100.0, 1000.0]
#
#print(type(c_params))
#<class 'list'>

#print(enumerate(c_params))
#<enumerate object at 0x127bec500>

for i, c in enumerate(c_params):
    #print(i, c)
    #
    #lr = LogisticRegression(C=c)
    lr = LogisticRegression(C=c, penalty=pnlty, solver='liblinear')    #liblinear supports both l1 and l2
    lr.fit(X_train_std, y_train)
    #
    w_0 = lr.intercept_[0]
    w_1 = lr.coef_[0,0]
    w_2 = lr.coef_[0,1]
    score = lr.score(X_test_std, y_test)
    #
    #print(i/2, i%2)
    #print(math.floor(i/2), i%2)
    #####axs[i/2, i%2].set_title('C=' + str(c))
    axs[math.floor(i/2), i%2].set_title('C=' + str(c))
    #
    #####axs[i/2, i%2].plot([-2,2], map(lambda x: (-w_1 * x - w_0)/w_2, [-2,2]))
    axs[math.floor(i/2), i%2].plot([math.floor(xmin)-1, math.ceil(xmax)+1], list(map(lambda x: (-w_1 * x - w_0)/w_2, [math.floor(xmin)-1, math.ceil(xmax)+1])))
    #
    #####axs[i/2, i%2].scatter(X_train_std[y_train==0, 0], X_train_std[y_train==0, 1], c='red', marker='x', label='train 0')
    #axs[math.floor(i/2), i%2].scatter(X_train_std[y_train==0, 0], X_train_std[y_train==0, 1], c='red', marker='x', label='train 0')
    axs[math.floor(i/2), i%2].scatter(X_train_std[y_train==0, 0], X_train_std[y_train==0, 1], c='blue', marker='x', label='train 0')
    #
    #####axs[i/2, i%2].scatter(X_train_std[y_train==1, 0], X_train_std[y_train==1, 1], c='blue', marker='x', label='train 1')
    #axs[math.floor(i/2), i%2].scatter(X_train_std[y_train==1, 0], X_train_std[y_train==1, 1], c='blue', marker='x', label='train 1')
    axs[math.floor(i/2), i%2].scatter(X_train_std[y_train==1, 0], X_train_std[y_train==1, 1], c='red', marker='x', label='train 1')
    #
    #####axs[i/2, i%2].scatter(X_test_std[y_test==0, 0], X_test_std[y_test==0, 1], c='red', marker='o', s=60, label='test 0')
    #axs[math.floor(i/2), i%2].scatter(X_test_std[y_test==0, 0], X_test_std[y_test==0, 1], c='red', marker='o', s=60, label='test 0')
    axs[math.floor(i/2), i%2].scatter(X_test_std[y_test==0, 0], X_test_std[y_test==0, 1], c='blue', marker='o', s=60, label='test 0')
    #
    #####axs[i/2, i%2].scatter(X_test_std[y_test==1, 0], X_test_std[y_test==1, 1], c='blue', marker='o', s=60, label='test 1')
    #axs[math.floor(i/2), i%2].scatter(X_test_std[y_test==1, 0], X_test_std[y_test==1, 1], c='blue', marker='o', s=60, label='test 1')
    axs[math.floor(i/2), i%2].scatter(X_test_std[y_test==1, 0], X_test_std[y_test==1, 1], c='red', marker='o', s=60, label='test 1')
    #
    #
    #####axs[i/2, i%2].text(0,-2.7, 'score ' + str(round(score,3)*100) + '%', size=13)
    #####axs[i/2, i%2].text(0,-3.3, 'score ' + str(round(score,3)*100) + '%', size=13)
    #the two branches of the original if/else were identical, so a single call suffices
    axs[math.floor(i/2), i%2].text(math.floor(xmin) + (math.ceil(xmax) - math.floor(xmin)) * 0.10, math.ceil(ymax) - (math.ceil(ymax) - math.floor(ymin)) * 0.10, 'score ' + str(round(score,3)*100) + '%', size=10)


plt.savefig('Figure_5_Training_Data_and_Test_Data_plus_Boundary_Line_for_various_Cs.png')
plt.show()
plt.close()




Figures
Figure_1_Raw_Data.png



Figure_2_Training_Data_and_Test_Data.png



Figure_3_Training_Data_and_Test_Data_plus_Boundary_Line.png



Figure_4_Confusion_Matrix_Test_Data.png



Figure_5_Training_Data_and_Test_Data_plus_Boundary_Line_for_various_Cs.png






Deep Learning (Regression, Multiple Features/Explanatory Variables, Supervised Learning): Implementation and Showing Biases and Weights

Deep Learning (Regression, Multiple Features/Explanatory Variables, Supervised Learning): Implementation and Showing Biases and Weights ...