parent: ae89e08979
commit: 4536ebc6cf
14 changed files with 652 additions and 0 deletions
@@ -0,0 +1,27 @@
# Machine Learning Samples

Some machine learning examples I put together for fun on a free day.

Built with the scikit-learn library.

# 6 Algorithms Used in This Project

1. K-Nearest Neighbors (KNN)
2. Logistic Regression
3. Decision Tree Classifier
4. Support Vector Machine (SVM)
5. Gaussian Naive Bayes
6. Linear Discriminant Analysis

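For reference, here is a minimal sketch of how these six scikit-learn estimators are set up as `(name, estimator)` pairs, mirroring what `utils/models_evaluation.py` in this commit does:

```python
from sklearn.linear_model import LogisticRegression
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.tree import DecisionTreeClassifier
from sklearn.svm import SVC
from sklearn.naive_bayes import GaussianNB
from sklearn.neighbors import KNeighborsClassifier

# (name, estimator) pairs evaluated by this project
MODELS = [
    ("Logistic Regression", LogisticRegression()),
    ("Linear Discriminant Analysis", LinearDiscriminantAnalysis()),
    ("Decision Tree Classifier", DecisionTreeClassifier()),
    ("SVM", SVC()),
    ("Gaussian NB", GaussianNB()),
    ("K-Nearest Neighbors", KNeighborsClassifier()),
]
```
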
# 2 Evaluation Methods for the Datasets

1. K-Fold CV (K-Fold Cross-Validation)
2. LOOCV (Leave-One-Out Cross-Validation)

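Both methods boil down to `cross_val_score` with a different `cv` strategy. A rough sketch, assuming the `MODELS` list above and feature/label arrays `X` and `y` (these variable names are illustrative):

```python
from sklearn import model_selection

def evaluate_models(models, X, y):
    """Return {name: (k-fold mean accuracy, LOOCV mean accuracy)}."""
    kfold = model_selection.KFold(n_splits=10, shuffle=True, random_state=7)
    loocv = model_selection.LeaveOneOut()
    scores = {}
    for name, model in models:
        kfold_acc = model_selection.cross_val_score(model, X, y, cv=kfold, scoring="accuracy").mean()
        loocv_acc = model_selection.cross_val_score(model, X, y, cv=loocv, scoring="accuracy").mean()
        scores[name] = (kfold_acc, loocv_acc)
    return scores
```

Note that LOOCV fits each model once per sample, so it is much slower than 10-fold CV on anything but small datasets.
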
# How It Chooses Its Best Algorithm

Each of the two evaluation methods produces an array of the 6 models together with their accuracy scores. The project then looks for the highest-scoring entry and picks that algorithm to use.

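Conceptually, that selection step is just an argmax over the per-model mean accuracies; a minimal sketch with illustrative numbers (the actual logic lives in `Model.getHighestScore` in this commit):

```python
# (name, mean accuracy) pairs produced by one of the evaluation methods above
scores = [("Logistic Regression", 0.95), ("SVM", 0.97), ("Gaussian NB", 0.94)]

best_name, best_mean = max(scores, key=lambda pair: pair[1])
print(best_name, best_mean)  # SVM 0.97
```
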
# Next Steps

1. Glass classification section
2. Image classification with the default datasets shipped with scikit-learn
3. Maybe fix the method of choosing the best algorithm: sometimes two algorithms get the same score, so the project could use both of them and show both results (a rough sketch follows below).

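A possible sketch of that tie handling, assuming the evaluation step returns `(name, mean accuracy)` pairs; none of this is implemented yet:

```python
def top_models(scores):
    """Return every model whose mean accuracy ties for the best score."""
    best = max(mean for _, mean in scores)
    return [(name, mean) for name, mean in scores if mean == best]

# Illustrative: two algorithms share the top score, so both would be used
scores = [("SVM", 0.97), ("K-Nearest Neighbors", 0.97), ("Gaussian NB", 0.94)]
print(top_models(scores))  # [('SVM', 0.97), ('K-Nearest Neighbors', 0.97)]
```
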
# Author

Robby Muhammad Nst

OrionStark
@@ -0,0 +1,214 @@
1,1.52101,13.64,4.49,1.10,71.78,0.06,8.75,0.00,0.00,1 |
||||
2,1.51761,13.89,3.60,1.36,72.73,0.48,7.83,0.00,0.00,1 |
||||
3,1.51618,13.53,3.55,1.54,72.99,0.39,7.78,0.00,0.00,1 |
||||
4,1.51766,13.21,3.69,1.29,72.61,0.57,8.22,0.00,0.00,1 |
||||
5,1.51742,13.27,3.62,1.24,73.08,0.55,8.07,0.00,0.00,1 |
||||
6,1.51596,12.79,3.61,1.62,72.97,0.64,8.07,0.00,0.26,1 |
||||
7,1.51743,13.30,3.60,1.14,73.09,0.58,8.17,0.00,0.00,1 |
||||
8,1.51756,13.15,3.61,1.05,73.24,0.57,8.24,0.00,0.00,1 |
||||
9,1.51918,14.04,3.58,1.37,72.08,0.56,8.30,0.00,0.00,1 |
||||
10,1.51755,13.00,3.60,1.36,72.99,0.57,8.40,0.00,0.11,1 |
||||
11,1.51571,12.72,3.46,1.56,73.20,0.67,8.09,0.00,0.24,1 |
||||
12,1.51763,12.80,3.66,1.27,73.01,0.60,8.56,0.00,0.00,1 |
||||
13,1.51589,12.88,3.43,1.40,73.28,0.69,8.05,0.00,0.24,1 |
||||
14,1.51748,12.86,3.56,1.27,73.21,0.54,8.38,0.00,0.17,1 |
||||
15,1.51763,12.61,3.59,1.31,73.29,0.58,8.50,0.00,0.00,1 |
||||
16,1.51761,12.81,3.54,1.23,73.24,0.58,8.39,0.00,0.00,1 |
||||
17,1.51784,12.68,3.67,1.16,73.11,0.61,8.70,0.00,0.00,1 |
||||
18,1.52196,14.36,3.85,0.89,71.36,0.15,9.15,0.00,0.00,1 |
||||
19,1.51911,13.90,3.73,1.18,72.12,0.06,8.89,0.00,0.00,1 |
||||
20,1.51735,13.02,3.54,1.69,72.73,0.54,8.44,0.00,0.07,1 |
||||
21,1.51750,12.82,3.55,1.49,72.75,0.54,8.52,0.00,0.19,1 |
||||
22,1.51966,14.77,3.75,0.29,72.02,0.03,9.00,0.00,0.00,1 |
||||
23,1.51736,12.78,3.62,1.29,72.79,0.59,8.70,0.00,0.00,1 |
||||
24,1.51751,12.81,3.57,1.35,73.02,0.62,8.59,0.00,0.00,1 |
||||
25,1.51720,13.38,3.50,1.15,72.85,0.50,8.43,0.00,0.00,1 |
||||
26,1.51764,12.98,3.54,1.21,73.00,0.65,8.53,0.00,0.00,1 |
||||
27,1.51793,13.21,3.48,1.41,72.64,0.59,8.43,0.00,0.00,1 |
||||
28,1.51721,12.87,3.48,1.33,73.04,0.56,8.43,0.00,0.00,1 |
||||
29,1.51768,12.56,3.52,1.43,73.15,0.57,8.54,0.00,0.00,1 |
||||
30,1.51784,13.08,3.49,1.28,72.86,0.60,8.49,0.00,0.00,1 |
||||
31,1.51768,12.65,3.56,1.30,73.08,0.61,8.69,0.00,0.14,1 |
||||
32,1.51747,12.84,3.50,1.14,73.27,0.56,8.55,0.00,0.00,1 |
||||
33,1.51775,12.85,3.48,1.23,72.97,0.61,8.56,0.09,0.22,1 |
||||
34,1.51753,12.57,3.47,1.38,73.39,0.60,8.55,0.00,0.06,1 |
||||
35,1.51783,12.69,3.54,1.34,72.95,0.57,8.75,0.00,0.00,1 |
||||
36,1.51567,13.29,3.45,1.21,72.74,0.56,8.57,0.00,0.00,1 |
||||
37,1.51909,13.89,3.53,1.32,71.81,0.51,8.78,0.11,0.00,1 |
||||
38,1.51797,12.74,3.48,1.35,72.96,0.64,8.68,0.00,0.00,1 |
||||
39,1.52213,14.21,3.82,0.47,71.77,0.11,9.57,0.00,0.00,1 |
||||
40,1.52213,14.21,3.82,0.47,71.77,0.11,9.57,0.00,0.00,1 |
||||
41,1.51793,12.79,3.50,1.12,73.03,0.64,8.77,0.00,0.00,1 |
||||
42,1.51755,12.71,3.42,1.20,73.20,0.59,8.64,0.00,0.00,1 |
||||
43,1.51779,13.21,3.39,1.33,72.76,0.59,8.59,0.00,0.00,1 |
||||
44,1.52210,13.73,3.84,0.72,71.76,0.17,9.74,0.00,0.00,1 |
||||
45,1.51786,12.73,3.43,1.19,72.95,0.62,8.76,0.00,0.30,1 |
||||
46,1.51900,13.49,3.48,1.35,71.95,0.55,9.00,0.00,0.00,1 |
||||
47,1.51869,13.19,3.37,1.18,72.72,0.57,8.83,0.00,0.16,1 |
||||
48,1.52667,13.99,3.70,0.71,71.57,0.02,9.82,0.00,0.10,1 |
||||
49,1.52223,13.21,3.77,0.79,71.99,0.13,10.02,0.00,0.00,1 |
||||
50,1.51898,13.58,3.35,1.23,72.08,0.59,8.91,0.00,0.00,1 |
||||
51,1.52320,13.72,3.72,0.51,71.75,0.09,10.06,0.00,0.16,1 |
||||
52,1.51926,13.20,3.33,1.28,72.36,0.60,9.14,0.00,0.11,1 |
||||
53,1.51808,13.43,2.87,1.19,72.84,0.55,9.03,0.00,0.00,1 |
||||
54,1.51837,13.14,2.84,1.28,72.85,0.55,9.07,0.00,0.00,1 |
||||
55,1.51778,13.21,2.81,1.29,72.98,0.51,9.02,0.00,0.09,1 |
||||
56,1.51769,12.45,2.71,1.29,73.70,0.56,9.06,0.00,0.24,1 |
||||
57,1.51215,12.99,3.47,1.12,72.98,0.62,8.35,0.00,0.31,1 |
||||
58,1.51824,12.87,3.48,1.29,72.95,0.60,8.43,0.00,0.00,1 |
||||
59,1.51754,13.48,3.74,1.17,72.99,0.59,8.03,0.00,0.00,1 |
||||
60,1.51754,13.39,3.66,1.19,72.79,0.57,8.27,0.00,0.11,1 |
||||
61,1.51905,13.60,3.62,1.11,72.64,0.14,8.76,0.00,0.00,1 |
||||
62,1.51977,13.81,3.58,1.32,71.72,0.12,8.67,0.69,0.00,1 |
||||
63,1.52172,13.51,3.86,0.88,71.79,0.23,9.54,0.00,0.11,1 |
||||
64,1.52227,14.17,3.81,0.78,71.35,0.00,9.69,0.00,0.00,1 |
||||
65,1.52172,13.48,3.74,0.90,72.01,0.18,9.61,0.00,0.07,1 |
||||
66,1.52099,13.69,3.59,1.12,71.96,0.09,9.40,0.00,0.00,1 |
||||
67,1.52152,13.05,3.65,0.87,72.22,0.19,9.85,0.00,0.17,1 |
||||
68,1.52152,13.05,3.65,0.87,72.32,0.19,9.85,0.00,0.17,1 |
||||
69,1.52152,13.12,3.58,0.90,72.20,0.23,9.82,0.00,0.16,1 |
||||
70,1.52300,13.31,3.58,0.82,71.99,0.12,10.17,0.00,0.03,1 |
||||
71,1.51574,14.86,3.67,1.74,71.87,0.16,7.36,0.00,0.12,2 |
||||
72,1.51848,13.64,3.87,1.27,71.96,0.54,8.32,0.00,0.32,2 |
||||
73,1.51593,13.09,3.59,1.52,73.10,0.67,7.83,0.00,0.00,2 |
||||
74,1.51631,13.34,3.57,1.57,72.87,0.61,7.89,0.00,0.00,2 |
||||
75,1.51596,13.02,3.56,1.54,73.11,0.72,7.90,0.00,0.00,2 |
||||
76,1.51590,13.02,3.58,1.51,73.12,0.69,7.96,0.00,0.00,2 |
||||
77,1.51645,13.44,3.61,1.54,72.39,0.66,8.03,0.00,0.00,2 |
||||
78,1.51627,13.00,3.58,1.54,72.83,0.61,8.04,0.00,0.00,2 |
||||
79,1.51613,13.92,3.52,1.25,72.88,0.37,7.94,0.00,0.14,2 |
||||
80,1.51590,12.82,3.52,1.90,72.86,0.69,7.97,0.00,0.00,2 |
||||
81,1.51592,12.86,3.52,2.12,72.66,0.69,7.97,0.00,0.00,2 |
||||
82,1.51593,13.25,3.45,1.43,73.17,0.61,7.86,0.00,0.00,2 |
||||
83,1.51646,13.41,3.55,1.25,72.81,0.68,8.10,0.00,0.00,2 |
||||
84,1.51594,13.09,3.52,1.55,72.87,0.68,8.05,0.00,0.09,2 |
||||
85,1.51409,14.25,3.09,2.08,72.28,1.10,7.08,0.00,0.00,2 |
||||
86,1.51625,13.36,3.58,1.49,72.72,0.45,8.21,0.00,0.00,2 |
||||
87,1.51569,13.24,3.49,1.47,73.25,0.38,8.03,0.00,0.00,2 |
||||
88,1.51645,13.40,3.49,1.52,72.65,0.67,8.08,0.00,0.10,2 |
||||
89,1.51618,13.01,3.50,1.48,72.89,0.60,8.12,0.00,0.00,2 |
||||
90,1.51640,12.55,3.48,1.87,73.23,0.63,8.08,0.00,0.09,2 |
||||
91,1.51841,12.93,3.74,1.11,72.28,0.64,8.96,0.00,0.22,2 |
||||
92,1.51605,12.90,3.44,1.45,73.06,0.44,8.27,0.00,0.00,2 |
||||
93,1.51588,13.12,3.41,1.58,73.26,0.07,8.39,0.00,0.19,2 |
||||
94,1.51590,13.24,3.34,1.47,73.10,0.39,8.22,0.00,0.00,2 |
||||
95,1.51629,12.71,3.33,1.49,73.28,0.67,8.24,0.00,0.00,2 |
||||
96,1.51860,13.36,3.43,1.43,72.26,0.51,8.60,0.00,0.00,2 |
||||
97,1.51841,13.02,3.62,1.06,72.34,0.64,9.13,0.00,0.15,2 |
||||
98,1.51743,12.20,3.25,1.16,73.55,0.62,8.90,0.00,0.24,2 |
||||
99,1.51689,12.67,2.88,1.71,73.21,0.73,8.54,0.00,0.00,2 |
||||
100,1.51811,12.96,2.96,1.43,72.92,0.60,8.79,0.14,0.00,2 |
||||
101,1.51655,12.75,2.85,1.44,73.27,0.57,8.79,0.11,0.22,2 |
||||
102,1.51730,12.35,2.72,1.63,72.87,0.70,9.23,0.00,0.00,2 |
||||
103,1.51820,12.62,2.76,0.83,73.81,0.35,9.42,0.00,0.20,2 |
||||
104,1.52725,13.80,3.15,0.66,70.57,0.08,11.64,0.00,0.00,2 |
||||
105,1.52410,13.83,2.90,1.17,71.15,0.08,10.79,0.00,0.00,2 |
||||
106,1.52475,11.45,0.00,1.88,72.19,0.81,13.24,0.00,0.34,2 |
||||
107,1.53125,10.73,0.00,2.10,69.81,0.58,13.30,3.15,0.28,2 |
||||
108,1.53393,12.30,0.00,1.00,70.16,0.12,16.19,0.00,0.24,2 |
||||
109,1.52222,14.43,0.00,1.00,72.67,0.10,11.52,0.00,0.08,2 |
||||
110,1.51818,13.72,0.00,0.56,74.45,0.00,10.99,0.00,0.00,2 |
||||
111,1.52664,11.23,0.00,0.77,73.21,0.00,14.68,0.00,0.00,2 |
||||
112,1.52739,11.02,0.00,0.75,73.08,0.00,14.96,0.00,0.00,2 |
||||
113,1.52777,12.64,0.00,0.67,72.02,0.06,14.40,0.00,0.00,2 |
||||
114,1.51892,13.46,3.83,1.26,72.55,0.57,8.21,0.00,0.14,2 |
||||
115,1.51847,13.10,3.97,1.19,72.44,0.60,8.43,0.00,0.00,2 |
||||
116,1.51846,13.41,3.89,1.33,72.38,0.51,8.28,0.00,0.00,2 |
||||
117,1.51829,13.24,3.90,1.41,72.33,0.55,8.31,0.00,0.10,2 |
||||
118,1.51708,13.72,3.68,1.81,72.06,0.64,7.88,0.00,0.00,2 |
||||
119,1.51673,13.30,3.64,1.53,72.53,0.65,8.03,0.00,0.29,2 |
||||
120,1.51652,13.56,3.57,1.47,72.45,0.64,7.96,0.00,0.00,2 |
||||
121,1.51844,13.25,3.76,1.32,72.40,0.58,8.42,0.00,0.00,2 |
||||
122,1.51663,12.93,3.54,1.62,72.96,0.64,8.03,0.00,0.21,2 |
||||
123,1.51687,13.23,3.54,1.48,72.84,0.56,8.10,0.00,0.00,2 |
||||
124,1.51707,13.48,3.48,1.71,72.52,0.62,7.99,0.00,0.00,2 |
||||
125,1.52177,13.20,3.68,1.15,72.75,0.54,8.52,0.00,0.00,2 |
||||
126,1.51872,12.93,3.66,1.56,72.51,0.58,8.55,0.00,0.12,2 |
||||
127,1.51667,12.94,3.61,1.26,72.75,0.56,8.60,0.00,0.00,2 |
||||
128,1.52081,13.78,2.28,1.43,71.99,0.49,9.85,0.00,0.17,2 |
||||
129,1.52068,13.55,2.09,1.67,72.18,0.53,9.57,0.27,0.17,2 |
||||
130,1.52020,13.98,1.35,1.63,71.76,0.39,10.56,0.00,0.18,2 |
||||
131,1.52177,13.75,1.01,1.36,72.19,0.33,11.14,0.00,0.00,2 |
||||
132,1.52614,13.70,0.00,1.36,71.24,0.19,13.44,0.00,0.10,2 |
||||
133,1.51813,13.43,3.98,1.18,72.49,0.58,8.15,0.00,0.00,2 |
||||
134,1.51800,13.71,3.93,1.54,71.81,0.54,8.21,0.00,0.15,2 |
||||
135,1.51811,13.33,3.85,1.25,72.78,0.52,8.12,0.00,0.00,2 |
||||
136,1.51789,13.19,3.90,1.30,72.33,0.55,8.44,0.00,0.28,2 |
||||
137,1.51806,13.00,3.80,1.08,73.07,0.56,8.38,0.00,0.12,2 |
||||
138,1.51711,12.89,3.62,1.57,72.96,0.61,8.11,0.00,0.00,2 |
||||
139,1.51674,12.79,3.52,1.54,73.36,0.66,7.90,0.00,0.00,2 |
||||
140,1.51674,12.87,3.56,1.64,73.14,0.65,7.99,0.00,0.00,2 |
||||
141,1.51690,13.33,3.54,1.61,72.54,0.68,8.11,0.00,0.00,2 |
||||
142,1.51851,13.20,3.63,1.07,72.83,0.57,8.41,0.09,0.17,2 |
||||
143,1.51662,12.85,3.51,1.44,73.01,0.68,8.23,0.06,0.25,2 |
||||
144,1.51709,13.00,3.47,1.79,72.72,0.66,8.18,0.00,0.00,2 |
||||
145,1.51660,12.99,3.18,1.23,72.97,0.58,8.81,0.00,0.24,2 |
||||
146,1.51839,12.85,3.67,1.24,72.57,0.62,8.68,0.00,0.35,2 |
||||
147,1.51769,13.65,3.66,1.11,72.77,0.11,8.60,0.00,0.00,3 |
||||
148,1.51610,13.33,3.53,1.34,72.67,0.56,8.33,0.00,0.00,3 |
||||
149,1.51670,13.24,3.57,1.38,72.70,0.56,8.44,0.00,0.10,3 |
||||
150,1.51643,12.16,3.52,1.35,72.89,0.57,8.53,0.00,0.00,3 |
||||
151,1.51665,13.14,3.45,1.76,72.48,0.60,8.38,0.00,0.17,3 |
||||
152,1.52127,14.32,3.90,0.83,71.50,0.00,9.49,0.00,0.00,3 |
||||
153,1.51779,13.64,3.65,0.65,73.00,0.06,8.93,0.00,0.00,3 |
||||
154,1.51610,13.42,3.40,1.22,72.69,0.59,8.32,0.00,0.00,3 |
||||
155,1.51694,12.86,3.58,1.31,72.61,0.61,8.79,0.00,0.00,3 |
||||
156,1.51646,13.04,3.40,1.26,73.01,0.52,8.58,0.00,0.00,3 |
||||
157,1.51655,13.41,3.39,1.28,72.64,0.52,8.65,0.00,0.00,3 |
||||
158,1.52121,14.03,3.76,0.58,71.79,0.11,9.65,0.00,0.00,3 |
||||
159,1.51776,13.53,3.41,1.52,72.04,0.58,8.79,0.00,0.00,3 |
||||
160,1.51796,13.50,3.36,1.63,71.94,0.57,8.81,0.00,0.09,3 |
||||
161,1.51832,13.33,3.34,1.54,72.14,0.56,8.99,0.00,0.00,3 |
||||
162,1.51934,13.64,3.54,0.75,72.65,0.16,8.89,0.15,0.24,3 |
||||
163,1.52211,14.19,3.78,0.91,71.36,0.23,9.14,0.00,0.37,3 |
||||
164,1.51514,14.01,2.68,3.50,69.89,1.68,5.87,2.20,0.00,5 |
||||
165,1.51915,12.73,1.85,1.86,72.69,0.60,10.09,0.00,0.00,5 |
||||
166,1.52171,11.56,1.88,1.56,72.86,0.47,11.41,0.00,0.00,5 |
||||
167,1.52151,11.03,1.71,1.56,73.44,0.58,11.62,0.00,0.00,5 |
||||
168,1.51969,12.64,0.00,1.65,73.75,0.38,11.53,0.00,0.00,5 |
||||
169,1.51666,12.86,0.00,1.83,73.88,0.97,10.17,0.00,0.00,5 |
||||
170,1.51994,13.27,0.00,1.76,73.03,0.47,11.32,0.00,0.00,5 |
||||
171,1.52369,13.44,0.00,1.58,72.22,0.32,12.24,0.00,0.00,5 |
||||
172,1.51316,13.02,0.00,3.04,70.48,6.21,6.96,0.00,0.00,5 |
||||
173,1.51321,13.00,0.00,3.02,70.70,6.21,6.93,0.00,0.00,5 |
||||
174,1.52043,13.38,0.00,1.40,72.25,0.33,12.50,0.00,0.00,5 |
||||
175,1.52058,12.85,1.61,2.17,72.18,0.76,9.70,0.24,0.51,5 |
||||
176,1.52119,12.97,0.33,1.51,73.39,0.13,11.27,0.00,0.28,5 |
||||
177,1.51905,14.00,2.39,1.56,72.37,0.00,9.57,0.00,0.00,6 |
||||
178,1.51937,13.79,2.41,1.19,72.76,0.00,9.77,0.00,0.00,6 |
||||
179,1.51829,14.46,2.24,1.62,72.38,0.00,9.26,0.00,0.00,6 |
||||
180,1.51852,14.09,2.19,1.66,72.67,0.00,9.32,0.00,0.00,6 |
||||
181,1.51299,14.40,1.74,1.54,74.55,0.00,7.59,0.00,0.00,6 |
||||
182,1.51888,14.99,0.78,1.74,72.50,0.00,9.95,0.00,0.00,6 |
||||
183,1.51916,14.15,0.00,2.09,72.74,0.00,10.88,0.00,0.00,6 |
||||
184,1.51969,14.56,0.00,0.56,73.48,0.00,11.22,0.00,0.00,6 |
||||
185,1.51115,17.38,0.00,0.34,75.41,0.00,6.65,0.00,0.00,6 |
||||
186,1.51131,13.69,3.20,1.81,72.81,1.76,5.43,1.19,0.00,7 |
||||
187,1.51838,14.32,3.26,2.22,71.25,1.46,5.79,1.63,0.00,7 |
||||
188,1.52315,13.44,3.34,1.23,72.38,0.60,8.83,0.00,0.00,7 |
||||
189,1.52247,14.86,2.20,2.06,70.26,0.76,9.76,0.00,0.00,7 |
||||
190,1.52365,15.79,1.83,1.31,70.43,0.31,8.61,1.68,0.00,7 |
||||
191,1.51613,13.88,1.78,1.79,73.10,0.00,8.67,0.76,0.00,7 |
||||
192,1.51602,14.85,0.00,2.38,73.28,0.00,8.76,0.64,0.09,7 |
||||
193,1.51623,14.20,0.00,2.79,73.46,0.04,9.04,0.40,0.09,7 |
||||
194,1.51719,14.75,0.00,2.00,73.02,0.00,8.53,1.59,0.08,7 |
||||
195,1.51683,14.56,0.00,1.98,73.29,0.00,8.52,1.57,0.07,7 |
||||
196,1.51545,14.14,0.00,2.68,73.39,0.08,9.07,0.61,0.05,7 |
||||
197,1.51556,13.87,0.00,2.54,73.23,0.14,9.41,0.81,0.01,7 |
||||
198,1.51727,14.70,0.00,2.34,73.28,0.00,8.95,0.66,0.00,7 |
||||
199,1.51531,14.38,0.00,2.66,73.10,0.04,9.08,0.64,0.00,7 |
||||
200,1.51609,15.01,0.00,2.51,73.05,0.05,8.83,0.53,0.00,7 |
||||
201,1.51508,15.15,0.00,2.25,73.50,0.00,8.34,0.63,0.00,7 |
||||
202,1.51653,11.95,0.00,1.19,75.18,2.70,8.93,0.00,0.00,7 |
||||
203,1.51514,14.85,0.00,2.42,73.72,0.00,8.39,0.56,0.00,7 |
||||
204,1.51658,14.80,0.00,1.99,73.11,0.00,8.28,1.71,0.00,7 |
||||
205,1.51617,14.95,0.00,2.27,73.30,0.00,8.71,0.67,0.00,7 |
||||
206,1.51732,14.95,0.00,1.80,72.99,0.00,8.61,1.55,0.00,7 |
||||
207,1.51645,14.94,0.00,1.87,73.11,0.00,8.67,1.38,0.00,7 |
||||
208,1.51831,14.39,0.00,1.82,72.86,1.41,6.47,2.88,0.00,7 |
||||
209,1.51640,14.37,0.00,2.74,72.85,0.00,9.45,0.54,0.00,7 |
||||
210,1.51623,14.14,0.00,2.88,72.61,0.08,9.18,1.06,0.00,7 |
||||
211,1.51685,14.92,0.00,1.99,73.06,0.00,8.40,1.59,0.00,7 |
||||
212,1.52065,14.36,0.00,2.02,73.42,0.00,8.44,1.64,0.00,7 |
||||
213,1.51651,14.38,0.00,1.94,73.61,0.00,8.48,1.57,0.00,7 |
||||
214,1.51711,14.23,0.00,2.08,73.36,0.00,8.62,1.67,0.00,7 |
@@ -0,0 +1,94 @@
1. Title: Glass Identification Database |
||||
|
||||
2. Sources: |
||||
(a) Creator: B. German |
||||
-- Central Research Establishment |
||||
Home Office Forensic Science Service |
||||
Aldermaston, Reading, Berkshire RG7 4PN |
||||
(b) Donor: Vina Spiehler, Ph.D., DABFT |
||||
Diagnostic Products Corporation |
||||
(213) 776-0180 (ext 3014) |
||||
(c) Date: September, 1987 |
||||
|
||||
3. Past Usage: |
||||
-- Rule Induction in Forensic Science |
||||
-- Ian W. Evett and Ernest J. Spiehler |
||||
-- Central Research Establishment |
||||
Home Office Forensic Science Service |
||||
Aldermaston, Reading, Berkshire RG7 4PN |
||||
-- Unknown technical note number (sorry, not listed here) |
||||
-- General Results: nearest neighbor held its own with respect to the |
||||
rule-based system |
||||
|
||||
4. Relevant Information:
||||
Vina conducted a comparison test of her rule-based system, BEAGLE, the |
||||
nearest-neighbor algorithm, and discriminant analysis. BEAGLE is |
||||
a product available through VRS Consulting, Inc.; 4676 Admiralty Way, |
||||
Suite 206; Marina Del Ray, CA 90292 (213) 827-7890 and FAX: -3189. |
||||
In determining whether the glass was a type of "float" glass or not, |
||||
the following results were obtained (# incorrect answers): |
||||
|
||||
   Type of Sample                            Beagle   NN   DA
   Windows that were float processed (87)        10   12   21
   Windows that were not:            (76)        19   16   22
||||
|
||||
The study of classification of types of glass was motivated by |
||||
criminological investigation. At the scene of the crime, the glass left |
||||
can be used as evidence...if it is correctly identified! |
||||
|
||||
5. Number of Instances: 214 |
||||
|
||||
6. Number of Attributes: 10 (including an Id#) plus the class attribute |
||||
-- all attributes are continuously valued |
||||
|
||||
7. Attribute Information: |
||||
1. Id number: 1 to 214 |
||||
2. RI: refractive index |
||||
3. Na: Sodium (unit measurement: weight percent in corresponding oxide, as |
||||
are attributes 4-10) |
||||
4. Mg: Magnesium |
||||
5. Al: Aluminum |
||||
6. Si: Silicon |
||||
7. K: Potassium |
||||
8. Ca: Calcium |
||||
9. Ba: Barium |
||||
10. Fe: Iron |
||||
11. Type of glass: (class attribute) |
||||
-- 1 building_windows_float_processed |
||||
-- 2 building_windows_non_float_processed |
||||
-- 3 vehicle_windows_float_processed |
||||
-- 4 vehicle_windows_non_float_processed (none in this database) |
||||
-- 5 containers |
||||
-- 6 tableware |
||||
-- 7 headlamps |
||||
|
||||
8. Missing Attribute Values: None |
||||
|
||||
Summary Statistics: |
||||
   Attribute:    Min       Max      Mean      SD       Correlation with class
    2. RI:       1.5112    1.5339   1.5184    0.0030   -0.1642
    3. Na:      10.73     17.38    13.4079    0.8166    0.5030
    4. Mg:       0         4.49     2.6845    1.4424   -0.7447
    5. Al:       0.29      3.5      1.4449    0.4993    0.5988
    6. Si:      69.81     75.41    72.6509    0.7745    0.1515
    7. K:        0         6.21     0.4971    0.6522   -0.0100
    8. Ca:       5.43     16.19     8.9570    1.4232    0.0007
    9. Ba:       0         3.15     0.1750    0.4972    0.5751
   10. Fe:       0         0.51     0.0570    0.0974   -0.1879
||||
|
||||
9. Class Distribution: (out of 214 total instances) |
||||
-- 163 Window glass (building windows and vehicle windows) |
||||
-- 87 float processed |
||||
-- 70 building windows |
||||
-- 17 vehicle windows |
||||
-- 76 non-float processed |
||||
-- 76 building windows |
||||
-- 0 vehicle windows |
||||
-- 51 Non-window glass |
||||
-- 13 containers |
||||
-- 9 tableware |
||||
-- 29 headlamps |
||||
|
||||
|
||||
|
||||
|
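   For convenience, a minimal sketch of loading this file with the attribute
   names listed above (it mirrors the Glass Classification script added in
   this commit; the path is that script's, adjust it to your layout):

       import pandas
       names = ['id', 'RI', 'Na', 'Mg', 'Al', 'Si', 'K', 'Ca', 'Ba', 'Fe', 'class']
       glass = pandas.read_csv("../datasets/Glass Dataset/glass.data", names=names)
       X = glass[['RI', 'Na', 'Mg', 'Al', 'Si', 'K', 'Ca', 'Ba', 'Fe']].values
       y = glass['class'].values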
@@ -0,0 +1,50 @@
# MIT License |
||||
|
||||
# Copyright (c) 2018 Robby Muhammad Nst |
||||
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy |
||||
# of this software and associated documentation files (the "Software"), to deal |
||||
# in the Software without restriction, including without limitation the rights |
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
||||
# copies of the Software, and to permit persons to whom the Software is |
||||
# furnished to do so, subject to the following conditions: |
||||
|
||||
# The above copyright notice and this permission notice shall be included in all |
||||
# copies or substantial portions of the Software. |
||||
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
||||
# SOFTWARE. |
||||
|
||||
import pandas
import numpy
from pandas.plotting import scatter_matrix
import matplotlib.pyplot as plt
import sys
from sklearn import model_selection
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC
sys.path.append('..')
from utils.models_evaluation import ModelsEvaluation
from utils.model import Model

# Load the glass dataset (no header row; column names follow glass.names)
path = "../datasets/Glass Dataset/glass.data"
names = ['id', 'RI', 'Na', 'Mg', 'Al', 'Si', 'K', 'Ca', 'Ba', 'Fe', 'class']
dataset = pandas.read_csv(path, names=names)

array = dataset.values
# Columns 1..9 are the chemical measurements (RI through Fe); column 0 is the row id
X = array[:, 1:10]
# Column 10 is the glass type (class label)
Y = array[:, 10]
print(X)
print(Y)
@@ -0,0 +1,28 @@
# MIT License |
||||
|
||||
# Copyright (c) 2018 Robby Muhammad Nst |
||||
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy |
||||
# of this software and associated documentation files (the "Software"), to deal |
||||
# in the Software without restriction, including without limitation the rights |
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
||||
# copies of the Software, and to permit persons to whom the Software is |
||||
# furnished to do so, subject to the following conditions: |
||||
|
||||
# The above copyright notice and this permission notice shall be included in all |
||||
# copies or substantial portions of the Software. |
||||
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
||||
# SOFTWARE. |
||||
|
||||
from sklearn.datasets import load_digits
import matplotlib.pyplot as plt

# Quick look at scikit-learn's built-in handwritten digits dataset:
# show the first 8x8 digit image in grayscale
digits = load_digits()
plt.gray()
plt.matshow(digits.images[0])
plt.show()
@@ -0,0 +1,54 @@
import pandas
import numpy
from pandas.plotting import scatter_matrix
import matplotlib.pyplot as plt
import sys
from sklearn import model_selection
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC
sys.path.append('..')
from utils.models_evaluation import ModelsEvaluation
from utils.model import Model

# Load the iris dataset straight from the UCI repository
url = "https://archive.ics.uci.edu/ml/machine-learning-databases/iris/iris.data"
names = ['sepal-length', 'sepal-width', 'petal-length', 'petal-width', 'class']
dataset = pandas.read_csv(url, names=names)

array = dataset.values
X = array[:, 0:4]
Y = array[:, 4]
# Hold-out split (currently unused; the evaluation below runs on the full X, Y)
validation_size = 0.20
seed = 7
X_train, X_validation, Y_train, Y_validation = model_selection.train_test_split(X, Y, test_size=validation_size, random_state=seed)

# Evaluate all 6 algorithms once and keep the best-scoring one
MODEL_EVALUATION = ModelsEvaluation(X, Y)
BEST_MODEL = MODEL_EVALUATION.evaluateAccuracy()
HIGHEST_ACC_MODEL = BEST_MODEL.name
HIGHEST_ACC_MODEL_SCORE = BEST_MODEL.mean
HIGHEST_MODEL_INSTANCE = Model.getAlgorithmModel(HIGHEST_ACC_MODEL)
HIGHEST_MODEL_INSTANCE.fit(X, Y)

# Samples to predict: sepal-length, sepal-width, petal-length, petal-width (cm)
predict_data = [
    [5.7, 2.8, 4.1, 1.3],
    [5.8, 2.7, 5.1, 1.9],
    [7.7, 3.0, 6.1, 2.3]
]
prediction = HIGHEST_MODEL_INSTANCE.predict(predict_data)

if prediction is not None:
    print("Best algorithm: " + HIGHEST_ACC_MODEL)
    print("Confidence (mean CV accuracy, %): ", HIGHEST_ACC_MODEL_SCORE * 100)
    print(prediction)
    dataset.plot(kind='box', subplots=True, layout=(2, 2), sharex=False, sharey=False)
    scatter_matrix(dataset)
    plt.show()
else:
    print("Couldn't find a matching algorithm")
@@ -0,0 +1,21 @@
# MIT License |
||||
|
||||
# Copyright (c) 2018 Robby Muhammad Nst |
||||
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy |
||||
# of this software and associated documentation files (the "Software"), to deal |
||||
# in the Software without restriction, including without limitation the rights |
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
||||
# copies of the Software, and to permit persons to whom the Software is |
||||
# furnished to do so, subject to the following conditions: |
||||
|
||||
# The above copyright notice and this permission notice shall be included in all |
||||
# copies or substantial portions of the Software. |
||||
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
||||
# SOFTWARE. |
Binary file not shown.
@@ -0,0 +1,63 @@
# MIT License |
||||
|
||||
# Copyright (c) 2018 Robby Muhammad Nst |
||||
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy |
||||
# of this software and associated documentation files (the "Software"), to deal |
||||
# in the Software without restriction, including without limitation the rights |
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
||||
# copies of the Software, and to permit persons to whom the Software is |
||||
# furnished to do so, subject to the following conditions: |
||||
|
||||
# The above copyright notice and this permission notice shall be included in all |
||||
# copies or substantial portions of the Software. |
||||
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
||||
# SOFTWARE. |
||||
|
||||
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC


class Model:
    # Shared registry of evaluated models; cleared and refilled by ModelsEvaluation
    models = []

    def __init__(self, name, mean, std):
        """ Model constructor: store the algorithm name and its cross-validation accuracy mean/std """
        self.name = name
        self.mean = mean
        self.std = std

    @staticmethod
    def getHighestScore():
        """ Return every registered model whose mean accuracy ties for the best score """
        __results = []
        __best = max(model.mean for model in Model.models)
        for model in Model.models:
            if model.mean == __best:
                __results.append(model)
        return __results

    @staticmethod
    def getAlgorithmModel(name):
        """ Map a name returned by getHighestScore to a fresh estimator instance """
        return {
            'Logistic Regression': LogisticRegression(),
            'Linear Discriminant Analysis': LinearDiscriminantAnalysis(),
            'Decision Tree Classifier': DecisionTreeClassifier(),
            'SVM': SVC(),
            'Gaussian NB': GaussianNB(),
            'K-Nearest Neighbors': KNeighborsClassifier()
        }.get(name, None)
Binary file not shown.
@@ -0,0 +1,101 @@
# MIT License |
||||
|
||||
# Copyright (c) 2018 Robby Muhammad Nst |
||||
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy |
||||
# of this software and associated documentation files (the "Software"), to deal |
||||
# in the Software without restriction, including without limitation the rights |
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell |
||||
# copies of the Software, and to permit persons to whom the Software is |
||||
# furnished to do so, subject to the following conditions: |
||||
|
||||
# The above copyright notice and this permission notice shall be included in all |
||||
# copies or substantial portions of the Software. |
||||
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, |
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE |
||||
# SOFTWARE. |
||||
|
||||
import pandas
from pandas.plotting import scatter_matrix
import matplotlib.pyplot as plt
from sklearn import model_selection
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC
from model import Model


class ModelsEvaluation:

    def __init__(self, x_train, y_train):

        """ ModelsEvaluation constructor.
        Initialize the 6 candidate models """

        self.x_train = x_train
        self.y_train = y_train

        # 6 default algorithm models, kept per instance so that building
        # several ModelsEvaluation objects does not duplicate entries.
        # Maybe I will add more algorithms in the future
        self.__models = []
        self.__models.append(('Logistic Regression', LogisticRegression()))
        self.__models.append(('Linear Discriminant Analysis', LinearDiscriminantAnalysis()))
        self.__models.append(('Decision Tree Classifier', DecisionTreeClassifier()))
        self.__models.append(('SVM', SVC()))
        self.__models.append(('Gaussian NB', GaussianNB()))
        self.__models.append(('K-Nearest Neighbors', KNeighborsClassifier()))

    # Evaluate the accuracy of the 6 models
    # with k-fold cross-validation
    def evaluateAccuracy(self):
        """ Evaluate each model's accuracy on the given training data,
        so the best algorithm can be picked """

        Model.models[:] = []

        __results = []
        __names = []
        for name, model in self.__models:
            # shuffle=True is required when passing a random_state to KFold
            kfold = model_selection.KFold(n_splits=10, shuffle=True, random_state=7)
            cv_results = model_selection.cross_val_score(model, self.x_train, self.y_train, cv=kfold, scoring='accuracy')
            __results.append(cv_results)
            __names.append(name)
            Model.models.append(Model(name, cv_results.mean(), cv_results.std()))

        # NOT IMPLEMENTED YET: return every model that ties for the best score
        #
        # if len(Model.getHighestScore()) > 1:
        #     if Model.getHighestScore()[0].mean == Model.getHighestScore()[1].mean:
        #         return Model.getHighestScore()[0]
        #     else:
        #         return Model.getHighestScore()
        # else:
        #     return Model.getHighestScore()[0]

        return Model.getHighestScore()[0]

    # Evaluate the accuracy of the 6 models
    # with leave-one-out cross-validation (LOOCV)
    def leaveOneOutCrossValidationEvaluation(self):
        Model.models[:] = []

        __results = []
        __names = []
        __looCrossValidation = model_selection.LeaveOneOut()
        for name, model in self.__models:
            cv_results = model_selection.cross_val_score(model, self.x_train, self.y_train, cv=__looCrossValidation)
            __results.append(cv_results)
            __names.append(name)
            Model.models.append(Model(name, cv_results.mean(), cv_results.std()))

        return Model.getHighestScore()[0]
Binary file not shown.