Skip to content
Snippets Groups Projects
Commit e0764cad authored by Bence Juhasz's avatar Bence Juhasz
Browse files

add files

parents
Branches
No related tags found
No related merge requests found
#!/usr/bin/python3
"""Fit a small dense network to the banknote-authentication dataset.

Each row of the input file holds four numeric features followed by a
0/1 class label in the fifth column.
"""
from keras.models import Sequential
from keras.layers import Dense
from numpy import loadtxt

dataset = loadtxt('data_banknote_authentication.txt', delimiter=',')
features, labels = dataset[:, 0:4], dataset[:, 4]

# 4 -> 6 -> 3 -> 1 architecture; sigmoid output for binary classification.
network = Sequential()
for layer in (Dense(6, input_dim=4, activation='relu'),
              Dense(3, activation='relu'),
              Dense(1, activation='sigmoid')):
    network.add(layer)

network.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
network.fit(features, labels, epochs=100, batch_size=10)

# NOTE(review): accuracy is measured on the training data itself, so it
# is an optimistic estimate.
_, accuracy = network.evaluate(features, labels)
print(accuracy)
This diff is collapsed.
30,64,1,1
30,62,3,1
30,65,0,1
31,59,2,1
31,65,4,1
33,58,10,1
33,60,0,1
34,59,0,2
34,66,9,2
34,58,30,1
34,60,1,1
34,61,10,1
34,67,7,1
34,60,0,1
35,64,13,1
35,63,0,1
36,60,1,1
36,69,0,1
37,60,0,1
37,63,0,1
37,58,0,1
37,59,6,1
37,60,15,1
37,63,0,1
38,69,21,2
38,59,2,1
38,60,0,1
38,60,0,1
38,62,3,1
38,64,1,1
38,66,0,1
38,66,11,1
38,60,1,1
38,67,5,1
39,66,0,2
39,63,0,1
39,67,0,1
39,58,0,1
39,59,2,1
39,63,4,1
40,58,2,1
40,58,0,1
40,65,0,1
41,60,23,2
41,64,0,2
41,67,0,2
41,58,0,1
41,59,8,1
41,59,0,1
41,64,0,1
41,69,8,1
41,65,0,1
41,65,0,1
42,69,1,2
42,59,0,2
42,58,0,1
42,60,1,1
42,59,2,1
42,61,4,1
42,62,20,1
42,65,0,1
42,63,1,1
43,58,52,2
43,59,2,2
43,64,0,2
43,64,0,2
43,63,14,1
43,64,2,1
43,64,3,1
43,60,0,1
43,63,2,1
43,65,0,1
43,66,4,1
44,64,6,2
44,58,9,2
44,63,19,2
44,61,0,1
44,63,1,1
44,61,0,1
44,67,16,1
45,65,6,2
45,66,0,2
45,67,1,2
45,60,0,1
45,67,0,1
45,59,14,1
45,64,0,1
45,68,0,1
45,67,1,1
46,58,2,2
46,69,3,2
46,62,5,2
46,65,20,2
46,62,0,1
46,58,3,1
46,63,0,1
47,63,23,2
47,62,0,2
47,65,0,2
47,61,0,1
47,63,6,1
47,66,0,1
47,67,0,1
47,58,3,1
47,60,4,1
47,68,4,1
47,66,12,1
48,58,11,2
48,58,11,2
48,67,7,2
48,61,8,1
48,62,2,1
48,64,0,1
48,66,0,1
49,63,0,2
49,64,10,2
49,61,1,1
49,62,0,1
49,66,0,1
49,60,1,1
49,62,1,1
49,63,3,1
49,61,0,1
49,67,1,1
50,63,13,2
50,64,0,2
50,59,0,1
50,61,6,1
50,61,0,1
50,63,1,1
50,58,1,1
50,59,2,1
50,61,0,1
50,64,0,1
50,65,4,1
50,66,1,1
51,59,13,2
51,59,3,2
51,64,7,1
51,59,1,1
51,65,0,1
51,66,1,1
52,69,3,2
52,59,2,2
52,62,3,2
52,66,4,2
52,61,0,1
52,63,4,1
52,69,0,1
52,60,4,1
52,60,5,1
52,62,0,1
52,62,1,1
52,64,0,1
52,65,0,1
52,68,0,1
53,58,4,2
53,65,1,2
53,59,3,2
53,60,9,2
53,63,24,2
53,65,12,2
53,58,1,1
53,60,1,1
53,60,2,1
53,61,1,1
53,63,0,1
54,60,11,2
54,65,23,2
54,65,5,2
54,68,7,2
54,59,7,1
54,60,3,1
54,66,0,1
54,67,46,1
54,62,0,1
54,69,7,1
54,63,19,1
54,58,1,1
54,62,0,1
55,63,6,2
55,68,15,2
55,58,1,1
55,58,0,1
55,58,1,1
55,66,18,1
55,66,0,1
55,69,3,1
55,69,22,1
55,67,1,1
56,65,9,2
56,66,3,2
56,60,0,1
56,66,2,1
56,66,1,1
56,67,0,1
56,60,0,1
57,61,5,2
57,62,14,2
57,64,1,2
57,64,9,1
57,69,0,1
57,61,0,1
57,62,0,1
57,63,0,1
57,64,0,1
57,64,0,1
57,67,0,1
58,59,0,1
58,60,3,1
58,61,1,1
58,67,0,1
58,58,0,1
58,58,3,1
58,61,2,1
59,62,35,2
59,60,0,1
59,63,0,1
59,64,1,1
59,64,4,1
59,64,0,1
59,64,7,1
59,67,3,1
60,59,17,2
60,65,0,2
60,61,1,1
60,67,2,1
60,61,25,1
60,64,0,1
61,62,5,2
61,65,0,2
61,68,1,2
61,59,0,1
61,59,0,1
61,64,0,1
61,65,8,1
61,68,0,1
61,59,0,1
62,59,13,2
62,58,0,2
62,65,19,2
62,62,6,1
62,66,0,1
62,66,0,1
62,58,0,1
63,60,1,2
63,61,0,1
63,62,0,1
63,63,0,1
63,63,0,1
63,66,0,1
63,61,9,1
63,61,28,1
64,58,0,1
64,65,22,1
64,66,0,1
64,61,0,1
64,68,0,1
65,58,0,2
65,61,2,2
65,62,22,2
65,66,15,2
65,58,0,1
65,64,0,1
65,67,0,1
65,59,2,1
65,64,0,1
65,67,1,1
66,58,0,2
66,61,13,2
66,58,0,1
66,58,1,1
66,68,0,1
67,64,8,2
67,63,1,2
67,66,0,1
67,66,0,1
67,61,0,1
67,65,0,1
68,67,0,1
68,68,0,1
69,67,8,2
69,60,0,1
69,65,0,1
69,66,0,1
70,58,0,2
70,58,4,2
70,66,14,1
70,67,0,1
70,68,0,1
70,59,8,1
70,63,0,1
71,68,2,1
72,63,0,2
72,58,0,1
72,64,0,1
72,67,3,1
73,62,0,1
73,68,0,1
74,65,3,2
74,63,0,1
75,62,1,1
76,67,0,1
77,65,3,1
78,65,1,2
83,58,2,2
1. Title: Haberman's Survival Data
2. Sources:
(a) Donor: Tjen-Sien Lim (limt@stat.wisc.edu)
(b) Date: March 4, 1999
3. Past Usage:
1. Haberman, S. J. (1976). Generalized Residuals for Log-Linear
Models, Proceedings of the 9th International Biometrics
Conference, Boston, pp. 104-122.
2. Landwehr, J. M., Pregibon, D., and Shoemaker, A. C. (1984),
Graphical Models for Assessing Logistic Regression Models (with
discussion), Journal of the American Statistical Association 79:
61-83.
3. Lo, W.-D. (1993). Logistic Regression Trees, PhD thesis,
Department of Statistics, University of Wisconsin, Madison, WI.
4. Relevant Information:
The dataset contains cases from a study that was conducted between
1958 and 1970 at the University of Chicago's Billings Hospital on
the survival of patients who had undergone surgery for breast
cancer.
5. Number of Instances: 306
6. Number of Attributes: 4 (including the class attribute)
7. Attribute Information:
1. Age of patient at time of operation (numerical)
2. Patient's year of operation (year - 1900, numerical)
3. Number of positive axillary nodes detected (numerical)
4. Survival status (class attribute)
1 = the patient survived 5 years or longer
2 = the patient died within 5 years
8. Missing Attribute Values: None
#!/usr/bin/python3
"""Train a small feed-forward classifier on the Haberman survival dataset.

The raw file encodes survival status as 1 (survived five years or
longer) and 2 (died within five years); the column is remapped to 0/1
so it can be used directly with binary cross-entropy.
"""
from keras.models import Sequential
from keras.layers import Dense
from numpy import loadtxt
import numpy as np

data = loadtxt('haberman.data', delimiter=',')

# Remap the class column 1 -> 0, anything else -> 1 in one vectorized
# step.  The original looped over a hard-coded range(306), which silently
# breaks if the file gains or loses rows; this works for any row count.
data[:, 3] = (data[:, 3] != 1.0).astype(data.dtype)

inputs = data[:, 0:3]   # age, year of operation, positive axillary nodes
outputs = data[:, 3]    # 0 = survived >= 5 years, 1 = died within 5 years

# 3 -> 6 -> 8 -> 1 architecture; sigmoid output for binary classification.
model = Sequential()
model.add(Dense(6, input_dim=3, activation='relu'))
model.add(Dense(8, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])

model.fit(inputs, outputs, epochs=30, batch_size=10)

# NOTE(review): evaluated on the training data itself -- an optimistic
# estimate; a held-out split would give a fairer accuracy figure.
_, accuracy = model.evaluate(inputs, outputs)
print(accuracy)
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment.