proj3_3_classification_tree_simplified.py
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 16 00:32:28 2021
@author: changai
"""
# Data (X, y, attributeNames1, classNames) are loaded by proj1_1_load_data
from proj1_1_load_data import *
from matplotlib.pyplot import figure, plot, xlabel, ylabel, legend, show, boxplot
from sklearn import model_selection, tree
import numpy as np
# Class labels and data matrix come from proj1_1_load_data
y = y.squeeze()
X = X.squeeze().astype(float)
N, M = X.shape
# Standardize the data matrix: zero mean and unit standard deviation per attribute
X = X - np.ones((N, 1)) * X.mean(axis=0)
X = X * (1 / np.std(X, axis=0))
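# Sanity-check sketch (an addition, assuming sklearn is available as imported
# above): sklearn's StandardScaler performs the same per-column standardization,
# so applying it to the already standardized X leaves it numerically unchanged.
from sklearn.preprocessing import StandardScaler
assert np.allclose(StandardScaler().fit_transform(X), X)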
# Attribute and class names also come from proj1_1_load_data
attributeNames = attributeNames1.tolist()
C = len(classNames)
# Tree complexity parameter: constraint on maximum depth
tc = np.arange(2, 21, 1)

# K-fold cross-validation
K = 10
CV = model_selection.KFold(n_splits=K, shuffle=True)
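# Alternative sketch (an addition, not the author's choice): for classification,
# a stratified split keeps the class proportions in every fold; the split call
# would then also need the labels, i.e. CV.split(X, y).
# CV = model_selection.StratifiedKFold(n_splits=K, shuffle=True)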
# Initialize train/test error arrays: one row per depth, one column per fold
Error_train = np.empty((len(tc), K))
Error_test = np.empty((len(tc), K))

k = 0
for train_index, test_index in CV.split(X):
    print('Computing CV fold: {0}/{1}..'.format(k + 1, K))

    # Extract training and test set for current CV fold
    X_train, y_train = X[train_index, :], y[train_index]
    X_test, y_test = X[test_index, :], y[test_index]

    for i, t in enumerate(tc):
        # Fit decision tree classifier, Gini split criterion, max depth t
        dtc = tree.DecisionTreeClassifier(criterion='gini', max_depth=t)
        dtc = dtc.fit(X_train, y_train.ravel())
        y_est_test = dtc.predict(X_test)
        y_est_train = dtc.predict(X_train)

        # Evaluate misclassification rate over train/test data (in this CV fold)
        misclass_rate_test = np.sum(y_est_test != y_test) / float(len(y_est_test))
        misclass_rate_train = np.sum(y_est_train != y_train) / float(len(y_est_train))
        Error_test[i, k], Error_train[i, k] = misclass_rate_test, misclass_rate_train
    k += 1
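# Follow-up sketch (an addition to the original script): report the depth with
# the lowest mean test error across the K folds.
mean_test_error = Error_test.mean(1)
best_depth = tc[np.argmin(mean_test_error)]
print('Lowest mean test error {0:.3f} at max depth {1}'.format(mean_test_error.min(), best_depth))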
# Boxplot of test error across CV folds, one box per tree depth
f = figure()
boxplot(Error_test.T)
xlabel('Model complexity (max tree depth)')
ylabel('Test error across CV folds, K={0}'.format(K))

# Mean train/test error as a function of tree depth
f = figure()
plot(tc, Error_train.mean(1))
plot(tc, Error_test.mean(1))
xlabel('Model complexity (max tree depth)')
ylabel('Error (misclassification rate, CV K={0})'.format(K))
legend(['Error_train', 'Error_test'])
show()
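# Alternative sketch (an addition, not the author's method): the same depth
# search can be run with sklearn's GridSearchCV, which cross-validates each
# candidate max_depth internally and scores classifiers by accuracy.
gs = model_selection.GridSearchCV(
    tree.DecisionTreeClassifier(criterion='gini'),
    param_grid={'max_depth': tc.tolist()},
    cv=K,
)
gs.fit(X, y)
print('GridSearchCV best max_depth:', gs.best_params_['max_depth'])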