-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathsvm.py
More file actions
71 lines (45 loc) · 1.43 KB
/
svm.py
File metadata and controls
71 lines (45 loc) · 1.43 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Nov 20 15:47:12 2018
@author: sgdantas
"""
import numpy as np
from sklearn import svm
from load_show_data import labels, features
from sklearn.model_selection import train_test_split
# Result buffer: 10 repetitions (rows) x 4 kernels (columns).
# np.zeros instead of np.empty: the array is normally fully overwritten by
# svm_kernels, but zeros guarantee no uninitialized memory leaks into the
# reported statistics if a run is interrupted partway.
empty = np.zeros((10, 4))
def split_data():
    """Draw a fresh random 80/20 split of the module-level dataset.

    Returns the 4-tuple (X_train, X_test, y_train, y_test) exactly as
    produced by sklearn's ``train_test_split``.
    """
    split = train_test_split(features, labels, test_size=0.2)
    return split
### SVM with different kernels
def svm_kernels(c, empty):
    """Score SVC classifiers with four kernels over repeated random splits.

    Parameters
    ----------
    c : float
        Regularization strength, passed to ``svm.SVC`` as ``C``.
    empty : np.ndarray
        A (n_runs, 4) array; entry ``[j, i]`` is overwritten in place with
        the test accuracy of kernel ``i`` on random split ``j``.

    Returns
    -------
    np.ndarray
        The same array, now filled with accuracy scores.
    """
    # Hoisted out of the loops: the kernel list never changes.
    kernels = ['linear', 'rbf', 'sigmoid', 'poly']
    # Iterate over the buffer's actual row count instead of a hard-coded 10,
    # so callers may pass a differently sized results array.
    for j in range(empty.shape[0]):
        # Fresh random 80/20 split per repetition.
        X, X_test, y, y_test = split_data()
        for i, kernel in enumerate(kernels):
            clf = svm.SVC(C=c, kernel=kernel, gamma='scale')
            # Labels appear to be one-hot encoded; argmax recovers the
            # integer class indices SVC expects.
            clf.fit(X, np.argmax(y, axis=1))
            empty[j, i] = clf.score(X_test, np.argmax(y_test, axis=1))
    return empty
## for different degrees of the polynomial kernel
#def svm_poly():
#
# deg = [3,5,7,10,15,20]
#
# for a in deg:
# X,X_test,y,y_test = split_data()
# clf = svm.SVC(kernel = 'poly',degree = a)
# clf.fit(X, np.argmax(y,axis = 1))
#
# print(clf.score(X_test,np.argmax(y_test,axis = 1)))
#
#### SVM for different C's
def svm_diff_C():
    """Print per-kernel mean and std accuracy for several values of C.

    For each regularization strength, runs ``svm_kernels`` over the shared
    module-level results buffer and prints the column-wise mean and
    standard deviation (one column per kernel).
    """
    for c in [0.1, 0.25, 0.5, 1, 2, 5, 10]:
        print('using c = %.1f' % c)
        scores = svm_kernels(c, empty)
        print(np.mean(scores, axis=0))
        print(np.std(scores, axis=0))