import sklearn
from sklearn import svm
#### SVC with RBF kernel
# svm.SVC() defaults to the RBF kernel. NOTE(review): the observed outputs
# in the comments below were captured on an older scikit-learn where
# gamma defaulted to 'auto'; newer releases default to gamma='scale',
# so re-running may give slightly different results -- confirm version.
clf = svm.SVC()
# The four corners of the unit square: inputs for every 2-input gate.
x_data = [[0, 0], [0, 1], [1, 0], [1, 1]]

# --- Linearly separable gates ---
## AND gate
y_data = [0, 0, 0, 1]
clf.fit(x_data, y_data)
# Print the predictions: a bare clf.predict(...) expression statement
# silently discards its result when run as a script.
print(clf.predict(x_data))
# Observed: array([0, 0, 0, 0]) -- wrong: the single positive sample
# is outvoted by the majority class on this tiny data set.

## OR gate
y_data = [0, 1, 1, 1]
clf.fit(x_data, y_data)
print(clf.predict(x_data))
# Observed: array([1, 1, 1, 1]) -- also wrong at input [0, 0].

# --- Non-linearly separable gate ---
## XOR gate
y_data = [0, 1, 1, 0]
clf.fit(x_data, y_data)
print(clf.predict(x_data))
# Observed: array([0, 1, 1, 0]) -- correct: the RBF kernel handles XOR.
#### SVC with a linear kernel
# Fit on the XOR targets left in y_data ([0, 1, 1, 0] from the XOR
# experiment above). XOR is not linearly separable, so a linear kernel
# cannot recover it.
clf = svm.SVC(kernel='linear')
clf.fit(x_data, y_data)
# Print rather than discard the prediction result.
print(clf.predict(x_data))
# Observed: array([0, 0, 0, 0]) -- every point collapses to one class,
# as expected for XOR with a linear decision boundary.
#### LinearSVC
# LinearSVC (liblinear-based) fitted on the same XOR targets
# (y_data = [0, 1, 1, 0]). A linear model cannot reproduce XOR, so the
# recorded output array([0, 0, 0, 1]) -- which matches the AND gate --
# was NOT a correct answer for the data actually fitted here; the
# original "Correct answer" note was misleading.
clf = svm.LinearSVC()
clf.fit(x_data, y_data)
# Print rather than discard the prediction result.
print(clf.predict(x_data))
# Observed: array([0, 0, 0, 1]) -- would be correct for AND, but is
# wrong for the XOR targets in y_data.
* non-linear(XOR)는 분류하지만 linear(AND/OR)는 분류하지 못하는 SVC?
- multi-class에서 분류 시에 voter의 결과에 따라 분류됨
- 결과에 0이 많으면(And gate) 모두 0이 되고
y_data = [0, 0, 0, 1]이면 결과가 [0, 0, 0, 0]
- 1이 더 많은 경우(Or gate) 학습 결과가 다음과 같음
y_data = [0, 1, 1, 1]이면 결과가 [1, 1, 1, 1]
* SVC와 LinearSVC의 차이란?