import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
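Note that the script imports a local utilities module from the book's repository, which isn't included in this post. If you don't have that file, the minimal stand-in below should be enough to run the script. The function bodies are my own assumption, reconstructed only from how load_data and plot_classifier are called above: load_data is assumed to read comma-separated rows with the class label in the last column, and plot_classifier draws the decision regions of a fitted classifier on a mesh grid.

# utilities.py -- minimal stand-in for the book's helper module (assumed internals)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumed file format: comma-separated features, label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the classifier on a mesh grid and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With a 0.01 mesh step the grid can get large if the feature ranges are wide, so increase the step size if the plots are slow to render.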