- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""SVM classification demo (Python Machine Learning Cookbook, Ch. 3).

Loads a two-class 2-D dataset, visualizes it, trains an RBF-kernel SVM,
plots the decision boundaries on the training and test splits, and prints
a classification report for each split.

NOTE(review): reconstructed from a forum paste polluted with line-noise,
and ported from Python 2 (`print` statements; the `sklearn.cross_validation`
module, removed in scikit-learn 0.20) to Python 3 / modern scikit-learn.
The printed text and plot styling are otherwise preserved as-is.
"""
import numpy as np
import matplotlib.pyplot as plt

from sklearn.metrics import classification_report
# `sklearn.cross_validation` no longer exists; `train_test_split`
# lives in `sklearn.model_selection` since 0.18 (removal in 0.20).
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

import utilities  # project-local helpers: load_data(), plot_classifier()

# Load input data.
# Assumes X is an (N, 2) feature matrix and y holds labels in {0, 1}
# -- TODO confirm against utilities.load_data and the data file.
# Raw string keeps the Windows path free of escape-sequence surprises.
input_file = r'D:\1.Modeling material\Py_Study\2.code_model\Python-Machine-Learning-Cookbook\Python-Machine-Learning-Cookbook-master\Chapter03\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the original:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures; without this the script exits before rendering.
# (The paste's unreadable trailing lines plausibly held this call.)
plt.show()