- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
# Dependencies: numpy for array handling, matplotlib for plotting, and the
# chapter's local `utilities` helper module (load_data / plot_classifier).
import numpy as np
import matplotlib.pyplot as plt
import utilities

# Load the input data: utilities.load_data returns the feature matrix X and
# the label vector y parsed from the text file.
# NOTE(review): hard-coded absolute Windows path — adjust for your machine.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
###############################################
# Separate the data into two classes based on the label 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the raw input data: class 0 as filled black squares, class 1 as
# hollow squares, so the two groups are distinguishable in grayscale.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')
###############################################
# Train/test split and SVM training
# FIX: `sklearn.cross_validation` was deprecated in scikit-learn 0.18 and
# removed in 0.20; `sklearn.model_selection.train_test_split` is the
# drop-in replacement with the same signature and behavior.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

# Hold out 25% of the samples for testing; fixed random_state keeps the
# split reproducible across runs.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Kernel choice — uncomment one of the alternatives to compare decision
# boundaries (linear / 3rd-degree polynomial / radial basis function).
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# FIX: sorted() makes the class-name order deterministic (bare set iteration
# order is not guaranteed); Python 3 print() calls replace the Python 2
# print statements — the emitted text is identical.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|