- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on a 2-D, two-class dataset.

Loads labelled points from a text file, scatter-plots the two classes,
fits a support-vector classifier (RBF kernel by default), visualizes the
decision boundary on the train/test splits via the project's `utilities`
helpers, and prints classification reports for both splits.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (feature rows X and class labels y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection.train_test_split is the supported replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels from the book's exercise — uncomment one to compare.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# One display name per distinct label value (labels may load as floats).
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|