- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
# SVM classification example ("Python Machine Learning Cookbook", Chapter 3):
# load 2-D labelled data, visualize the two classes, train an SVC with an
# RBF kernel, and print classification reports for the train and test splits.
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split  # cross_validation was removed in sklearn 0.20
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities  # project-local helper: provides load_data() and plot_classifier()

# Load input data (feature matrix X and integer class labels y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None',
            edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
# sorted() so target_names aligns with the sorted label order that
# classification_report uses internally.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")