- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data.
# NOTE(review): hard-coded absolute Windows path — adjust for your machine,
# or pass via sys.argv if this script is reused.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled black squares, class 1 as
# hollow squares with black edges.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# FIX: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Uncomment one of the alternatives below to try a different kernel.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance.
from sklearn.metrics import classification_report

# FIX: sorted() — classification_report assigns target_names to labels in
# sorted label order; iterating an unordered set could mislabel the rows.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# FIX: without plt.show() the figures are never displayed when the script
# is run non-interactively.
plt.show()