import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (comma-separated features with the class label in the last column)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# sklearn.cross_validation was removed in scikit-learn 0.20; use model_selection instead
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Uncomment one of the lines below to try a different kernel
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
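Note: the script imports a utilities module that ships with the cookbook's repository and is not included in this post. If you do not have that file, the sketch below is a minimal stand-in inferred from how the two helpers are called above. It assumes the data file is comma-separated with the label in the last column, and it is not the cookbook's exact implementation.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Each line: comma-separated feature values followed by the class label
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            data = [float(x) for x in line.split(',')]
            X.append(data[:-1])
            y.append(data[-1])
    return np.array(X), np.array(y)

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the classifier on a dense grid around the data and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    x_grid, y_grid = np.meshgrid(np.arange(x_min, x_max, 0.01),
                                 np.arange(y_min, y_max, 0.01))
    output = classifier.predict(np.c_[x_grid.ravel(), y_grid.ravel()])
    output = output.reshape(x_grid.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(x_grid, y_grid, output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black', linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(x_grid.min(), x_grid.max())
    plt.ylim(y_grid.min(), y_grid.max())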
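If you want to compare the three kernels mentioned in the commented-out params lines, a quick sketch (using the X_train/X_test split defined above; not part of the original recipe) is to loop over the parameter dictionaries and print the test accuracy:

from sklearn.svm import SVC

# Optional: quick comparison of the three kernels from the recipe
for params in [{'kernel': 'linear'}, {'kernel': 'poly', 'degree': 3}, {'kernel': 'rbf'}]:
    clf = SVC(**params).fit(X_train, y_train)
    print(params, 'test accuracy:', clf.score(X_test, y_test))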