import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: the book's original import (from sklearn import cross_validation) was
# removed in newer scikit-learn versions; model_selection is the replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
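The script imports a local utilities module from the cookbook's Chapter03 folder that is not included in the post. If you don't have that file, a minimal sketch along the following lines should work; the book's actual implementation may differ, but the idea is the same: load_data reads a comma-separated file whose last column is the class label, and plot_classifier evaluates the trained model on a mesh grid and shades the decision regions.

# utilities.py (minimal sketch, assuming a comma-separated data file)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Last column is the label, everything before it is the feature vector
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title):
    # Predict over a dense mesh covering the data range, then draw the
    # resulting decision regions with the data points on top
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With these helpers in place, running the script shows the input scatter plot plus decision-boundary plots for the training and test sets, and prints classification reports for both splits to the console.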