- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
# SVM classification demo:
# load 2-D labeled data, visualize the two classes, train an SVC with an
# RBF kernel, plot its decision boundary on train/test splits, and print
# classification reports for both.
import numpy as np
import matplotlib.pyplot as plt
# NOTE: the original used `from sklearn import cross_validation`, which was
# removed in scikit-learn 0.20; `model_selection` provides the same
# train_test_split with an identical signature.
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report
from sklearn.svm import SVC

import utilities  # project-local helpers: load_data, plot_classifier

# Load input data (expects rows of two feature columns plus a 0/1 label —
# presumably; confirm against utilities.load_data).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled black squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with (uncomment one):
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures (the original never called show(), so nothing
# appeared when run as a plain script).
plt.show()