- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
# SVM classification demo (Python Machine Learning Cookbook, Chapter 3).
# Loads a 2-D, two-class dataset, visualizes it, trains an RBF-kernel SVM,
# and prints classification reports for the train and test splits.
# NOTE(review): reconstructed from a corrupted forum paste; modernized from
# Python 2 / sklearn<0.18 (`print` statements, `sklearn.cross_validation`)
# to Python 3 / `sklearn.model_selection`.
import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local helper: load_data(), plot_classifier()

# Load input data (X: feature rows, y: class labels).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# `sklearn.cross_validation` was removed in scikit-learn 0.20; the
# replacement module is `sklearn.model_selection`.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the original recipe:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance.
from sklearn.metrics import classification_report

# Sort the label set so target_names lines up with classification_report's
# sorted label order (iterating a bare set gives arbitrary order).
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|