import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)
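If you don't have the book's utilities.py next to this script, load_data can be approximated with the sketch below. This assumes data_multivar.txt is a comma-separated file with the feature columns first and the class label in the last column; adjust the delimiter or column layout if your copy differs.

def load_data(input_file):
    # Assumed format: comma-separated rows, features first, class label last
    # (uses the numpy import from the top of the script)
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y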
###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])
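As a small aside, if load_data returns NumPy arrays (as above), the same class split can be written with boolean masks instead of list comprehensions:

class_0 = X[y == 0]
class_1 = X[y == 1]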
# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
# (the old sklearn.cross_validation module has been removed from newer
#  scikit-learn releases; use model_selection instead)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')
! W" \) p. J, ?& v4 L, L% C$ a
z% J# I& ]/ m- dy_test_pred = classifier.predict(X_test)
8 `) T5 N- J% ~% ]utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')3 |7 i! u8 Y2 ]4 g6 u
8 Z$ F3 j0 S( x8 I) B [
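plot_classifier also comes from the book's utilities module. If it is not available, a rough stand-in that colours the decision regions over a mesh grid could look like the following sketch (the argument names and the 0.01 step size are my own choices, assuming two-dimensional input):

def plot_classifier(classifier, X, y, title=''):
    # Mesh over the feature space with a small margin around the data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    # Predict every mesh point and draw the decision regions
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.pcolormesh(xx, yy, Z, cmap=plt.cm.gray, shading='auto')
    # Overlay the actual samples
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.title(title)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())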
###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display the figures
plt.show()
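The commented-out params lines above suggest also trying the linear and polynomial kernels. One way to compare them is cross-validated accuracy on the training split; the kernel list and the 5-fold setup below are my own choices, not from the book:

from sklearn.model_selection import cross_val_score

# Compare candidate kernels by 5-fold cross-validated accuracy on the training data
for candidate in [{'kernel': 'linear'},
                  {'kernel': 'poly', 'degree': 3},
                  {'kernel': 'rbf'}]:
    scores = cross_val_score(SVC(**candidate), X_train, y_train, cv=5, scoring='accuracy')
    print(candidate, 'accuracy: %.3f +/- %.3f' % (scores.mean(), scores.std()))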