import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.model_selection replaces the old sklearn.cross_validation module)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Choose the kernel: uncomment one of the params lines
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
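The script imports a utilities module that ships with the book's code repository (Python-Machine-Learning-Cookbook, Chapter03), which is not shown above. If you only have the data file, the two helpers can be approximated with the minimal sketch below; it assumes data_multivar.txt holds comma-separated rows with the class label in the last column, and it is not the repository's exact implementation.

# Minimal stand-in for the book's utilities module (sketch, assumptions as noted above)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes comma-separated values with the class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the classifier on a dense grid and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, shading='auto', cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.show()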
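The commented-out params lines switch the SVM between a linear, a polynomial, and an RBF kernel. Instead of editing the dict by hand, a short loop like the one below (my addition, not part of the book's recipe) reuses the train/test split from the script above and prints the test accuracy for each kernel choice:

from sklearn.svm import SVC
from sklearn.metrics import accuracy_score

# Compare the three kernel configurations on the same train/test split
kernel_configs = [
    {'kernel': 'linear'},
    {'kernel': 'poly', 'degree': 3},
    {'kernel': 'rbf'},
]

for config in kernel_configs:
    clf = SVC(**config)
    clf.fit(X_train, y_train)
    accuracy = accuracy_score(y_test, clf.predict(X_test))
    print(config, '-> test accuracy:', round(accuracy, 3))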