import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as solid squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.cross_validation was removed in scikit-learn 0.20; train_test_split now lives in model_selection)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
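The script depends on the local utilities module that ships with the cookbook's Chapter03 code. If you only copied this one file, the two helpers it calls can be approximated as below. This is a minimal sketch, assuming data_multivar.txt holds comma-separated rows with the feature columns first and the class label last; the grid step size and plot styling are guesses and may differ from the book's own utilities.py.

# utilities.py -- minimal stand-in for the cookbook helper (assumed behaviour, not the original file)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes comma-separated rows: feature columns first, class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the trained classifier on a dense grid to shade its decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    step = 0.01
    xx, yy = np.meshgrid(np.arange(x_min, x_max, step),
                         np.arange(y_min, y_max, step))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

Save this next to the script as utilities.py (or point the import at the cookbook's own copy) and the listing above runs as-is.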