- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labeled 2-D points from a text file, visualizes the two classes,
fits an SVC (RBF kernel by default), plots the decision boundary on the
train and test splits, and prints classification reports for both.

Requires the sibling `utilities` module for `load_data` and
`plot_classifier`.
"""

import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (two feature columns X and a label column y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits.
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")