import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Pick a kernel by switching which 'params' line is active
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
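
The script imports a local utilities module that ships with the book's code repository. If you don't have that file, here is a minimal stand-in sketch of the two helpers used above. It assumes data_multivar.txt is comma-separated with the class label in the last column, and it approximates the decision-boundary plot rather than reproducing the book's exact code.

# utilities.py -- minimal stand-in sketch (an approximation, not the book's original)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Read comma-separated rows; all columns except the last are features
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a mesh grid and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.title(title)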
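
To compare the three kernels from the commented-out 'params' lines on the same split, a quick sketch reusing the variables defined above (accuracy_score is scikit-learn's standard metric):

from sklearn.metrics import accuracy_score
from sklearn.svm import SVC

# Train one SVM per kernel and report test accuracy on the same split
for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    clf = SVC(**params).fit(X_train, y_train)
    acc = accuracy_score(y_test, clf.predict(X_test))
    print(params['kernel'], '-> test accuracy:', round(acc, 3))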