import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures
plt.show()
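The script imports a utilities module that ships with the book's repository (Python-Machine-Learning-Cookbook, Chapter03) and is not included in this post. Judging from how it is called, load_data reads comma-separated rows where the last column is the class label. A minimal stand-in, assuming that file format:

import numpy as np

def load_data(input_file):
    # Assumed format: comma-separated values, last column is the class label
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y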
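Likewise, plot_classifier appears to draw the decision regions of a fitted 2-D classifier together with the data points. A rough stand-in under that assumption (the grid step and colours here are arbitrary choices, not the book's):

import numpy as np
import matplotlib.pyplot as plt

def plot_classifier(classifier, X, y, title=''):
    # Stand-in helper, not the original implementation from the book repo.
    # Define the plotting range with a small margin around the data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Evaluate the classifier on a dense grid of points
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    # Shade the decision regions and overlay the data points
    plt.figure()
    plt.contourf(xx, yy, Z, cmap=plt.cm.gray, alpha=0.3)
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired, edgecolors='black')
    plt.title(title)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())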
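The commented-out params lines are the other kernels the recipe considers. To compare all three on the same split, a quick (hypothetical) loop reusing X_train/X_test/y_train/y_test from the script above:

from sklearn.svm import SVC

# Compare the linear, polynomial, and RBF kernels on the held-out test set
for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    clf = SVC(**params).fit(X_train, y_train)
    print(params, '-> test accuracy: {:.3f}'.format(clf.score(X_test, y_test)))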