- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""SVM classification example (Python ML Cookbook, Ch. 3).

Loads a two-feature, two-class dataset, visualizes it, trains an SVM
(RBF kernel by default), plots decision boundaries on the train and
test splits, and prints classification reports for both.

Requires the book's local `utilities` module (load_data, plot_classifier)
and the data file referenced by `input_file`.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (two columns of features, one label column).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: `sklearn.cross_validation` was removed in scikit-learn 0.20;
# the same API now lives in `sklearn.model_selection`.
from sklearn import model_selection
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = model_selection.train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels from the book — uncomment one to experiment:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance.

from sklearn.metrics import classification_report

# Sort the label set so names align with classification_report's
# sorted-label row order (iterating a bare set has no guaranteed order).
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()