- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import classification_report
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

import utilities

# Load input data: utilities.load_data reads the text file and returns
# the feature matrix X and the label vector y.
# (Project helper — exact return types assumed array-like; confirmed by
# the np.array/indexing usage below.)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled black squares for class 0,
# hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both the training and test splits.
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Render all figures created above.
plt.show()