import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
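
Note: the snippet above targets Python 2 and an older scikit-learn release. print is used as a statement, and the sklearn.cross_validation module was deprecated in 0.18 and removed in 0.20 in favour of sklearn.model_selection. A rough Python 3 equivalent of the same steps, assuming the cookbook's utilities module is still importable and the data file path is adjusted for your machine, could look like this (a sketch, not the book's code):

import numpy as np
from sklearn.model_selection import train_test_split   # replaces sklearn.cross_validation
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities  # helper module from the cookbook repository

# Adjust this path to wherever data_multivar.txt lives on your machine
input_file = 'data_multivar.txt'
X, y = utilities.load_data(input_file)

# Same 75/25 split and RBF-kernel SVM as above
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)
classifier = SVC(kernel='rbf')
classifier.fit(X_train, y_train)

# Report performance on the held-out test set
y_test_pred = classifier.predict(X_test)
target_names = ['Class-' + str(int(i)) for i in set(y)]
print(classification_report(y_test, y_test_pred, target_names=target_names))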
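
The utilities module itself is not included in the post; it ships with the book's GitHub repository (Python-Machine-Learning-Cookbook). If you don't have it, a minimal stand-in matching the call signatures used above might look like the sketch below. This is only a guess at the helper's behaviour: load_data assumes comma-separated rows with the class label in the last column, and plot_classifier just shades the decision regions on a mesh grid.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumed format: one sample per line, comma-separated, label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    # Build a mesh grid with a small margin around the data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))

    # Evaluate the classifier at every grid point and shade the decision regions
    Z = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, Z, cmap=plt.cm.gray, shading='auto')

    # Overlay the actual samples, coloured by class label
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired, edgecolors='black')
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.show()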