- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
# SVM classification demo (Python Machine Learning Cookbook, Ch. 3):
# load 2-D labelled data, visualize it, train an RBF-kernel SVC on a
# 75/25 train/test split, plot the decision boundaries, and print
# per-class precision/recall/F1 reports for both splits.
#
# NOTE(review): reconstructed from a forum paste that had injected
# junk characters on most lines; the original logic is preserved.
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (two feature columns + one label column per row;
# exact parsing is delegated to the project-local utilities module).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled black squares, class 1 as
# hollow squares, so the two classes are distinguishable in grayscale.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training.
# sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

# random_state fixed for reproducible splits across runs.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# One display name per distinct label value, e.g. 'Class-0', 'Class-1'.
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")