import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
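A note on running this today: the listing above is Python 2 code, and sklearn.cross_validation was removed in later scikit-learn releases (its train_test_split now lives in sklearn.model_selection). Below is a minimal sketch of the same split/train/report steps for Python 3 and current scikit-learn; it assumes data_multivar.txt is a comma-separated file with the 0/1 label in the last column (the layout the book's utilities.load_data expects) and skips the plotting helper.

import numpy as np
from sklearn.model_selection import train_test_split  # replaces sklearn.cross_validation
from sklearn.svm import SVC
from sklearn.metrics import classification_report

# Assumption: data_multivar.txt is comma-separated, features first,
# 0/1 label in the last column (same layout utilities.load_data reads).
data = np.loadtxt('data_multivar.txt', delimiter=',')
X, y = data[:, :-1], data[:, -1]

# Same 75/25 split and seed as the original snippet
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

classifier = SVC(kernel='rbf')
classifier.fit(X_train, y_train)
y_test_pred = classifier.predict(X_test)

# print is a function in Python 3
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

The kernel can still be switched by passing kernel='linear' or kernel='poly', degree=3 to SVC, exactly as the commented-out params lines in the original do.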