import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (train_test_split now lives in sklearn.model_selection; the book's
# sklearn.cross_validation module was removed in newer scikit-learn versions)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
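For anyone who cannot find the book's companion utilities module, below is a minimal sketch of the two helpers the script calls, load_data and plot_classifier. It assumes data_multivar.txt is comma-separated with the class label in the last column; the function names match the calls above, but the bodies are a plausible reconstruction, not the original code from the repository.

# utilities.py -- minimal stand-in for the book's helper module (reconstruction)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes a comma-separated file whose last column is the class label
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a mesh grid covering the data range
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Shade the decision regions and overlay the data points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With a stand-in like this saved as utilities.py next to the script, the whole example runs end to end and produces the input-data scatter plot, the two decision-boundary plots, and the two classification reports.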