import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Choose one kernel: linear, polynomial (degree 3) or RBF
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# Sort the labels so target_names line up with the label order used in the report
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures created above
plt.show()
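The script depends on the utilities helper module that ships with the book's repository (Python-Machine-Learning-Cookbook, Chapter03). If that module is not at hand, the minimal stand-in below, saved as utilities.py next to the script, should be enough to run it. The implementation is an assumption inferred from how the functions are called above, not the book's actual code: load_data is assumed to read a comma-separated text file whose last column is the class label, and plot_classifier is assumed to shade the decision regions of a fitted 2-D classifier over the given points.

# utilities.py -- minimal stand-in for the book's helper module (assumed behaviour)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes a comma-separated file whose last column holds the class label
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title):
    # Evaluate the classifier on a dense grid and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())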