import numpy as np
import matplotlib.pyplot as plt
import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.cross_validation was removed in newer scikit-learn; use model_selection instead)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Try different kernels by switching the params dict
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display the figures
plt.show()
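The script depends on a separate utilities module from the cookbook repository that is not included in the post. Below is a minimal sketch of what its load_data and plot_classifier helpers might look like, assuming the data file is comma-separated with the two features first and the class label in the last column; both the file format and the plotting details are assumptions, not the cookbook's exact code.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumed format: comma-separated rows, features in all but the last column, label in the last
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the trained classifier on a dense grid covering the data range
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Shade the predicted decision regions and overlay the data points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With helpers along these lines, the decision boundary plots make the kernel choice easy to compare: rerun the script with the linear, poly, or rbf params dict and watch how the shaded regions change between the training and test figures.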