- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on a 2-D two-class dataset.

Loads the data, visualizes the two classes, trains an RBF-kernel SVC,
plots the decision boundary on the train and test splits, and prints
classification reports for both.
"""

import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (utilities.load_data returns feature matrix X and labels y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Render all figures (without this, nothing appears in a plain script run).
plt.show()
|
|