import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

# Kernel choice: linear and polynomial kernels are left commented out; RBF is used here
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"

# Display all the figures
plt.show()
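The script imports a `utilities` module that is not shown here; it ships with the book's code repository, in the same Chapter03 folder as `data_multivar.txt`. For readers without the repository at hand, below is a minimal sketch of what the two helpers used above are assumed to do (the actual implementation may differ): `load_data` reads a comma-separated text file whose last column is the class label, and `plot_classifier` shades the predicted regions on a mesh grid and overlays the data points.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumed format: one sample per line, comma-separated,
    # with the class label in the last column.
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            data = [float(x) for x in line.split(',')]
            X.append(data[:-1])
            y.append(data[-1])
    return np.array(X), np.array(y)

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a dense mesh grid, shade the
    # predicted regions, then overlay the actual data points.
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, shading='auto', cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())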
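Note that the listing targets Python 2 and a pre-0.20 scikit-learn: the `sklearn.cross_validation` module was removed in scikit-learn 0.20 (its functions moved to `sklearn.model_selection`), and the bare `print` statements are Python 2 syntax. On a current Python 3 / scikit-learn setup the affected lines would look roughly like this, with the rest of the workflow unchanged:

from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

# Same 75/25 split and random seed as in the original listing
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

classifier = SVC(kernel='rbf')
classifier.fit(X_train, y_train)
y_test_pred = classifier.predict(X_test)

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")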