- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 558
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""SVM classification example (Python Machine Learning Cookbook, Ch. 3).

Loads a two-class 2-D dataset, plots it, trains an SVM classifier
(RBF kernel by default), visualizes the decision boundary on the train
and test splits, and prints classification reports for both.
"""
import numpy as np
import matplotlib.pyplot as plt
# sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data (utilities.load_data returns feature matrix X and labels y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares = class 0, hollow squares = class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance.
# Sort the labels: classification_report orders classes by sorted label
# value internally, so target_names must match that order (a bare set()
# iteration order is not guaranteed to).
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")