import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation was removed in newer scikit-learn releases;
# use sklearn.model_selection instead
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
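The script depends on a local utilities module from the book's example code, which is not included in this post. For readers who do not have that file, here is a minimal sketch of the two helpers used above, load_data and plot_classifier. It is an assumption-based stand-in, not the book's original implementation: it assumes the input file is comma-separated with the class label in the last column, and the decision-boundary plot is only an approximation of what the original helper draws.

# utilities.py -- minimal stand-in for the helpers used above (assumptions noted)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes comma-separated values with the class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a dense grid and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

If you run the main script as a standalone file, add plt.show() at the end so the input-data and classifier figures are actually displayed.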