import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: train_test_split lives in sklearn.model_selection in current scikit-learn
# (the old sklearn.cross_validation module has been removed)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)

utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

# Display the plots
plt.show()
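The script imports a helper module named utilities that is not included in this post (it ships with the book's code repository). Below is a minimal stand-in sketch, written on the assumption that load_data() reads a comma-separated text file whose last column is the class label, and that plot_classifier() draws the decision regions of a fitted classifier on a mesh grid. It is not the book's actual utilities.py, but something along these lines should be enough to run the script above.

# utilities.py -- hypothetical stand-in for the book's helper module
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    """Read comma-separated rows; the last column is assumed to be the label."""
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    """Plot the decision regions of a fitted classifier on 2-D data."""
    # Plotting range with a small margin around the data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Evaluate the classifier on a dense mesh grid
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Colour the decision regions and overlay the data points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

The mesh step size (0.01 here) trades boundary resolution against runtime; for data with a larger spread, a coarser step keeps the plots responsive.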