import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Choose a kernel; uncomment one of the alternatives to compare decision boundaries
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

plt.show()
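The script expects the book's utilities.py module to sit next to it, providing load_data and plot_classifier. If you don't have that file, the rough stand-in below should be compatible: it assumes data_multivar.txt is a comma-separated file whose last column is the class label, and it draws decision regions by predicting over a mesh grid. The mesh step size and colormaps are my own choices, not the book's.

# Approximate stand-in for the book's utilities.py (not the original code)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Read comma-separated rows; last column is treated as the class label
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    # Plot decision regions of a fitted classifier over a 2-D feature grid
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))

    # Predict the class for every grid point and reshape back to the grid
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

If you keep your own copy of the repository elsewhere, just point input_file at wherever data_multivar.txt lives on your machine.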