- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 558
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""SVM classification example (Python Machine Learning Cookbook, Ch. 3).

Loads a two-class 2-D dataset, plots it, trains an SVM with an RBF kernel,
visualizes the decision boundary on the train/test splits, and prints
classification reports for both.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load the input data.
# NOTE(review): hard-coded absolute Windows path — adjust for your machine.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

###############################################
# Plot the input data: filled squares = class 0, hollow squares = class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# FIX: sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection.train_test_split is the supported replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with (uncomment one):
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance.
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures (presumably utilities.plot_classifier only draws;
# without this the script exits before the windows appear — TODO confirm).
plt.show()