import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Try a different kernel by uncommenting one of the parameter sets
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
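The script depends on the book's utilities module, which is not included in this post. Below is a minimal stand-in sketch of the two helpers it calls, written from how the script uses them rather than from the book's actual code: it assumes load_data reads a comma-separated text file with the class label in the last column, and plot_classifier shades the decision regions of a fitted classifier on a mesh grid. Function bodies and parameter choices here are illustrative assumptions.

# utilities.py -- hypothetical stand-in for the book's helper module
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumption: comma-separated rows, features first, class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a dense grid covering the data and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With a helper module like this saved next to the script, running it prints the two classification reports to the console and opens three figures: the raw input data, the decision boundary on the training set, and the decision boundary on the test set.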