import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split  # replaces the removed sklearn.cross_validation module
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display the input-data, training and test plots
plt.show()
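The script imports a local utilities module (shipped with the book's repository) that is not shown in the post. For reference, here is a minimal sketch of the two helpers it calls; the names load_data and plot_classifier match the calls above, but the bodies below are stand-ins written under the assumption that the data file is comma-separated with the class label in the last column, not the book's actual code.

import numpy as np
import matplotlib.pyplot as plt


def load_data(input_file):
    """Load comma-separated rows; the last column is treated as the label."""
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y


def plot_classifier(classifier, X, y, title=''):
    """Plot the decision regions of a fitted classifier on 2-D data."""
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    # Predict the class for every grid point and reshape back to the grid
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

Switching params between the commented-out 'linear' and 'poly' options and the active 'rbf' kernel, then re-running, shows how the decision boundary changes from a straight line to increasingly curved regions, which is reflected in the classification reports for the training and test sets.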