- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
![Rank: 6](static/image/common/star_level3.gif) ![Rank: 6](static/image/common/star_level2.gif)
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data.
# NOTE(review): hard-coded Windows path — points at a local checkout of the
# Python-Machine-Learning-Cookbook repo; adjust to your own copy of
# data_multivar.txt before running.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# 'sklearn.cross_validation' was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with (uncomment one):
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance.
from sklearn.metrics import classification_report

# sorted() so the names line up deterministically with the label order that
# classification_report uses (it sorts the labels internally); bare set()
# iteration order is arbitrary.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")