import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split  # sklearn.cross_validation was removed in newer scikit-learn
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Pick one kernel; uncomment the others to compare
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# Sort the labels so the names line up with classification_report's class order
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Show the input-data and classifier-boundary figures
plt.show()
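The script imports a local utilities module that is not included in the post, so it cannot run as shared. Below is a minimal sketch of what the two helpers it calls (load_data and plot_classifier) might look like, assuming the data file is comma-separated with the features first and the class label in the last column, and that plot_classifier shades decision regions by predicting over a mesh grid; the step size, colormaps, and marker sizes here are arbitrary choices, not taken from the original module.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Read comma-separated rows: features in all but the last column, label in the last
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            values = [float(v) for v in line.strip().split(',')]
            X.append(values[:-1])
            y.append(values[-1])
    return np.array(X), np.array(y)

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the classifier on a dense grid covering the data range
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    step = 0.01  # arbitrary grid resolution
    xx, yy = np.meshgrid(np.arange(x_min, x_max, step), np.arange(y_min, y_max, step))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    # Shade the predicted regions and overlay the data points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black', linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

If this matches the real module closely enough, saving it as utilities.py next to the script should let the code above run end to end and print the two classification reports.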