import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (sklearn.cross_validation has been removed in newer scikit-learn; use model_selection)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
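The script imports a utilities module that ships with the book's code repository but is not included in this post. Below is a minimal stand-in, not the book's actual file: it assumes data_multivar.txt is comma-separated with the class label in the last column, and the plotting helper is a generic decision-region plot that may differ in detail from the original.

# utilities.py -- minimal stand-in for the helpers used above (an assumption,
# not the book's actual file). load_data() assumes a comma-separated file with
# the class label in the last column; plot_classifier() draws generic decision
# regions over a mesh covering the data.
import numpy as np
import matplotlib.pyplot as plt


def load_data(input_file):
    # All columns except the last are features; the last column is the label
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]


def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a dense grid spanning the data range
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.contourf(xx, yy, mesh_output, cmap=plt.cm.gray, alpha=0.8)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())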
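As an aside (not part of the original post): instead of switching kernels by hand via the commented-out params lines, the three candidates can be compared with a cross-validated grid search. The sketch below reuses X_train and y_train from the split above.

# Hypothetical addition: compare the three kernel settings from the script
# with a cross-validated grid search instead of editing 'params' by hand.
from sklearn.model_selection import GridSearchCV
from sklearn.svm import SVC

param_grid = [
    {'kernel': ['linear']},
    {'kernel': ['poly'], 'degree': [3]},
    {'kernel': ['rbf']},
]
grid = GridSearchCV(SVC(), param_grid, cv=5)
grid.fit(X_train, y_train)

print("Best kernel settings:", grid.best_params_)
print("Cross-validated accuracy: %.3f" % grid.best_score_)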