import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"

# Display the plots
plt.show()
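The script relies on the utilities module that ships with the book's GitHub repository (the Python-Machine-Learning-Cookbook-master folder in the path above); it is not part of scikit-learn. If you only have the code above, a rough stand-in is sketched below. It assumes data_multivar.txt is comma-separated with the class label in the last column, and plot_classifier here is a generic decision-boundary plot, not necessarily the book's exact implementation.

# utilities.py -- hypothetical stand-in for the book's helper module
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes a comma-separated text file with the class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title):
    # Evaluate the classifier on a dense grid and draw the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    step = 0.01
    xx, yy = np.meshgrid(np.arange(x_min, x_max, step), np.arange(y_min, y_max, step))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

Save it as utilities.py next to the script (or adjust the import) so the two utilities calls above resolve.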
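Note that the code above targets Python 2 and an old scikit-learn: the print statements and sklearn.cross_validation (removed in scikit-learn 0.20) will not run on a current install. Below is a minimal sketch of the same workflow for Python 3 with sklearn.model_selection, keeping the RBF-kernel SVC, the 75/25 split, and random_state=5; the data file is assumed to be comma-separated with the label in the last column and sitting in the working directory, so adjust input_file for your machine.

import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

# Same data file as above (assumed comma-separated, label in the last column)
input_file = 'data_multivar.txt'
data = np.loadtxt(input_file, delimiter=',')
X, y = data[:, :-1], data[:, -1]

# 75/25 split with the same fixed seed as the original script
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# RBF-kernel SVM, matching the params dict selected above
classifier = SVC(kernel='rbf')
classifier.fit(X_train, y_train)

# Report performance on both splits
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print('\nClassifier performance on training dataset\n')
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print('\nClassification report on test dataset\n')
print(classification_report(y_test, classifier.predict(X_test), target_names=target_names))

Swapping in kernel='linear' or kernel='poly', degree=3 reproduces the other two commented-out parameter sets from the original script.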