import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Pick a kernel; the linear and polynomial alternatives are left commented out
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
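The script depends on the utilities module that ships with the book's code for load_data and plot_classifier. If you only have this snippet and not the repo, below is a minimal stand-in sketch, inferred purely from how the two helpers are called above (a comma-separated text file with the label in the last column, and a 2-D decision-region plot); the book's own implementation may differ in details such as the mesh step or colour maps.

# utilities.py -- minimal stand-in for the book's helpers, inferred from the calls above
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes a comma-separated file whose last column holds the class label
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title):
    # Evaluate the classifier on a dense grid covering the 2-D inputs
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Shade the decision regions and overlay the data points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With this stand-in saved as utilities.py next to the script, the example runs end to end; the 0.01 mesh step simply trades plot resolution against runtime and can be coarsened if the feature ranges are large.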