- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier with an RBF kernel on 2-D data.

Loads labelled points from a text file via the project-local `utilities`
module, visualises the two classes, fits an SVC on a 75/25 train/test
split, plots the decision boundaries, and prints classification reports
for both the training and the test split.
"""
import numpy as np
import matplotlib.pyplot as plt

# NOTE: `sklearn.cross_validation` was removed in scikit-learn 0.20;
# `train_test_split` now lives in `sklearn.model_selection`.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data.
# NOTE(review): assumes each row of the file is two feature columns plus
# an integer class label — confirm against utilities.load_data.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'.
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled black squares for class 0,
# hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance.
# sorted() makes the label order deterministic regardless of set
# iteration order, so Class-0 always precedes Class-1 in the report.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")