- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
# SVM classification demo (reconstructed from a forum paste that was corrupted
# by anti-copy character injection).
#
# Loads a 2-class 2-D dataset, plots the raw points, trains an RBF-kernel SVC
# on a 75/25 train/test split, visualizes the decision boundary on both splits,
# and prints classification reports for each.
import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local helper: load_data() and plot_classifier()

# Load input data.
# NOTE(review): hard-coded absolute Windows path — adjust for your machine.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y' (for plotting only;
# the classifier itself is trained on the full X/y arrays).
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled black squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# model_selection provides the same train_test_split.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

# Fixed random_state keeps the split (and reported metrics) reproducible.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the original recipe:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance (precision/recall/F1 per class).
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|