import numpy as np
import matplotlib.pyplot as plt
import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
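The script depends on the utilities module from the Python-Machine-Learning-Cookbook repo (Chapter03). If you don't have that file on your path, a minimal stand-in for the two helpers it calls might look like the sketch below. This is my own approximation based on what the script expects (load_data reading comma-separated rows with the class label in the last column, plot_classifier drawing decision regions on a mesh grid); the repo's utilities.py is the authoritative version.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumption: each row is comma-separated features with the label last
    X, y = [], []
    with open(input_file, 'r') as f:
        for line in f:
            data = [float(x) for x in line.split(',')]
            X.append(data[:-1])
            y.append(data[-1])
    return np.array(X), np.array(y)

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the classifier on a mesh grid and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01), np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.show()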
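Note that the code above targets Python 2 and an older scikit-learn: the sklearn.cross_validation module was removed in scikit-learn 0.20, and the bare print statements won't parse on Python 3. On a current environment the rough equivalents would be:

from sklearn.model_selection import train_test_split

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# and replace the Python 2 print statements with the print() function, e.g.
print("\n" + "#" * 30)
print(classification_report(y_test, y_test_pred, target_names=target_names))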