- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
# Support-vector-machine demo: load a 2-D two-class dataset, visualise it,
# train an SVC classifier, and report per-class metrics on the train and
# test splits.
#
# NOTE(review): reconstructed from a forum paste that interleaved line-noise
# with the code. Also ported from Python 2 (`print` statements) to Python 3,
# and from the removed `sklearn.cross_validation` module to its replacement
# `sklearn.model_selection` (same function, same arguments).

import numpy as np
import matplotlib.pyplot as plt

from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities  # project-local helper: load_data() and plot_classifier()

# Load input data (feature matrix X and class-label vector y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none',
            edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures (the cookbook original ends this way; without it the
# plots never appear in a non-interactive run).
plt.show()