- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""SVM classification example (Python Machine Learning Cookbook, Ch. 3).

Loads a 2-D two-class dataset, plots the raw points, trains an SVM with
an RBF kernel, visualizes the decision boundary on the training and test
splits, and prints classification reports for both.

NOTE(review): this script was recovered from a corrupted (forum-pasted)
copy; garbage characters were stripped, Python 2 ``print`` statements
were converted to ``print()`` calls, and the long-removed
``sklearn.cross_validation`` module was replaced by
``sklearn.model_selection`` (same ``train_test_split`` signature).
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data. The path is machine-specific — presumably each row of
# data_multivar.txt holds two feature columns plus a class label;
# utilities.load_data is expected to return (X, y). TODO confirm.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# model_selection.train_test_split is the drop-in replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the original recipe.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")