import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Pick one kernel: linear, polynomial (degree 3) or RBF
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
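The script imports a helper module called utilities, whose load_data and plot_classifier functions ship with the book's code repository. If you don't have that repository handy, a minimal stand-in along the following lines should work; it assumes data_multivar.txt is comma-separated with the class label in the last column, and the function bodies are my own sketch rather than the book's original code.

# utilities.py -- minimal stand-in for the book's helper module (sketch)
import numpy as np
import matplotlib.pyplot as plt


def load_data(input_file):
    """Load comma-separated samples; the last column is assumed to be the label."""
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]


def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    """Plot the decision regions of a fitted classifier on 2-D data."""
    # Define the plotting range with a small margin around the data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Evaluate the classifier on a dense grid covering that range
    step = 0.01
    xx, yy = np.meshgrid(np.arange(x_min, x_max, step),
                         np.arange(y_min, y_max, step))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Draw the decision regions and overlay the data points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black',
                linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())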