import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation was removed in newer scikit-learn releases;
# train_test_split now lives in sklearn.model_selection
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
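The `utilities` module imported above is the companion helper file from the book's repository and is not included in this post. Below is a minimal sketch of what the two helpers used here might look like, assuming `data_multivar.txt` holds comma-separated rows with the class label in the last column; the function bodies are assumptions based only on how they are called above, not the book's actual file.

# Hypothetical stand-in for the book's utilities.py; behaviour is assumed
# from how load_data and plot_classifier are used in the snippet above.
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes each line is "x1,x2,label" separated by commas
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Evaluate the classifier on a mesh grid and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Paired, edgecolors='black')
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())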
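To compare the three kernel settings shown in the commented-out `params` lines, a small loop like the sketch below (reusing the X_train/X_test split from above) trains one SVC per configuration and prints its accuracy on the held-out data.

from sklearn.svm import SVC
from sklearn.metrics import accuracy_score

# Try each kernel configuration mentioned in the snippet above
for params in [{'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}]:
    clf = SVC(**params)
    clf.fit(X_train, y_train)
    acc = accuracy_score(y_test, clf.predict(X_test))
    print(params, '-> test accuracy:', round(acc, 3))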