- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Loads labelled points from a text file, visualises the two classes,
fits an SVC (RBF kernel by default), plots the decision boundary on the
train/test splits, and prints classification reports for both.

Requires the project-local ``utilities`` module (``load_data`` and
``plot_classifier`` helpers) alongside numpy/matplotlib/scikit-learn.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (two feature columns + integer class label per row;
# format is whatever utilities.load_data expects — TODO confirm).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None',
            edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection is the supported module.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels left for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# classification_report pairs target_names with labels in sorted order,
# so the names must be generated from sorted(set(y)) — a bare set() has
# no guaranteed iteration order and could mislabel the report rows.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred,
                            target_names=target_names))
print("#" * 30 + "\n")