- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data: each row of the file is a 2-D feature vector plus a
# binary class label (0 or 1), parsed by the project-local helper.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# FIX: sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

# Hold out 25% of the samples for testing; fixed seed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with (uncomment one at a time):
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both the training and test splits.
from sklearn.metrics import classification_report

# One display name per distinct label value, e.g. 'Class-0', 'Class-1'.
target_names = ['Class-' + str(int(i)) for i in set(y)]

# FIX: Python 2 print statements converted to Python 3 print() calls
# (mandatory: any scikit-learn release providing model_selection is Py3-only).
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")