- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# -*- coding: utf-8 -*-
"""Train and evaluate an SVM classifier on a 2-D two-class dataset.

Loads the data via the project-local ``utilities`` helper, visualizes the
two classes, fits an ``SVC`` (RBF kernel by default), plots the decision
boundary on the train and test splits, and prints classification reports
for both.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (two feature columns X and a label column y).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the original recipe.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance.
from sklearn.metrics import classification_report

# classification_report aligns target_names with labels in sorted order,
# so sort the label set explicitly instead of relying on set iteration order.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures created above (script mode never shows them otherwise).
plt.show()