import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# train_test_split now lives in sklearn.model_selection
# (the old sklearn.cross_validation module has been removed)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Pick one kernel to train with; the alternatives are left here for comparison
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display the scatter plot and the two decision-boundary figures
plt.show()
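For reference, utilities here is the helper module that ships with the book's repository; it isn't included in this post. load_data presumably just reads the comma-separated text file and splits the last column off as the label, and plot_classifier evaluates the trained model on a mesh grid and shades the decision regions. The sketch below is an assumption along those lines, not the book's exact code; the 0.01 mesh step, the colour maps, and the marker size are illustrative choices.

import numpy as np
import matplotlib.pyplot as plt

def plot_classifier(classifier, X, y, title):
    # Plotting range with a small margin around the data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Evaluate the classifier on a dense grid to obtain the decision regions
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    # Shade the decision regions and overlay the data points
    plt.figure()
    plt.title(title)
    plt.contourf(xx, yy, mesh_output, cmap=plt.cm.Paired, alpha=0.5)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=60, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

To compare kernels, change which params line is uncommented: the linear kernel can only draw a straight boundary and tends to do poorly when the two classes are not linearly separable, while the polynomial and RBF kernels can bend the boundary around the classes.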