- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# SVM classification demo ("Python Machine Learning Cookbook", Chapter 3).
#
# Loads a 2-D two-class dataset, visualizes it, trains an SVM classifier
# with an RBF kernel, and prints classification reports for the training
# and test splits. Reconstructed from a forum paste that had injected
# garbage characters into nearly every line.
import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local helper: load_data() and plot_classifier()

# Load input data.
# assumes data_multivar.txt rows are "x1,x2,label" as parsed by
# utilities.load_data — TODO confirm against utilities module
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None',
            edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection exposes the same train_test_split API.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# One display name per distinct label value, e.g. 'Class-0', 'Class-1'.
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|