import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn import cross_validation
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = cross_validation.train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print "\n" + "#"*30
print "\nClassifier performance on training dataset\n"
print classification_report(y_train, classifier.predict(X_train), target_names=target_names)
print "#"*30 + "\n"

print "#"*30
print "\nClassification report on test dataset\n"
print classification_report(y_test, y_test_pred, target_names=target_names)
print "#"*30 + "\n"
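Note that the listing above targets Python 2 and the old sklearn.cross_validation module, which was removed in scikit-learn 0.20. Below is a minimal sketch of the same pipeline for Python 3 with a current scikit-learn; it assumes the same utilities.load_data / utilities.plot_classifier helpers and the same data_multivar.txt file as above (the path is shortened here purely for illustration):

import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split   # replaces sklearn.cross_validation
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load the same dataset (shortened path, adjust to your local copy)
X, y = utilities.load_data('data_multivar.txt')

# 75/25 train/test split, same random seed as the original listing
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# RBF kernel as in the original; 'linear' or 'poly' can be swapped in the same way
classifier = SVC(kernel='rbf')
classifier.fit(X_train, y_train)
y_test_pred = classifier.predict(X_test)

# Classification reports for training and test data (print is a function in Python 3)
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassifier performance on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Decision-boundary plots via the book's helper; plt.show() is needed to display them
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')
plt.show()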