import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Choose an SVM kernel (uncomment one of the alternatives to compare)
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display the figures created above
plt.show()
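Note that the script imports a utilities module, which is the helper file (utilities.py) that ships in the same chapter folder of the book's repository; without it the imports will fail. If you only have the script, the following is a rough stand-in written from what the script needs: load_data is assumed to read comma-separated rows with the class label in the last column, and plot_classifier is assumed to shade the decision regions on a mesh grid. This is a sketch under those assumptions, not the book's exact code.

# utilities.py -- minimal stand-in (assumed behaviour, not the original helpers)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumes each line looks like: feature1,feature2,...,label
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the classifier on a dense grid and shade the decision regions
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)

    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray, shading='auto')
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With these helpers in place next to the script, running it should show the raw scatter plot, the decision boundaries on the training and test sets, and print the two classification reports.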