- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
"""SVM classification demo (Python Machine Learning Cookbook, Ch. 3).

Loads a two-class 2-D dataset from a text file, visualizes the raw points,
trains an RBF-kernel SVM on a 75/25 train/test split, and prints
classification reports for both splits.
"""
import numpy as np
import matplotlib.pyplot as plt
# sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities  # project-local helper: provides load_data() and plot_classifier()

# Load input data.
# assumes each row of the file is two feature values + one class label — TODO confirm against utilities.load_data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled black squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept from the book for experimentation:
# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]

print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# NOTE(review): the scraped snippet ended without plt.show(), so the figures
# would never appear when run as a script — presumably truncated by the forum
# paste; restored here to match the book's original example.
plt.show()