import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

plt.show()
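The script relies on a helper module, utilities, that ships with the book's Chapter 3 code but isn't reproduced in this post. If you don't have it, a minimal stand-in could look roughly like the sketch below, assuming data_multivar.txt is a comma-separated file whose last column holds the class label (an approximation, not the book's exact implementation):

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Read a comma-separated file; features first, class label in the last column
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title='Classifier boundaries'):
    # Define a plotting range with a small margin around the data
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0

    # Evaluate the classifier on a dense grid of points
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)

    # Shade the predicted regions and overlay the actual points
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, s=80, edgecolors='black', linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

Save it as utilities.py next to the script and the calls above should work unchanged.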