- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
# SVM classification demo (Python Machine Learning Cookbook, Chapter 3).
# Loads a 2-D multivariate dataset, plots the two classes, trains an
# RBF-kernel SVM, visualizes the decision boundary on the train/test
# splits, and prints classification reports for both.
import numpy as np
import matplotlib.pyplot as plt

import utilities

# train_test_split moved from sklearn.cross_validation to
# sklearn.model_selection in scikit-learn 0.18; the old module was
# removed in 0.20.  Fall back for very old installations.
try:
    from sklearn.model_selection import train_test_split
except ImportError:
    from sklearn.cross_validation import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

# Load input data (feature columns plus a 0/1 class label per row).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept (commented out) for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits.
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")