- 金錢
- 45
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 553
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 45
- 威望
- 3183
- 主題
- 0
|
# SVM classification demo (Python Machine Learning Cookbook, Chapter 3):
# load a 2-D two-class dataset, visualize it, train an SVC with a chosen
# kernel, plot the decision boundary, and print per-class metrics.
import numpy as np
import matplotlib.pyplot as plt
# sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data.
# NOTE(review): hard-coded absolute Windows path — adjust for your machine.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as solid squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
# 'none' (lowercase) is matplotlib's documented value for transparent faces;
# the string 'None' is not a valid color specification.
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Switch the active params dict to try a different kernel.
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
# (single-argument print() calls run identically on Python 2 and 3)
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Display all figures (missing in the original — nothing renders without it
# when the script is run outside an interactive backend).
plt.show()
|