- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 558
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
import numpy as np
import matplotlib.pyplot as plt

# cross_validation was removed in scikit-learn 0.20; model_selection is the
# long-term home of train_test_split.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities  # project-local helper: load_data() and plot_classifier()

# Load input data: a two-feature, two-class dataset (cookbook Chapter 3 example).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled black squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation (uncomment one to try it):
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

# sorted() makes the label -> name mapping deterministic; plain set() iteration
# order is not guaranteed and could mislabel the report rows.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# Render all figures (no-op under some interactive backends, required otherwise).
plt.show()