- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
![Rank: 6](static/image/common/star_level3.gif) ![Rank: 6](static/image/common/star_level2.gif)
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D labelled data.

Loads points from a text file via the project-local ``utilities`` module,
plots the two classes, fits an RBF-kernel SVC on a 75/25 train/test split,
and prints classification reports for both splits.
"""
import numpy as np
import matplotlib.pyplot as plt

# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# model_selection is the supported home of train_test_split.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data (utilities.load_data is project-local; presumably returns
# a feature matrix X and label vector y — confirm against its definition)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None',
            edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the original:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance on both splits
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|