- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
![Rank: 6](static/image/common/star_level3.gif) ![Rank: 6](static/image/common/star_level2.gif)
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# SVM classification example: train an SVC on 2-D multivariate data,
# visualize the decision boundary, and print per-class metrics.
# NOTE(review): reconstructed from a forum paste that had BBS line-noise
# fused into nearly every line; statement order follows the original.

import numpy as np
import matplotlib.pyplot as plt

import utilities  # project-local helpers: load_data() and plot_classifier()

# Load input data.
# TODO(review): hard-coded absolute Windows path — make configurable.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares = class 0, hollow squares = class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# FIX: sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# train_test_split moved to sklearn.model_selection (same signature).
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance.
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]

# FIX: converted Python 2 print statements to print() calls
# (these single-argument forms behave identically on Python 2 and 3).
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")