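SVM classification example from Chapter 3 of the Python Machine Learning Cookbook: load data_multivar.txt, plot the two classes, train an SVC, and print classification reports for the training and test splits.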
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (path points at the Chapter03 data file from the book's repo)
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# (train_test_split now lives in sklearn.model_selection; the old
#  sklearn.cross_validation module has been removed)
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# Sort the labels so the names line up with classification_report's label order
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

plt.show()
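The two commented-out params lines show that the linear and polynomial kernels were also tried. A quick, hypothetical way to compare all three on the same split (a sketch appended to the script above, so it assumes X_train, X_test, y_train, y_test and the SVC import already exist; the accuracy numbers depend on data_multivar.txt):

# Compare the three kernel settings from the commented-out lines above
from sklearn.svm import SVC

for params in ({'kernel': 'linear'},
               {'kernel': 'poly', 'degree': 3},
               {'kernel': 'rbf'}):
    clf = SVC(**params).fit(X_train, y_train)
    print(params, '-> test accuracy:', round(clf.score(X_test, y_test), 3))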
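The utilities module referenced above ships with the book's GitHub repository and is not included in this post. Below is a minimal stand-in sketch, assuming load_data reads comma-separated rows with the class label in the last column and plot_classifier shades the decision regions of a fitted 2-D classifier; the real helpers in the Python-Machine-Learning-Cookbook repo may differ.

# utilities.py -- minimal stand-in for the book's helper module (assumed interface)
import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    """Load comma-separated rows: all columns but the last are features,
    the last column is the class label."""
    data = np.loadtxt(input_file, delimiter=',')
    return data[:, :-1], data[:, -1]

def plot_classifier(classifier, X, y, title):
    """Plot the decision regions of a fitted classifier on 2-D data,
    overlaid with the data points."""
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())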