- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# SVM classification demo (Python Machine Learning Cookbook, Ch. 3):
# load 2-D labelled data, visualize the two classes, train an RBF-kernel
# SVC, and print classification reports for the train and test splits.
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (X: 2-D feature matrix, y: binary labels 0/1 —
# per the load_data usage below; confirm against utilities.load_data).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# sorted() makes the label-name order deterministic (Class-0, Class-1, ...)
# and matched to classification_report's sorted-label convention.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")