import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i]==0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i]==1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:,0], class_0[:,1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:,0], class_1[:,1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

plt.show()