import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='none', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
# Note: sklearn.cross_validation was removed in newer scikit-learn releases;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#"*30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#"*30 + "\n")

print("#"*30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#"*30 + "\n")

plt.show()
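The listing depends on a local utilities module that ships with the book's Chapter03 code and is not reproduced in this post. For reference, here is a minimal sketch of what its two helpers could look like, inferred only from how they are called above; the comma delimiter and the plotting details are assumptions, so check the utilities.py in the repository rather than treating this as the book's exact code.

import numpy as np
import matplotlib.pyplot as plt

def load_data(input_file):
    # Assumption: each row is comma-separated, the last column is the label
    # and the remaining columns are the features.
    data = np.loadtxt(input_file, delimiter=',')
    X, y = data[:, :-1], data[:, -1]
    return X, y

def plot_classifier(classifier, X, y, title=''):
    # Evaluate the trained classifier on a dense grid covering the data
    # and draw the decision regions together with the data points.
    x_min, x_max = X[:, 0].min() - 1.0, X[:, 0].max() + 1.0
    y_min, y_max = X[:, 1].min() - 1.0, X[:, 1].max() + 1.0
    xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.01),
                         np.arange(y_min, y_max, 0.01))
    mesh_output = classifier.predict(np.c_[xx.ravel(), yy.ravel()])
    mesh_output = mesh_output.reshape(xx.shape)
    plt.figure()
    plt.title(title)
    plt.pcolormesh(xx, yy, mesh_output, cmap=plt.cm.gray)
    plt.scatter(X[:, 0], X[:, 1], c=y, edgecolors='black', linewidth=1, cmap=plt.cm.Paired)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())

With these helpers in place, swapping the active params line for one of the commented-out linear or poly settings lets you rerun the script and compare the decision boundaries and classification reports for the different kernels.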