- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""Train and evaluate an SVM classifier on 2-D multivariate data.

Reconstructed from a forum paste that had BBS markup garbage interleaved
into the code (original: "Python Machine Learning Cookbook", Chapter 3).
Loads labelled 2-D points, plots the two classes, fits an RBF-kernel SVC
on a 75/25 train/test split, and prints classification reports for both
the training and the test sets.
"""
import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (X: feature matrix, y: class labels) via the project helper.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: class 0 as filled squares, class 1 as hollow squares.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black',
            edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None',
            edgecolors='black', marker='s')
plt.title('Input data')
# NOTE(review): no plt.show() appears in this chunk — confirm the figures
# are displayed/saved elsewhere, or add plt.show() at the end of the script.

###############################################
# Train/test split and SVM training.
# sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# model_selection.train_test_split is the drop-in replacement.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the book:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
from sklearn.metrics import classification_report

# One display name per distinct label, e.g. 'Class-0', 'Class-1'.
target_names = ['Class-' + str(int(i)) for i in set(y)]

# Single-argument parenthesized print works identically on Python 2 and 3.
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train),
                            target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")
|
|