- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
![Rank: 6](static/image/common/star_level3.gif) ![Rank: 6](static/image/common/star_level2.gif)
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
"""SVM classifier demo (Python Machine Learning Cookbook, Ch. 3).

Loads a 2-D, two-class dataset, visualizes the classes, trains an SVC
with an RBF kernel on a 75/25 train/test split, plots the decision
boundaries, and prints classification reports for both splits.
"""
import numpy as np
import matplotlib.pyplot as plt

# NOTE(fix): sklearn.cross_validation was removed in scikit-learn 0.20;
# train_test_split now lives in sklearn.model_selection.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report

import utilities

# Load input data.
# Presumably each row of the file is "x1,x2,label" — confirm against
# utilities.load_data; only X (features) and y (labels) are used here.
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the raw input data: class 0 as filled squares, class 1 as hollow
# squares, so the two classes are distinguishable in grayscale print.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels to experiment with (uncomment one at a time):
# params = {'kernel': 'linear'}
# params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)
utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance
target_names = ['Class-' + str(int(i)) for i in set(y)]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")

# NOTE(fix): without plt.show() the figures created above never render
# when this file is executed as a script.
plt.show()