- 金錢
- 46
- 威望
- 3183
- 貢獻值
- 0
- 推廣值
- 0
- 性別
- 保密
- 在線時間
- 38 小時
- 最後登錄
- 2024-2-25
- 主題
- 0
- 精華
- 0
- 閱讀權限
- 70
- 註冊時間
- 2012-3-17
- 帖子
- 556
 
該用戶從未簽到 - 推廣值
- 0
- 貢獻值
- 0
- 金錢
- 46
- 威望
- 3183
- 主題
- 0
|
# SVM classification example (Python Machine Learning Cookbook, Chapter 3).
# Loads 2-D labelled points from a text file, visualizes the two classes,
# trains an RBF-kernel SVM, and prints classification reports for the
# training and test splits.

import numpy as np
import matplotlib.pyplot as plt

import utilities

# Load input data (rows of two feature columns plus a 0/1 class label).
input_file = 'D:\\1.Modeling material\\Py_Study\\2.code_model\\Python-Machine-Learning-Cookbook\\Python-Machine-Learning-Cookbook-master\\Chapter03\\data_multivar.txt'
X, y = utilities.load_data(input_file)

###############################################
# Separate the data into classes based on 'y'
class_0 = np.array([X[i] for i in range(len(X)) if y[i] == 0])
class_1 = np.array([X[i] for i in range(len(X)) if y[i] == 1])

# Plot the input data: filled squares for class 0, hollow squares for class 1.
plt.figure()
plt.scatter(class_0[:, 0], class_0[:, 1], facecolors='black', edgecolors='black', marker='s')
plt.scatter(class_1[:, 0], class_1[:, 1], facecolors='None', edgecolors='black', marker='s')
plt.title('Input data')

###############################################
# Train/test split and SVM training.
# NOTE: sklearn.cross_validation was deprecated in 0.18 and removed in 0.20;
# model_selection is the supported module with the same train_test_split API.
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.25, random_state=5)

# Alternative kernels kept for experimentation, as in the book:
#params = {'kernel': 'linear'}
#params = {'kernel': 'poly', 'degree': 3}
params = {'kernel': 'rbf'}
classifier = SVC(**params)
classifier.fit(X_train, y_train)

utilities.plot_classifier(classifier, X_train, y_train, 'Training dataset')

y_test_pred = classifier.predict(X_test)
utilities.plot_classifier(classifier, X_test, y_test, 'Test dataset')

###############################################
# Evaluate classifier performance

from sklearn.metrics import classification_report

# One display name per distinct label. classification_report assigns names
# in sorted label order, so the label set must be sorted to keep the
# 'Class-N' names aligned with the actual labels.
target_names = ['Class-' + str(int(i)) for i in sorted(set(y))]
print("\n" + "#" * 30)
print("\nClassifier performance on training dataset\n")
print(classification_report(y_train, classifier.predict(X_train), target_names=target_names))
print("#" * 30 + "\n")

print("#" * 30)
print("\nClassification report on test dataset\n")
print(classification_report(y_test, y_test_pred, target_names=target_names))
print("#" * 30 + "\n")