
Introduction

Main text

01 - The SVM maximum-margin hyperplane for linearly non-separable data

# Modules to import for this chapter
import numpy as np
from numpy import random
import pandas as pd
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import warnings
warnings.filterwarnings(action = 'ignore')
%matplotlib inline
plt.rcParams['font.sans-serif']=['SimHei'] # fix garbled Chinese characters in plots
plt.rcParams['axes.unicode_minus']=False
from sklearn.datasets import make_classification,make_circles,make_regression
from sklearn.model_selection import train_test_split,KFold
import sklearn.neural_network as net
import sklearn.linear_model as LM
from scipy.stats import multivariate_normal
from sklearn.metrics import r2_score,mean_squared_error,classification_report
from sklearn import svm
import os

N=100
X,Y=make_circles(n_samples=N,noise=0.2,factor=0.5,random_state=123)
X_train, X_test, Y_train, Y_test = train_test_split(X,Y,train_size=0.85, random_state=1)
X1,X2= np.meshgrid(np.linspace(X_train[:,0].min(),X_train[:,0].max(),500),np.linspace(X_train[:,1].min(),X_train[:,1].max(),500))
X0=np.hstack((X1.reshape(len(X1)*len(X2),1),X2.reshape(len(X1)*len(X2),1)))
fig,axes=plt.subplots(nrows=2,ncols=2,figsize=(15,12))
# Fit an SVC for each (C, kernel) combination and draw its decision regions in one subplot
for C,ker,H,L in [(1,'poly',0,0),(1,'rbf',0,1),(1000,'poly',1,0),(1000,'rbf',1,1)]:
    modelSVC=svm.SVC(kernel=ker,random_state=123,C=C)
    modelSVC.fit(X_train,Y_train)
    Y0=modelSVC.predict(X0)   # predict on the grid points to color the two decision regions
    axes[H,L].scatter(X0[np.where(Y0==1),0],X0[np.where(Y0==1),1],c='lightgray')
    axes[H,L].scatter(X0[np.where(Y0==0),0],X0[np.where(Y0==0),1],c='mistyrose')
    for k,m in [(1,'^'),(0,'o')]:   # training points as plain markers, test points outlined in green
        axes[H,L].scatter(X_train[Y_train==k,0],X_train[Y_train==k,1],marker=m,s=40)
        axes[H,L].scatter(X_test[Y_test==k,0],X_test[Y_test==k,1],marker=m,s=40,c='r',edgecolors='g')
    # highlight the support vectors
    axes[H,L].scatter(modelSVC.support_vectors_[:,0],modelSVC.support_vectors_[:,1],marker='o',c='b',s=120,alpha=0.3)
    axes[H,L].set_xlabel("X1")
    axes[H,L].set_ylabel("X2")
    axes[H,L].set_title("线性不可分下的支持向量机最大边界超平面(C=%.1f,Kernel=%s,测试误差=%.2f)"%(C,ker,1-modelSVC.score(X_test,Y_test)))
    axes[H,L].grid(True,linestyle='-.')
plt.savefig("../4.png", dpi=500)

The result is shown in the figure below:
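The main difference across the four panels is how strongly C penalizes margin violations: a larger C typically yields a narrower margin and fewer support vectors. A minimal check of this (a sketch, assuming the X_train and Y_train generated above are still in scope; the exact counts depend on the simulated data):

from sklearn import svm

for C in [1,1000]:
    for ker in ['poly','rbf']:
        m=svm.SVC(kernel=ker,C=C,random_state=123).fit(X_train,Y_train)
        # support_vectors_ holds one row per support vector kept by the fitted model
        print("C=%d, kernel=%s, number of support vectors=%d"%(C,ker,m.support_vectors_.shape[0]))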

02 - SVR and linear regression on 100 sample observations

# Modules to import for this chapter
import numpy as np
from numpy import random
import pandas as pd
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import warnings
warnings.filterwarnings(action = 'ignore')
%matplotlib inline
plt.rcParams['font.sans-serif']=['SimHei'] # fix garbled Chinese characters in plots
plt.rcParams['axes.unicode_minus']=False
from sklearn.datasets import make_classification,make_circles,make_regression
from sklearn.model_selection import train_test_split,KFold
import sklearn.neural_network as net
import sklearn.linear_model as LM
from scipy.stats import multivariate_normal
from sklearn.metrics import r2_score,mean_squared_error,classification_report
from sklearn import svm
import os

N=100
X,Y=make_regression(n_samples=N,n_features=1,random_state=123,noise=50,bias=0)
X_train, X_test, Y_train, Y_test = train_test_split(X,Y,train_size=0.85, random_state=123)
plt.scatter(X_train,Y_train,s=20)
plt.scatter(X_test,Y_test,s=20,marker='*')
plt.title("100个样本观测点的SVR和线性回归")
plt.xlabel("X")
plt.ylabel("Y")
plt.savefig("../3.png", dpi=500)
modelLM=LM.LinearRegression()
modelLM.fit(X_train,Y_train)
X[:,0].sort()   # sort X in place so the fitted curves below plot as smooth lines
fig,axes=plt.subplots(nrows=2,ncols=2,figsize=(12,9))
for C,E,H,L in [(1,0.1,0,0),(1,100,0,1),(100,0.1,1,0),(10000,0.01,1,1)]:
    modelSVR=svm.SVR(C=C,epsilon=E)
    modelSVR.fit(X_train,Y_train)
    axes[H,L].scatter(X_train,Y_train,s=20)
    axes[H,L].scatter(X_test,Y_test,s=20,marker='*')
    # modelSVR.support_ holds indices into the training set, so the support vectors
    # are looked up in X_train/Y_train (not in the sorted full sample X)
    axes[H,L].scatter(X_train[modelSVR.support_],Y_train[modelSVR.support_],marker='o',c='b',s=120,alpha=0.2)
    axes[H,L].plot(X,modelSVR.predict(X),linestyle='-',label="SVR")
    axes[H,L].plot(X,modelLM.predict(X),linestyle='--',label="线性回归",linewidth=1)
    axes[H,L].legend()
    ytrain=modelSVR.predict(X_train)
    ytest=modelSVR.predict(X_test)
    axes[H,L].set_title("SVR(C=%d,epsilon=%.2f,训练MSE=%.2f,测试MSE=%.2f)"%(C,E,mean_squared_error(Y_train,ytrain),
                        mean_squared_error(Y_test,ytest)))
    axes[H,L].set_xlabel("X")
    axes[H,L].set_ylabel("Y")
    axes[H,L].grid(True,linestyle='-.')

plt.savefig("../4.png", dpi=500)

The result is shown in the figure below:
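Here epsilon sets the width of the insensitive tube around the fitted function: training points whose residual is no larger than epsilon contribute nothing to the loss and, apart from points lying exactly on the tube boundary, do not become support vectors. A small sketch to confirm this on the data above (assuming X_train and Y_train from the block above are still in scope):

import numpy as np
from sklearn import svm

for C,E in [(1,0.1),(1,100),(100,0.1),(10000,0.01)]:
    m=svm.SVR(C=C,epsilon=E).fit(X_train,Y_train)
    inside=np.abs(Y_train-m.predict(X_train))<=E   # residuals that fall within the epsilon tube
    print("C=%g, epsilon=%g: support vectors=%d, training points inside the tube=%d"%(C,E,len(m.support_),int(inside.sum())))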

03 - Summarizing the elderly subjects' activities

# Modules to import for this chapter
import numpy as np
from numpy import random
import pandas as pd
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import warnings
warnings.filterwarnings(action = 'ignore')
%matplotlib inline
plt.rcParams['font.sans-serif']=['SimHei'] # fix garbled Chinese characters in plots
plt.rcParams['axes.unicode_minus']=False
from sklearn.datasets import make_classification,make_circles,make_regression
from sklearn.model_selection import train_test_split,KFold
import sklearn.neural_network as net
import sklearn.linear_model as LM
from scipy.stats import multivariate_normal
from sklearn.metrics import r2_score,mean_squared_error,classification_report
from sklearn import svm
import os

#path='C:/Users/xuewe/《Python机器学习:数据建模与分析》代码/健康物联网/'
path='D:/代码与数据/健康物联网/'
#print(os.path.dirname(path))  # returns the directory part of the path
#cwd=os.getcwd()               # get the current working directory
#os.path.join(dirname, filename)
#os.walk(path)

filenames=os.listdir(path=path)
data=pd.DataFrame(columns=['TimeStamp', 'frontal', 'vertical', 'lateral', 'SensorID', 'RSSI','Phase', 'Frequency', 'Activity', 'ID', 'Gender'])
i=1
for filename in filenames:
    tmp=pd.read_csv(path+filename)
    tmp['ID']=i                  # one numeric ID per file (per subject)
    tmp['Gender']=filename[-5]   # gender letter encoded in the file name
    i+=1
    # DataFrame.append was removed in pandas 2.0; pd.concat does the same job
    data=pd.concat([data,tmp],ignore_index=True)

label=['坐在床上','坐在椅子上','躺在床上','行走']
countskey=data['Activity'].value_counts().index  # activity codes ordered from most to least frequent
plt.bar(np.unique(data['Activity']),data['Activity'].value_counts())  # bar heights follow the same descending order
plt.xticks([1,2,3,4],[label[countskey[0]-1],label[countskey[1]-1],label[countskey[2]-1],label[countskey[3]-1]])
plt.title("老人的体位状态")
plt.show()
data['ActivityN']=data['Activity'].map({3:0,1:0,2:1,4:1})  # recode into two groups: 0 = safe posture, 1 = risk posture
plt.bar([1,2],data['ActivityN'].value_counts())
plt.xticks([1,2],['安全体位','风险体位'])
plt.title("老人的体位状态")

plt.savefig("../4.png", dpi=500)
plt.show()

Y=data['Activity'].astype(int)
X=data[['frontal', 'vertical', 'lateral', 'RSSI']]
X_train, X_test, Y_train, Y_test = train_test_split(X,Y,train_size=0.70, random_state=1)
for ker in ['poly','rbf']:
    modelSVC=svm.SVC(kernel=ker,random_state=123)
    modelSVC.fit(X_train,Y_train)
    print("测试误差=%f(%s)"%(1-modelSVC.score(X_test,Y_test),ker))
    print(classification_report(Y_test,modelSVC.predict(X_test)))
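The code above builds the binary safe/risk recoding 'ActivityN' but then trains the SVC on the original four-class 'Activity' target. As a follow-up, here is a sketch of the same comparison for the binary target (assuming the DataFrame data with the 'ActivityN' column is still in scope and the modules imported above are available; the split and random_state mirror the four-class run):

Yb=data['ActivityN'].astype(int)
Xb=data[['frontal', 'vertical', 'lateral', 'RSSI']]
Xb_train, Xb_test, Yb_train, Yb_test = train_test_split(Xb,Yb,train_size=0.70, random_state=1)
for ker in ['poly','rbf']:
    modelb=svm.SVC(kernel=ker,random_state=123)
    modelb.fit(Xb_train,Yb_train)
    print("test error=%f(%s)"%(1-modelb.score(Xb_test,Yb_test),ker))
    print(classification_report(Yb_test,modelb.predict(Xb_test)))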

04 - Regression prediction of car MPG against curb weight using linear regression and support vector machines

# Modules to import for this chapter
import numpy as np
from numpy import random,math
import pandas as pd
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import warnings
warnings.filterwarnings(action = 'ignore')
%matplotlib inline
plt.rcParams['font.sans-serif']=['SimHei'] # fix garbled Chinese characters in plots
plt.rcParams['axes.unicode_minus']=False
from sklearn.datasets import make_classification,make_circles,make_regression
from sklearn.model_selection import train_test_split,KFold
import sklearn.neural_network as net
import sklearn.linear_model as LM
from scipy.stats import multivariate_normal
from sklearn.metrics import r2_score,mean_squared_error,classification_report
from sklearn import svm
import os

data=pd.read_csv('汽车MPG.csv')
data=data.dropna()
data.head()

X=data[['weight','horsepower']]
X0=[[X.max()[0],X.max()[1]]]   # a "new" observation at the maximum weight and horsepower
Y0=data['MPG'].mean()          # overall mean MPG, used only as a rough reference value
modelLM=LM.LinearRegression()
modelSVR=svm.SVR(C=1000,epsilon=0.01)
yhat1=[]
yhat2=[]
fig,axes=plt.subplots(nrows=2,ncols=2,figsize=(12,10))
kf = KFold(n_splits=2,shuffle=True,random_state=123) # K-fold cross-validation (K=2)
H=0
for train_index, test_index in kf.split(X):
    sample=data.iloc[train_index,]          # use each fold's training part as a separate sample
    X=sample[['weight','horsepower']]
    #Y=sample['MPG'].map(lambda x:math.log(x))
    Y=sample['MPG']
    modelLM.fit(X,Y)
    modelSVR.fit(X,Y)
    yhat1.append(modelLM.predict(X0))       # linear regression prediction for the new point X0
    yhat2.append(modelSVR.predict(X0))      # SVR prediction for the new point X0

    axes[H,0].scatter(sample['weight'],sample['MPG'],s=20,label="训练点")
    axes[H,0].set_title("MPG与自重(训练集%d)"%(H+1))
    axes[H,0].set_xlabel("自重")
    axes[H,0].set_ylabel("MPG")
    axes[H,0].scatter(X0[0][0],Y0,c='r',s=40,marker='*',label="新数据点")
    axes[H,0].legend()

    axes[H,1].scatter(sample['weight'],modelLM.predict(X),s=15,marker='*',c='orange',label="线性回归预测")
    axes[H,1].scatter(sample['weight'],modelSVR.predict(X),s=15,marker='o',c='blue',label="SVR预测")
    axes[H,1].set_title("MPG与自重(训练集%d)"%(H+1))
    axes[H,1].set_xlabel("自重")
    axes[H,1].set_ylabel("MPG")
    axes[H,1].scatter(X0[0][0],modelLM.predict(X0),c='r',s=40,marker='<',label="新数据点的线性回归预测")
    axes[H,1].scatter(X0[0][0],modelSVR.predict(X0),c='r',s=40,marker='>',label="新数据点的SVR预测")
    axes[H,1].legend()
    H+=1
plt.savefig("../4.png", dpi=500)
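The loop stores each fold's prediction for the new point X0 in yhat1 (linear regression) and yhat2 (SVR) but never prints them. A short follow-up to see how stable the two models are across folds (a sketch, assuming yhat1, yhat2 and Y0 from the block above are still in scope; note that Y0 is only the overall mean MPG, used as a rough reference, not the true value at X0):

print("Linear regression predictions for X0, one per fold:",[round(v[0],2) for v in yhat1])
print("SVR predictions for X0, one per fold:",[round(v[0],2) for v in yhat2])
print("Averages: LM=%.2f, SVR=%.2f, overall mean MPG=%.2f"%(np.mean(yhat1),np.mean(yhat2),Y0))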

Summary
