吴恩达机器学习课后作业-05偏差与方差

ops/2024/10/18 14:15:50/

偏差与方差

  • 题目
  • 欠拟合
  • 改进欠拟合
    • 影响偏差和方差因素
    • 训练集拟合情况
    • 训练集和测试集代价函数
    • 选择最优lamda
  • 整体代码

在这里插入图片描述

训练集:训练模型

·验证集:模型选择,模型的最终优化

·测试集:利用训练好的模型测试其泛化能力


# Training set: used to fit the model
x_train, y_train = data['X'], data['y']
# Validation set: used for model selection / final tuning
x_val, y_val = data['Xval'], data['yval']
x_val.shape, y_val.shape
# Test set: used to measure generalization of the trained model
x_test, y_test = data['Xtest'], data['ytest']
x_test.shape, y_test.shape

题目

在这里插入图片描述
在这里插入图片描述
在这里插入图片描述

def reg_cost(theta, x, y, lamda):
    """Regularized linear-regression cost.

    theta: 1-D parameter vector; x: design matrix with a bias column;
    y: target column vector; lamda: regularization strength.
    The bias term theta[0] is excluded from the penalty.
    """
    cost = np.sum(np.power(x @ theta - y.flatten(), 2))
    reg = theta[1:] @ theta[1:] * lamda
    return (cost + reg) / (2 * len(x))

def reg_gradient(theta, x, y, lamda):
    """Gradient of reg_cost w.r.t. theta; the bias component is not penalized."""
    grad = (x @ theta - y.flatten()) @ x
    reg = lamda * theta
    reg[0] = 0  # do not regularize the bias term
    return (grad + reg) / (len(x))

def train_mode(x, y, lamda):
    """Fit theta by minimizing reg_cost with scipy's TNC solver; returns theta."""
    theta = np.ones(x.shape[1])
    res = minimize(fun=reg_cost, x0=theta, args=(x, y, lamda),
                   method='TNC', jac=reg_gradient)
    return res.x

欠拟合

在这里插入图片描述
在这里插入图片描述
在这里插入图片描述


"""
训练样本从1开始递增进行训练
比较训练集和验证集上的损失函数的变化情况
"""
def plot_learning_curve(x_train,y_train,x_val,y_val,lamda):x= range(1,len(x_train)+1)training_cost =[]cv_cost =[]for i in x:res = train_mode(x_train[:i,:],y_train[:i,:],lamda)training_cost_i = reg_cost(res,x_train[:i,:],y_train[:i,:],lamda)cv_cost_i = reg_cost(res,x_val,y_val,lamda)training_cost.append(training_cost_i)cv_cost.append(cv_cost_i)plt.plot(x,training_cost,label = 'training cost')plt.plot(x,cv_cost,label = 'cv cost')plt.legend()plt.xlabel("number of training examples")plt.ylabel("error")plt.show()

改进欠拟合

影响偏差和方差因素

在这里插入图片描述
在这里插入图片描述


"""
任务:构造多项式特征,进行多项式回归
"""def poly_feature(x, power):for i in range(2, power + 1):x= np.insert(x, x.shape[1], np.power(x[:, 1], i), axis = 1)return x
"""
归一化
"""
def get_means_stds(x):means = np.mean(x, axis=0)stds = np.std(x, axis=0)return means, stds
def feature_normalize(x, means, stds):
    """Scale every column except the bias (column 0) in place; returns x."""
    x[:, 1:] = (x[:, 1:] - means[1:]) / stds[1:]
    return x

# Polynomial degree used throughout the exercise
power = 6
# Expand train/val/test splits with polynomial features up to degree `power`
x_train_poly=poly_feature(x_train,power)
x_val_poly=poly_feature(x_val,power)
x_test_poly=poly_feature(x_test,power)
# Normalize every split using the TRAINING-set statistics
train_means,train_stds=get_means_stds(x_train_poly)
x_train_norm=feature_normalize(x_train_poly,train_means,train_stds)
x_val_norm=feature_normalize(x_val_poly,train_means,train_stds)
x_test_norm=feature_normalize(x_test_poly,train_means,train_stds)
# Fit without regularization first, to inspect overfitting
theta_fit=train_mode(x_train_norm,y_train,lamda =0)

训练集拟合情况

"""
训练集
绘制数据集和拟合函数
"""
def plot_poly_fit():plot_data()x = np.linspace(-60,60,100)xx= x.reshape(100,1)xx= np.insert(xx,0,1,axis=1)xx= poly_feature(xx,power)xx= feature_normalize(xx,train_means,train_stds)plt.plot(x,xx@theta_fit,'r--')

在这里插入图片描述

训练集和测试集代价函数

plot_learning_curve(x_train_norm,y_train, x_val_norm, y_val, lamda=0)  # no regularization (lamda=0)
此时lamda=0没有加入正则化

在这里插入图片描述
可以看出是高方差,过拟合了,此时lamda=0没有加入正则化
加入正则化如下

plot_learning_curve(x_train_norm,y_train, x_val_norm, y_val, lamda=1)  # moderate regularization

在这里插入图片描述
此时训练集误差增大,验证集误差减小了
但是lamda不能过大了,如下

plot_learning_curve(x_train_norm,y_train, x_val_norm, y_val, lamda=100)  # lamda too large -> underfitting

在这里插入图片描述

选择最优lamda

# Sweep candidate regularization strengths; record train/CV cost for each
lamdas = [0, 0.001, 0.003, 0.01, 0.03, 0.1, 0.3, 1, 2, 3, 10]
training_cost = []
cv_cost = []
for lamda in lamdas:
    res = train_mode(x_train_norm, y_train, lamda)
    # Costs are always evaluated WITHOUT the penalty term (lamda=0)
    tc = reg_cost(res, x_train_norm, y_train, lamda=0)
    cv = reg_cost(res, x_val_norm, y_val, lamda=0)
    training_cost.append(tc)
    cv_cost.append(cv)
plt.plot(lamdas, training_cost, label="training cost")
plt.plot(lamdas, cv_cost, label="cv cost")  # fixed label typo: was "cv cos"
plt.legend()
plt.show()

在这里插入图片描述

l=lamdas[np.argmin(cv_cost)]# pick the lamda with the lowest CV cost
print(l)
# Retrain with the best lamda, then report generalization error on the test set
res = train_mode(x_train_norm,y_train,lamda =l)
test_cost = reg_cost(res,x_test_norm,y_test,lamda = 0)
print(test_cost)

整体代码

import numpy as np
import matplotlib.pyplot as plt
import scipy.io as sio
from scipy.optimize import minimize

def plot_data():
    """Scatter-plot the raw training data (water level vs. dam outflow)."""
    fig, ax = plt.subplots()
    ax.scatter(x_train[:, 1], y_train)
    # fixed label typo: was "og"
    ax.set(xlabel="change in water level(x)",
           ylabel='water flowing out of the dam(y)')

def reg_cost(theta, x, y, lamda):
    """Regularized linear-regression cost (bias term theta[0] not penalized)."""
    cost = np.sum(np.power(x @ theta - y.flatten(), 2))
    reg = theta[1:] @ theta[1:] * lamda
    return (cost + reg) / (2 * len(x))

def reg_gradient(theta, x, y, lamda):
    """Gradient of reg_cost; the bias component carries no penalty."""
    grad = (x @ theta - y.flatten()) @ x
    reg = lamda * theta
    reg[0] = 0  # do not regularize the bias term
    return (grad + reg) / (len(x))

def train_mode(x, y, lamda):
    """Minimize reg_cost with scipy's TNC solver and return the fitted theta."""
    theta = np.ones(x.shape[1])
    res = minimize(fun=reg_cost, x0=theta, args=(x, y, lamda),
                   method='TNC', jac=reg_gradient)
    return res.x

# Train on an increasing number of examples (1..m) and compare how the
# training-set and validation-set costs evolve — a learning curve.
def plot_learning_curve(x_train, y_train, x_val, y_val, lamda):
    """Plot training/CV error against the number of training examples."""
    x = range(1, len(x_train) + 1)
    training_cost = []
    cv_cost = []
    for i in x:
        res = train_mode(x_train[:i, :], y_train[:i, :], lamda)
        training_cost_i = reg_cost(res, x_train[:i, :], y_train[:i, :], lamda)
        cv_cost_i = reg_cost(res, x_val, y_val, lamda)
        training_cost.append(training_cost_i)
        cv_cost.append(cv_cost_i)
    plt.plot(x, training_cost, label='training cost')
    plt.plot(x, cv_cost, label='cv cost')
    plt.legend()
    plt.xlabel("number of training examples")
    plt.ylabel("error")
    plt.show()

# Build polynomial features for polynomial regression.
def poly_feature(x, power):
    """Append columns x[:, 1]**i for i = 2..power (column 0 is the bias)."""
    for i in range(2, power + 1):
        x = np.insert(x, x.shape[1], np.power(x[:, 1], i), axis=1)
    return x
"""
归一化
"""
def get_means_stds(x):means = np.mean(x, axis=0)stds = np.std(x, axis=0)return means, stds
def feature_normalize(x,means,stds):x [:,1:]=(x[:,1:] - means[1:,])/stds[1:]return x
"""
训练集
绘制数据集和拟合函数
"""
def plot_poly_fit():plot_data()x = np.linspace(-60,60,100)xx= x.reshape(100,1)xx= np.insert(xx,0,1,axis=1)xx= poly_feature(xx,power)xx= feature_normalize(xx,train_means,train_stds)plt.plot(x,xx@theta_fit,'r--')data=sio.loadmat("E:/学习/研究生阶段/python-learning/吴恩达机器学习课后作业/code/ex5-bias vs variance/ex5data1.mat")#训练集
# Training set
x_train, y_train = data['X'], data['y']
# Validation set
x_val, y_val = data['Xval'], data['yval']
x_val.shape, y_val.shape
# Test set
x_test, y_test = data['Xtest'], data['ytest']
x_test.shape, y_test.shape
# Prepend the bias column of ones to every split
x_train = np.insert(x_train, 0, 1, axis=1)
x_val = np.insert(x_val, 0, 1, axis=1)
x_test = np.insert(x_test, 0, 1, axis=1)
# plot_data()
theta = np.ones(x_train.shape[1])
lamda = 1
# Earlier experiment toggles (kept for the tutorial's step-by-step flow):
# print(reg_cost(theta,x_train,y_train,lamda))
# print(reg_gradient(theta,x_train,y_train,lamda))
# theta_final=train_mode(x_train,y_train,lamda=0)
# plot_data()
# plt.plot(x_train[:,1],x_train@theta_final,c='r')
# plt.show()
# plot_learning_curve(x_train,y_train, x_val, y_val, lamda)
# Polynomial expansion + normalization; the TRAINING-set statistics are
# reused for the validation and test splits.
power = 6
x_train_poly = poly_feature(x_train, power)
x_val_poly = poly_feature(x_val, power)
x_test_poly = poly_feature(x_test, power)
train_means, train_stds = get_means_stds(x_train_poly)
x_train_norm = feature_normalize(x_train_poly, train_means, train_stds)
x_val_norm = feature_normalize(x_val_poly, train_means, train_stds)
x_test_norm = feature_normalize(x_test_poly, train_means, train_stds)
# Fit without regularization first, to inspect overfitting
theta_fit = train_mode(x_train_norm, y_train, lamda=0)
# plot_poly_fit()
# plot_learning_curve(x_train_norm,y_train, x_val_norm, y_val, lamda=100)
# Sweep candidate regularization strengths; record train/CV cost for each
lamdas = [0, 0.001, 0.003, 0.01, 0.03, 0.1, 0.3, 1, 2, 3, 10]
training_cost = []
cv_cost = []
for lamda in lamdas:
    res = train_mode(x_train_norm, y_train, lamda)
    # Costs are always evaluated WITHOUT the penalty term (lamda=0)
    tc = reg_cost(res, x_train_norm, y_train, lamda=0)
    cv = reg_cost(res, x_val_norm, y_val, lamda=0)
    training_cost.append(tc)
    cv_cost.append(cv)
plt.plot(lamdas, training_cost, label="training cost")
plt.plot(lamdas, cv_cost, label="cv cost")  # fixed label typo: was "cv cos"
plt.legend()
plt.show()
l=lamdas[np.argmin(cv_cost)]# pick the lamda with the lowest CV cost
print(l)
res = train_mode(x_train_norm,y_train,lamda =l)
# Final generalization error on the test set (no penalty term)
test_cost = reg_cost(res,x_test_norm,y_test,lamda = 0)
print(test_cost)

http://www.ppmy.cn/ops/99146.html

相关文章

数码管驱动器ICM2718A

相关引脚说明: WR:控制D0-D7数据是否写入,低电平有效(高到低脉冲) MODE:高电平写入控制字,低电平写入数据 ID4:高电平正常工作模式,低电平关闭工作模式 ID5&#xff…

Elasticsearch8.15 springboot配置类简单解释

Elasticsearch8.15 springboot配置类简单解释 0、配置类代码 Configuration public class ElasticsearchConfig {// 从配置文件中读取 Elasticsearch 服务器的 URLValue("${spring.elasticsearch.rest.uris}")private String serverUrl;// 从配置文件中读取用户名V…

双系统安装:一键解锁电脑新境界,Windows与Linux并肩作战!

目录 🌟 告别单一选择,拥抱无限可能! 🌟 🌈 双系统安装:解锁双重身份的秘密武器 🔧 轻松上手,三步搞定双系统安装 🌟 告别单一选择,拥抱无限可能&#xff0…

MCU的ISR与IRQ

ISR(Interrupt Service Routine):中断服务程序 定义:ISR是计算机系统中用于处理硬件中断的一种特定程序。中断是硬件或软件引起的事件,会暂时打断当前正在运行的任务,以便紧急处理某个事件。ISR的目…

uniapp重新编译在微信开发者工具跳转指定页面

uniapp重新编译在微信开发者工具跳转指定页面 步骤 选择编译模式添加编译模式设置启动页面

PHP农场扶农系统智慧认养智慧乡村系统农场系统小程序源码

🌱科技赋能田园梦 —— 探索“农场扶农系统”与“智慧认养智慧乡村”新篇章🚀 🌈【开篇:田园新风尚,科技引领未来】 在快节奏的都市生活中,你是否曾梦想过拥有一片属于自己的绿色天地?现在&am…

数学建模笔记(四):熵权

背景&基本思想介绍 在实际的评价类问题中,在前面所说的层次分析法以及Topsis法中,指标权重的确定往往是通过主观的评价得来的,如果在没有专家的情况下,我们自己的权重分配往往可能带有一定的主观性,有没有一种可以…

blender--二维平面图标变为三维网格

有时候我们希望把一些二维图片能变成三维网格,本案例我们就针对这一场景进行实现。 首先我们可以先去找一张需要的图片(注意,本例需要图片是svg格式),我们可以在阿里巴巴矢量图标库等平台进行搜索,如图所示,找到需要的…