import torch
import torch.utils.data as Data
import torch.nn.functional as F
import matplotlib.pyplot as plt
import torch.optim
# torch.manual_seed(1)  # uncomment for reproducible results

# Hyperparameters. NOTE: in the scraped original, "LR = 0.01" had been fused
# into the comment above, leaving LR undefined and crashing the optimizer
# construction below — restored here as real code.
LR = 0.01         # learning rate shared by every optimizer in this demo
BATCH_SIZE = 32
EPOCH = 12

# Fake dataset: y = x^2 plus Gaussian noise, 1000 points in [-1, 1].
x = torch.unsqueeze(torch.linspace(-1, 1, 1000), dim=1)  # shape (1000, 1)
y = x.pow(2) + 0.1 * torch.normal(torch.zeros(*x.size()))

# Plot the dataset.
plt.scatter(x.numpy(), y.numpy())
plt.show()

# Put the dataset into a torch DataLoader for shuffled mini-batches.
torch_dataset = Data.TensorDataset(x, y)
loader = Data.DataLoader(
    dataset=torch_dataset,
    batch_size=BATCH_SIZE,
    shuffle=True,
    num_workers=2,  # NOTE(review): worker processes require the __main__ guard
                    # below on spawn-based platforms (Windows/macOS) — confirm
)
class Net(torch.nn.Module):
    """Default network: a 1 -> 20 -> 1 MLP with a ReLU hidden activation.

    In the scraped original, ``def forward(self, x):`` was fused into a
    trailing comment and ``return x`` was fused with the ``__main__`` guard,
    so the class had no usable forward method — restored here.
    """

    def __init__(self):
        super(Net, self).__init__()
        self.hidden = torch.nn.Linear(1, 20)   # hidden layer
        self.predict = torch.nn.Linear(20, 1)  # output layer

    def forward(self, x):
        x = F.relu(self.hidden(x))  # activation function for hidden layer
        return self.predict(x)      # linear output
if __name__ == '__main__':
    # One fresh network per optimizer so each optimizer trains its own copy
    # from an independent initialization.
    net_SGD = Net()
    net_Momentum = Net()
    net_RMSprop = Net()
    net_Adam = Net()
    nets = [net_SGD, net_Momentum, net_RMSprop, net_Adam]

    # Different optimizers, one per network, all with the same base LR.
    opt_SGD = torch.optim.SGD(net_SGD.parameters(), lr=LR)
    opt_Momentum = torch.optim.SGD(net_Momentum.parameters(), lr=LR, momentum=0.8)
    opt_RMSprop = torch.optim.RMSprop(net_RMSprop.parameters(), lr=LR, alpha=0.9)
    opt_Adam = torch.optim.Adam(net_Adam.parameters(), lr=LR, betas=(0.9, 0.99))
    optimizers = [opt_SGD, opt_Momentum, opt_RMSprop, opt_Adam]

    loss_func = torch.nn.MSELoss()
    losses_his = [[], [], [], []]  # per-optimizer loss history

    # Training: every mini-batch is fed to all four nets so the loss curves
    # are directly comparable step for step.
    for epoch in range(EPOCH):
        print('Epoch: ', epoch)
        for step, (b_x, b_y) in enumerate(loader):
            for net, opt, l_his in zip(nets, optimizers, losses_his):
                output = net(b_x)              # prediction for this net
                loss = loss_func(output, b_y)  # MSE loss for this net
                opt.zero_grad()                # clear gradients from last step
                loss.backward()                # backpropagation
                opt.step()                     # apply gradients
                # loss.item() replaces the deprecated loss.data.numpy();
                # it yields a plain Python float, which plots identically.
                l_his.append(loss.item())

    # Plot the recorded loss curve of each optimizer.
    labels = ['SGD', 'Momentum', 'RMSprop', 'Adam']
    for i, l_his in enumerate(losses_his):
        plt.plot(l_his, label=labels[i])
    plt.legend(loc='best')
    plt.xlabel('Steps')
    plt.ylabel('Loss')
    plt.ylim((0, 0.2))
    plt.show()

最新文章

  1. js数组中sort排序注意的地方
  2. 纯Java配置使用slf4j配置log4j
  3. android 异步加载框架 原理完全解析
  4. Asp.net WebPages框架运行原理浅析(转)
  5. 计算机网络: IP地址,子网掩码,默认网关,DNS服务器详解
  6. spring mvc中的文件上传
  7. maven插件mybatis-generator生成代码配置
  8. jsp_javabean
  9. Codeforces 459E Pashmak and Graph(dp+贪婪)
  10. 记一次有趣的互联网事件及console.log~
  11. 基于Orangpi Zero和Linux ALSA实现WIFI无线音箱(一)
  12. struts基础3-把数据写入页面
  13. telnetlib 中各种 read 函数的意义
  14. Object类型的转为String类型
  15. CodeGear RAD 2007 SP4 最新下载及破解
  16. Flatten Nested List Iterator
  17. Object是个什么鬼
  18. Android 密匙库导出
  19. WCF-异步调用和两种客户端形式
  20. 异步模式:Callbacks, Promises & Async/Await

热门文章

  1. 重拾c++第二天(4):复合类型
  2. 关于neo4j初入门(3)
  3. 7.JavaSE之类型转换
  4. Go的切片:长度和容量
  5. 内网IP的解释
  6. 解决SpringMvc后台接收json数据中文乱码问题
  7. 牛客网在线编程_有序矩阵中第K小的元素
  8. python文件、文件夹的相关操作
  9. devops与CICD
  10. MySQL 清理缓存—flush tablesFlush tables的影响