[PyTorch Tutorial] P22: Sequential and a Small Hands-on Example


  • Sequential packs a series of layers into a single module, which makes the network more convenient to define and use (see the commented-out Sequential version in the code below).


  • Build a classification network for the CIFAR10 dataset. After flattening, the features form a 1024-element vector (64 × 4 × 4), something earlier lessons did not explain.


  • This lesson works out the padding value: with a 5 × 5 kernel and stride 1, padding=2 keeps the 32 × 32 feature map size unchanged (see the worked calculation after this list).
  • With Sequential, forward() becomes short and clean.


  • Visualization: the model graph can be written to TensorBoard with add_graph.

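As a quick check of that padding choice, the Conv2d output-size formula from the PyTorch documentation can be evaluated directly. The snippet below is a minimal sketch (stride 1 and dilation 1 assumed, matching the network in this lesson); conv_out_size is a helper name introduced here just for illustration.

def conv_out_size(h_in, kernel, padding, stride=1, dilation=1):
    # H_out = floor((H_in + 2*padding - dilation*(kernel-1) - 1) / stride) + 1
    return (h_in + 2 * padding - dilation * (kernel - 1) - 1) // stride + 1

# Keeping 32x32 -> 32x32 with a 5x5 kernel and stride 1 requires 2*p - 4 = 0, i.e. padding = 2:
print(conv_out_size(32, kernel=5, padding=2))  # 32
print(conv_out_size(32, kernel=5, padding=0))  # 28 -- without padding the map shrinks

# After three (conv with padding=2) + (2x2 maxpool) stages the size goes 32 -> 16 -> 8 -> 4,
# so the flattened vector has 64 * 4 * 4 = 1024 elements, matching Linear(1024, 64).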

Runnable code

#!/usr/bin/env python3
# -*- coding:utf-8 -*-

"""
author :24nemo
 date  :2021-07-07
"""

'''
The Sequential version of the same network (kept commented out here):
import torch
from torch import nn
from torch.nn import Conv2d, MaxPool2d, Flatten, Linear, Sequential
from torch.utils.tensorboard import SummaryWriter


class Tudui(nn.Module):
    def __init__(self):
        super(Tudui, self).__init__()
        self.model1 = Sequential(
            Conv2d(3, 32, 5, padding=2),
            MaxPool2d(2),
            Conv2d(32, 32, 5, padding=2),
            MaxPool2d(2),
            Conv2d(32, 64, 5, padding=2),
            MaxPool2d(2),
            Flatten(),
            Linear(1024, 64),
            Linear(64, 10)
        )

    def forward(self, x):
        x = self.model1(x)
        return x

tudui = Tudui()
print(tudui)
input = torch.ones((64, 3, 32, 32))
output = tudui(input)
print(output.shape)

writer = SummaryWriter("../logs_seq")
writer.add_graph(tudui, input)
writer.close()

'''

import torch
from torch import nn
from torch.nn import Conv2d, MaxPool2d, Flatten, Linear


# from torch.utils.tensorboard import SummaryWriter


class Tudui(nn.Module):
    def __init__(self):
        super(Tudui, self).__init__()
        self.conv1 = Conv2d(3, 32, 5, padding=2)  # in_channels=3, out_channels=32, 5x5 kernel; padding=2 was computed to keep the 32x32 size
        self.maxpool1 = MaxPool2d(2)  # MaxPool2d only needs the kernel_size argument here
        self.conv2 = Conv2d(32, 32, 5, padding=2)
        self.maxpool2 = MaxPool2d(2)
        self.conv3 = Conv2d(32, 64, 5, padding=2)
        self.maxpool3 = MaxPool2d(2)
        self.flatten = Flatten()  # flatten the 64 x 4 x 4 feature maps into a vector
        self.linear1 = Linear(64 * 4 * 4, 64)  # 64 * 4 * 4 = 1024 input features
        self.linear2 = Linear(64, 10)

    def forward(self, m):
        m = self.conv1(m)
        m = self.maxpool1(m)
        m = self.conv2(m)
        m = self.maxpool2(m)
        m = self.conv3(m)
        m = self.maxpool3(m)
        m = self.flatten(m)
        m = self.linear1(m)
        m = self.linear2(m)
        return m


tudui = Tudui()
print("tudui:", tudui)
input = torch.ones((64, 3, 32, 32))
output = tudui(input)
print("output.shape:", output.shape)

'''
writer = SummaryWriter("logs_seq")
writer.add_graph(tudui, input)
writer.close()

I couldn't get this visualization to work this time either.
'''
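For reference, once the commented-out block above has run and written the event file, the graph is viewed by launching TensorBoard from a terminal; a minimal sketch, assuming the "logs_seq" directory used above:

# In a terminal, from the directory containing logs_seq:
#   tensorboard --logdir=logs_seq
# then open the printed URL (typically http://localhost:6006) and switch to the Graphs tab.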
