ResNet18 model


Structure

The following is the output of print(model). Note that Paddle lists sub-layers in the order they were defined, so avagPool and classifier appear before layer1-layer4 even though they run last in forward.

ResNet18(
  (conv1): Conv2D(3, 64, kernel_size=[3, 3], padding=1, data_format=NCHW)
  (bn1): BatchNorm2D(num_features=64, momentum=0.9, epsilon=1e-05)
  (relu): ReLU()
  (avagPool): AdaptiveAvgPool2D(output_size=1)
  (classifier): Linear(in_features=512, out_features=1000, dtype=float32)
  (layer1): Sequential(
    (0): Block(
      (conv1): Conv2D(64, 64, kernel_size=[3, 3], padding=1, data_format=NCHW)
      (bn1): BatchNorm2D(num_features=64, momentum=0.9, epsilon=1e-05)
      (conv2): Conv2D(64, 64, kernel_size=[3, 3], padding=1, data_format=NCHW)
      (bn2): BatchNorm2D(num_features=64, momentum=0.9, epsilon=1e-05)
      (relu): ReLU()
      (downsample): Identity()
    )
    (1): Block(
      (conv1): Conv2D(64, 64, kernel_size=[3, 3], padding=1, data_format=NCHW)
      (bn1): BatchNorm2D(num_features=64, momentum=0.9, epsilon=1e-05)
      (conv2): Conv2D(64, 64, kernel_size=[3, 3], padding=1, data_format=NCHW)
      (bn2): BatchNorm2D(num_features=64, momentum=0.9, epsilon=1e-05)
      (relu): ReLU()
      (downsample): Identity()
    )
  )
  (layer2): Sequential(
    (0): Block(
      (conv1): Conv2D(64, 128, kernel_size=[3, 3], stride=[2, 2], padding=1, data_format=NCHW)
      (bn1): BatchNorm2D(num_features=128, momentum=0.9, epsilon=1e-05)
      (conv2): Conv2D(128, 128, kernel_size=[3, 3], padding=1, data_format=NCHW)
      (bn2): BatchNorm2D(num_features=128, momentum=0.9, epsilon=1e-05)
      (relu): ReLU()
      (downsample): Sequential(
        (0): Conv2D(64, 128, kernel_size=[1, 1], stride=[2, 2], data_format=NCHW)
        (1): BatchNorm2D(num_features=128, momentum=0.9, epsilon=1e-05)
      )
    )
    (1): Block(
      (conv1): Conv2D(128, 128, kernel_size=[3, 3], padding=1, data_format=NCHW)
      (bn1): BatchNorm2D(num_features=128, momentum=0.9, epsilon=1e-05)
      (conv2): Conv2D(128, 128, kernel_size=[3, 3], padding=1, data_format=NCHW)
      (bn2): BatchNorm2D(num_features=128, momentum=0.9, epsilon=1e-05)
      (relu): ReLU()
      (downsample): Identity()
    )
  )
  (layer3): Sequential(
    (0): Block(
      (conv1): Conv2D(128, 256, kernel_size=[3, 3], stride=[2, 2], padding=1, data_format=NCHW)
      (bn1): BatchNorm2D(num_features=256, momentum=0.9, epsilon=1e-05)
      (conv2): Conv2D(256, 256, kernel_size=[3, 3], padding=1, data_format=NCHW)
      (bn2): BatchNorm2D(num_features=256, momentum=0.9, epsilon=1e-05)
      (relu): ReLU()
      (downsample): Sequential(
        (0): Conv2D(128, 256, kernel_size=[1, 1], stride=[2, 2], data_format=NCHW)
        (1): BatchNorm2D(num_features=256, momentum=0.9, epsilon=1e-05)
      )
    )
    (1): Block(
      (conv1): Conv2D(256, 256, kernel_size=[3, 3], padding=1, data_format=NCHW)
      (bn1): BatchNorm2D(num_features=256, momentum=0.9, epsilon=1e-05)
      (conv2): Conv2D(256, 256, kernel_size=[3, 3], padding=1, data_format=NCHW)
      (bn2): BatchNorm2D(num_features=256, momentum=0.9, epsilon=1e-05)
      (relu): ReLU()
      (downsample): Identity()
    )
  )
  (layer4): Sequential(
    (0): Block(
      (conv1): Conv2D(256, 512, kernel_size=[3, 3], stride=[2, 2], padding=1, data_format=NCHW)
      (bn1): BatchNorm2D(num_features=512, momentum=0.9, epsilon=1e-05)
      (conv2): Conv2D(512, 512, kernel_size=[3, 3], padding=1, data_format=NCHW)
      (bn2): BatchNorm2D(num_features=512, momentum=0.9, epsilon=1e-05)
      (relu): ReLU()
      (downsample): Sequential(
        (0): Conv2D(256, 512, kernel_size=[1, 1], stride=[2, 2], data_format=NCHW)
        (1): BatchNorm2D(num_features=512, momentum=0.9, epsilon=1e-05)
      )
    )
    (1): Block(
      (conv1): Conv2D(512, 512, kernel_size=[3, 3], padding=1, data_format=NCHW)
      (bn1): BatchNorm2D(num_features=512, momentum=0.9, epsilon=1e-05)
      (conv2): Conv2D(512, 512, kernel_size=[3, 3], padding=1, data_format=NCHW)
      (bn2): BatchNorm2D(num_features=512, momentum=0.9, epsilon=1e-05)
      (relu): ReLU()
      (downsample): Identity()
    )
  )
)

Process finished with exit code 0
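The printout above is simply what print(model) produces. If a per-layer table of output shapes and parameter counts is wanted instead, paddle.summary can be run on the same model. A minimal sketch, assuming Paddle 2.x and the ResNet18 class defined in the code section below:

import paddle

# ResNet18 is the class defined in the "Code" section below.
model = ResNet18()

# paddle.summary prints a per-layer table of output shapes and parameter counts
# for the given input spec and returns the parameter totals as a dict.
info = paddle.summary(model, input_size=(1, 3, 32, 32))
print(info)  # dict with 'total_params' and 'trainable_params'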

Code

import paddle
import paddle.nn as nn


class Identity(nn.Layer):
    """Pass-through layer used as the shortcut when no downsampling is needed."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        return x


class Block(nn.Layer):
    """Basic residual block: two 3x3 convs with BN, plus a shortcut connection."""

    def __init__(self, in_dim, out_dim, stride):
        super().__init__()
        self.conv1 = nn.Conv2D(in_dim, out_dim, 3, stride, 1, bias_attr=False)
        self.bn1 = nn.BatchNorm2D(out_dim)
        self.conv2 = nn.Conv2D(out_dim, out_dim, 3, 1, 1, bias_attr=False)
        self.bn2 = nn.BatchNorm2D(out_dim)
        self.relu = nn.ReLU()
        # The shortcut needs a 1x1 conv + BN whenever the block changes the spatial
        # size (stride == 2) or the channel count; otherwise it is the identity.
        if stride == 2 or in_dim != out_dim:
            self.downsample = nn.Sequential(
                nn.Conv2D(in_dim, out_dim, 1, stride, bias_attr=False),
                nn.BatchNorm2D(out_dim))
        else:
            self.downsample = Identity()

    def forward(self, x):
        h = x
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)
        x = self.conv2(x)
        x = self.bn2(x)
        identity = self.downsample(h)
        x = x + identity
        x = self.relu(x)
        return x


class ResNet18(nn.Layer):
    def __init__(self, in_dim=64, num_classes=1000):
        super().__init__()
        self.in_dim = in_dim  # tracks the current channel count for makelayer
        # stem: 3x3 stride-1 conv (suited to small inputs such as 32x32)
        self.conv1 = nn.Conv2D(in_channels=3, out_channels=in_dim, kernel_size=3, stride=1, padding=1, bias_attr=False)
        self.bn1 = nn.BatchNorm2D(in_dim)
        self.relu = nn.ReLU()
        # head: global average pooling followed by the classifier
        self.avagPool = nn.AdaptiveAvgPool2D(1)
        self.classifier = nn.Linear(512, num_classes)
        # blocks
        self.layer1 = self.makelayer(64, 2, 1)
        self.layer2 = self.makelayer(128, 2, 2)
        self.layer3 = self.makelayer(256, 2, 2)
        self.layer4 = self.makelayer(512, 2, 2)

    def makelayer(self, out_dim, n_blocks, stride):
        layer_list = []
        # self.in_dim is the class attribute (the current channel count), not a local variable.
        layer_list.append(Block(self.in_dim, out_dim, stride))
        self.in_dim = out_dim
        for i in range(1, n_blocks):
            layer_list.append(Block(self.in_dim, out_dim, stride=1))
        return nn.Sequential(*layer_list)

    def forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)

        # blocks
        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)
        x = self.layer4(x)

        # head
        x = self.avagPool(x)
        # print("preflatten:", x.shape)
        x = x.flatten(1)
        # print("flatten:", x.shape)
        x = self.classifier(x)
        # print("classifier:", x.shape)
        return x


def main():
    model = ResNet18()
    x = paddle.randn([2, 3, 32, 32])
    out = model(x)
    print(model)
    # print("x.shape:", x.shape)


if __name__ == "__main__":
    main()
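
For comparison, Paddle also ships a stock ResNet18 in paddle.vision.models. The stock version uses the ImageNet-style stem (a 7x7 stride-2 convolution followed by max pooling), whereas the implementation above keeps a 3x3 stride-1 stem, which is better suited to small inputs such as 32x32 CIFAR images. A quick sanity-check sketch, assuming a Paddle 2.x install:

import paddle
from paddle.vision.models import resnet18  # stock implementation, for reference

custom = ResNet18()  # the class defined above: 3x3 stride-1 stem
stock = resnet18()   # ImageNet-style stem: 7x7 stride-2 conv + max pool

x = paddle.randn([2, 3, 224, 224])
print(custom(x).shape)  # [2, 1000]
print(stock(x).shape)   # [2, 1000]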