ResNet18和ResNet50的keras实现

大家好,又见面了,我是你们的朋友全栈君。

1.ResNet18

ResNet18

from tensorflow import keras
from tensorflow.keras import layers

INPUT_SIZE = 224   # default spatial input resolution (ImageNet convention)
CLASS_NUM = 1000   # default number of output classes (ImageNet)

# stage_name=2,3,4,5; block_name=a,b,c
def ConvBlock(input_tensor, num_output, stride, stage_name, block_name):
    """Residual block with a projection shortcut (ResNet-18 style).

    Two 3x3 convolutions on the main path; a strided 1x1 convolution on the
    shortcut so channel count and spatial size match before the add.
    Note: this implementation applies ReLU after branch2b (before the add),
    mirroring the original listing.
    """
    filters_a, filters_b = num_output
    prefix = stage_name + block_name

    # Main path: 3x3 conv (possibly strided) -> BN -> ReLU
    main = layers.Conv2D(filters_a, 3, strides=stride, padding='same',
                         name='res' + prefix + '_branch2a')(input_tensor)
    main = layers.BatchNormalization(name='bn' + prefix + '_branch2a')(main)
    main = layers.Activation('relu', name='res' + prefix + '_branch2a_relu')(main)

    # Main path: second 3x3 conv -> BN -> ReLU
    main = layers.Conv2D(filters_b, 3, strides=(1, 1), padding='same',
                         name='res' + prefix + '_branch2b')(main)
    main = layers.BatchNormalization(name='bn' + prefix + '_branch2b')(main)
    main = layers.Activation('relu', name='res' + prefix + '_branch2b_relu')(main)

    # Projection shortcut: strided 1x1 conv + BN matches shapes for the add.
    proj = layers.Conv2D(filters_b, 1, strides=stride, padding='same',
                         name='res' + prefix + '_branch1')(input_tensor)
    proj = layers.BatchNormalization(name='bn' + prefix + '_branch1')(proj)

    merged = layers.add([main, proj], name='res' + prefix)
    return layers.Activation('relu', name='res' + prefix + '_relu')(merged)

def IdentityBlock(input_tensor, num_output, stage_name, block_name):
    """Residual block with an identity shortcut (ResNet-18 style).

    Two 3x3 convolutions on the main path; the input tensor itself is added
    back, so input and output shapes must already match (stride is 1).
    """
    filters_a, filters_b = num_output
    prefix = stage_name + block_name

    main = layers.Conv2D(filters_a, 3, strides=(1, 1), padding='same',
                         name='res' + prefix + '_branch2a')(input_tensor)
    main = layers.BatchNormalization(name='bn' + prefix + '_branch2a')(main)
    main = layers.Activation('relu', name='res' + prefix + '_branch2a_relu')(main)

    main = layers.Conv2D(filters_b, 3, strides=(1, 1), padding='same',
                         name='res' + prefix + '_branch2b')(main)
    main = layers.BatchNormalization(name='bn' + prefix + '_branch2b')(main)
    main = layers.Activation('relu', name='res' + prefix + '_branch2b_relu')(main)

    # Identity shortcut: no convolution, the input flows straight to the add.
    merged = layers.add([main, input_tensor], name='res' + prefix)
    return layers.Activation('relu', name='res' + prefix + '_relu')(merged)

def ResNet18(input_shape, class_num):
    """Build a ResNet-18 classifier as an uncompiled Keras model.

    Args:
        input_shape: Input tensor shape, e.g. (224, 224, 3).
        class_num: Number of output classes for the softmax head.

    Returns:
        keras.Model named 'resnet18'. Layer names follow the Caffe-style
        'resNX_branchY' convention so saved weights remain loadable.
    """
    # 'inputs' rather than 'input' so the builtin input() is not shadowed.
    inputs = keras.Input(shape=input_shape, name='input')

    # conv1: 7x7/2 conv -> BN -> ReLU -> 3x3/2 max pool
    x = layers.Conv2D(64, 7, strides=(2, 2), padding='same', name='conv1')(inputs)
    x = layers.BatchNormalization(name='bn_conv1')(x)
    x = layers.Activation('relu', name='conv1_relu')(x)
    x = layers.MaxPooling2D((3, 3), strides=2, padding='same', name='pool1')(x)

    # conv2_x .. conv5_x: one ConvBlock (projection) + one IdentityBlock each.
    # conv2_x keeps spatial size because pool1 already downsampled.
    for stage, width, stride in (
        ('2', 64, (1, 1)),
        ('3', 128, (2, 2)),
        ('4', 256, (2, 2)),
        ('5', 512, (2, 2)),
    ):
        x = ConvBlock(input_tensor=x, num_output=(width, width), stride=stride,
                      stage_name=stage, block_name='a')
        x = IdentityBlock(input_tensor=x, num_output=(width, width),
                          stage_name=stage, block_name='b')

    # Head: 7x7 average pool, flatten, class_num-way softmax.
    # NOTE(review): the fixed 7x7 pool assumes a 7x7 feature map, i.e. a
    # 224x224 input — confirm before using other input sizes.
    x = layers.AveragePooling2D((7, 7), strides=(1, 1), name='pool5')(x)
    x = layers.Flatten(name='flatten')(x)
    x = layers.Dense(class_num, activation='softmax', name='fc1000')(x)

    model = keras.Model(inputs, x, name='resnet18')
    model.summary()
    return model

if __name__ == '__main__':
    # Smoke test: build the ImageNet-sized (1000-class) model and print its summary.
    model = ResNet18((INPUT_SIZE, INPUT_SIZE, 3), CLASS_NUM)
    print('Done.')

train_resnet18.py（注：下面的代码仅给出与上文相同的 ResNet18 模型定义，只是 CLASS_NUM 改为 2；本文未给出训练循环。）

from tensorflow import keras
from tensorflow.keras import layers

INPUT_SIZE = 224   # spatial input resolution fed to the network
CLASS_NUM = 2      # binary classification (presumably cat vs. dog — see predict script)

# stage_name=2,3,4,5; block_name=a,b,c
def ConvBlock(input_tensor, num_output, stride, stage_name, block_name):
    """Residual block with a projection shortcut (ResNet-18 style).

    Two 3x3 convolutions on the main path; a strided 1x1 convolution on the
    shortcut so channel count and spatial size match before the add.
    Note: this implementation applies ReLU after branch2b (before the add),
    mirroring the original listing.
    """
    filters_a, filters_b = num_output
    prefix = stage_name + block_name

    # Main path: 3x3 conv (possibly strided) -> BN -> ReLU
    main = layers.Conv2D(filters_a, 3, strides=stride, padding='same',
                         name='res' + prefix + '_branch2a')(input_tensor)
    main = layers.BatchNormalization(name='bn' + prefix + '_branch2a')(main)
    main = layers.Activation('relu', name='res' + prefix + '_branch2a_relu')(main)

    # Main path: second 3x3 conv -> BN -> ReLU
    main = layers.Conv2D(filters_b, 3, strides=(1, 1), padding='same',
                         name='res' + prefix + '_branch2b')(main)
    main = layers.BatchNormalization(name='bn' + prefix + '_branch2b')(main)
    main = layers.Activation('relu', name='res' + prefix + '_branch2b_relu')(main)

    # Projection shortcut: strided 1x1 conv + BN matches shapes for the add.
    proj = layers.Conv2D(filters_b, 1, strides=stride, padding='same',
                         name='res' + prefix + '_branch1')(input_tensor)
    proj = layers.BatchNormalization(name='bn' + prefix + '_branch1')(proj)

    merged = layers.add([main, proj], name='res' + prefix)
    return layers.Activation('relu', name='res' + prefix + '_relu')(merged)

def IdentityBlock(input_tensor, num_output, stage_name, block_name):
    """Residual block with an identity shortcut (ResNet-18 style).

    Two 3x3 convolutions on the main path; the input tensor itself is added
    back, so input and output shapes must already match (stride is 1).
    """
    filters_a, filters_b = num_output
    prefix = stage_name + block_name

    main = layers.Conv2D(filters_a, 3, strides=(1, 1), padding='same',
                         name='res' + prefix + '_branch2a')(input_tensor)
    main = layers.BatchNormalization(name='bn' + prefix + '_branch2a')(main)
    main = layers.Activation('relu', name='res' + prefix + '_branch2a_relu')(main)

    main = layers.Conv2D(filters_b, 3, strides=(1, 1), padding='same',
                         name='res' + prefix + '_branch2b')(main)
    main = layers.BatchNormalization(name='bn' + prefix + '_branch2b')(main)
    main = layers.Activation('relu', name='res' + prefix + '_branch2b_relu')(main)

    # Identity shortcut: no convolution, the input flows straight to the add.
    merged = layers.add([main, input_tensor], name='res' + prefix)
    return layers.Activation('relu', name='res' + prefix + '_relu')(merged)

def ResNet18(input_shape, class_num):
    """Build a ResNet-18 classifier as an uncompiled Keras model.

    Args:
        input_shape: Input tensor shape, e.g. (224, 224, 3).
        class_num: Number of output classes for the softmax head.

    Returns:
        keras.Model named 'resnet18'. Layer names follow the Caffe-style
        'resNX_branchY' convention so saved weights remain loadable.
    """
    # 'inputs' rather than 'input' so the builtin input() is not shadowed.
    inputs = keras.Input(shape=input_shape, name='input')

    # conv1: 7x7/2 conv -> BN -> ReLU -> 3x3/2 max pool
    x = layers.Conv2D(64, 7, strides=(2, 2), padding='same', name='conv1')(inputs)
    x = layers.BatchNormalization(name='bn_conv1')(x)
    x = layers.Activation('relu', name='conv1_relu')(x)
    x = layers.MaxPooling2D((3, 3), strides=2, padding='same', name='pool1')(x)

    # conv2_x .. conv5_x: one ConvBlock (projection) + one IdentityBlock each.
    # conv2_x keeps spatial size because pool1 already downsampled.
    for stage, width, stride in (
        ('2', 64, (1, 1)),
        ('3', 128, (2, 2)),
        ('4', 256, (2, 2)),
        ('5', 512, (2, 2)),
    ):
        x = ConvBlock(input_tensor=x, num_output=(width, width), stride=stride,
                      stage_name=stage, block_name='a')
        x = IdentityBlock(input_tensor=x, num_output=(width, width),
                          stage_name=stage, block_name='b')

    # Head: 7x7 average pool, flatten, class_num-way softmax.
    # NOTE(review): the fixed 7x7 pool assumes a 7x7 feature map, i.e. a
    # 224x224 input — confirm before using other input sizes.
    x = layers.AveragePooling2D((7, 7), strides=(1, 1), name='pool5')(x)
    x = layers.Flatten(name='flatten')(x)
    x = layers.Dense(class_num, activation='softmax', name='fc1000')(x)

    model = keras.Model(inputs, x, name='resnet18')
    model.summary()
    return model

if __name__ == '__main__':
    # Build the 2-class model (CLASS_NUM = 2) and print its summary.
    model = ResNet18((INPUT_SIZE, INPUT_SIZE, 3), CLASS_NUM)
    print('Done.')

predict_resnet18.py

import matplotlib.pyplot as plt

from ResNet18 import ResNet18
import cv2
import numpy as np
from tensorflow.keras import backend as K   # K.set_image_dim_ordering('tf')
from tensorflow.keras.utils import to_categorical

INPUT_IMG_SIZE = 224   # network input resolution; matches the ResNet18 builder
NUM_CLASSES = 2        # binary cat-vs-dog classification
# class index -> human-readable label, used for printing predictions
label_dict = { 
   0:'CAT', 1:'DOG'}

def show_predict_probability(y_gts, predictions, x_imgs, predict_probabilitys, idx):
    """Print per-class probabilities for sample `idx`, then display its image.

    y_gts holds ground-truth labels (string or int, convertible via int()),
    predictions holds predicted class indices, predict_probabilitys holds the
    per-sample softmax vectors.
    """
    probs = predict_probabilitys[idx]
    for class_id in range(len(label_dict)):
        print(label_dict[class_id] + ', Probability:%1.9f' % (probs[class_id]))
    print('label: ', label_dict[int(y_gts[idx])], ', predict: ', label_dict[predictions[idx]])
    # Show the image itself so the printed probabilities can be eyeballed.
    plt.figure(figsize=(2, 2))
    plt.imshow(np.reshape(x_imgs[idx], (INPUT_IMG_SIZE, INPUT_IMG_SIZE, 3)))
    plt.show()

def plot_images_labels_prediction(images, labels, prediction, idx, num):
    """Show up to 10 samples in a 2x5 grid with their labels (and predictions).

    Args:
        images: indexable collection of displayable images.
        labels: ground-truth labels, indexed in step with `images`.
        prediction: predicted labels; pass an empty sequence to omit them.
        idx: index of the first sample to display.
        num: how many samples to show; clamped to the grid capacity.
    """
    fig = plt.gcf()
    fig.set_size_inches(12, 14)
    # The grid is 2 rows x 5 cols = 10 axes. The original clamp of 25 let
    # plt.subplot(2, 5, 11) raise ValueError for num > 10.
    if num > 10:
        num = 10
    for i in range(num):
        ax = plt.subplot(2, 5, 1 + i)
        ax.imshow(images[idx], cmap='binary')
        title = 'labels=' + str(labels[idx])
        if len(prediction) > 0:
            # Separator added: the original produced 'labels=0prediction=1'.
            title += ',prediction=' + str(prediction[idx])
        ax.set_title(title, fontsize=10)
        idx += 1
    plt.show()

if __name__ == '__main__':
    # Directory holding the trained weights file (Windows path).
    log_path = r"D:\02.Work\00.LearnML\003.Net\ResNet\log\\"
    # Rebuild the architecture, then load the trained 2-class weights into it.
    model = ResNet18((224, 224, 3), NUM_CLASSES)
    model.load_weights(log_path+"resnet18.h5")

    ### cat dog dataset
    # Each line of test.txt is expected as "<image_path>;<label>" — TODO confirm.
    lines = []
    root_path = r"D:\03.Data\01.CatDog"
    with open(root_path + "\\test.txt") as f:
       lines = f.readlines()

    x_images_normalize = []   # preprocessed images scaled to [0, 1]
    y_labels_onehot = []      # one-hot labels (built but not used below)
    y_labels = []             # raw label strings as read from the file

    for i in range(len(lines)):
        img_path = lines[i].split(";")[0]
        img = cv2.imread(img_path)
        # OpenCV loads BGR; convert to RGB (presumably to match training-time
        # preprocessing — confirm against the training pipeline).
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        img = cv2.resize(img, (INPUT_IMG_SIZE, INPUT_IMG_SIZE))
        img = img / 255
        x_images_normalize.append(img)
        label = to_categorical(lines[i].split(";")[1], num_classes=NUM_CLASSES)
        y_labels_onehot.append(label)
        y_labels.append(lines[i].split(";")[1])
    x_images_normalize = np.array(x_images_normalize)
    # x_images_normalize = x_images_normalize.reshape(-1, INPUT_IMG_SIZE, INPUT_IMG_SIZE, 3)
    y_labels_onehot = np.array(y_labels_onehot)

    # Softmax vectors per sample; argmax picks the predicted class index.
    predict_probability = model.predict(x_images_normalize, verbose=1)
    predict = np.argmax(predict_probability, axis=1)

    plot_images_labels_prediction(x_images_normalize, y_labels, predict, 0, 10)
    show_predict_probability(y_labels, predict, x_images_normalize, predict_probability, 0)
    print('done')

2.ResNet50

ResNet50

from tensorflow import keras
from tensorflow.keras import layers

INPUT_SIZE = 224   # default spatial input resolution (ImageNet convention)
CLASS_NUM = 1000   # default number of output classes (ImageNet)

# stage_name=2,3,4,5; block_name=a,b,c
def ConvBlock(input_tensor, num_output, stride, stage_name, block_name):
    """Bottleneck residual block with a projection shortcut (ResNet-50 style).

    Main path: 1x1 reduce -> 3x3 -> 1x1 expand (no ReLU after the expansion);
    the shortcut is a strided 1x1 convolution so shapes match before the add.
    The stride is applied on branch2a and branch1, as in the Caffe reference.
    """
    reduce_ch, mid_ch, expand_ch = num_output
    prefix = stage_name + block_name

    # 1x1 reduction (carries the stride) -> BN -> ReLU
    main = layers.Conv2D(reduce_ch, 1, strides=stride, padding='same',
                         name='res' + prefix + '_branch2a')(input_tensor)
    main = layers.BatchNormalization(name='bn' + prefix + '_branch2a')(main)
    main = layers.Activation('relu', name='res' + prefix + '_branch2a_relu')(main)

    # 3x3 convolution -> BN -> ReLU
    main = layers.Conv2D(mid_ch, 3, strides=(1, 1), padding='same',
                         name='res' + prefix + '_branch2b')(main)
    main = layers.BatchNormalization(name='bn' + prefix + '_branch2b')(main)
    main = layers.Activation('relu', name='res' + prefix + '_branch2b_relu')(main)

    # 1x1 expansion -> BN; activation happens only after the residual add.
    main = layers.Conv2D(expand_ch, 1, strides=(1, 1), padding='same',
                         name='res' + prefix + '_branch2c')(main)
    main = layers.BatchNormalization(name='bn' + prefix + '_branch2c')(main)

    # Projection shortcut to match the expanded channel count.
    proj = layers.Conv2D(expand_ch, 1, strides=stride, padding='same',
                         name='res' + prefix + '_branch1')(input_tensor)
    proj = layers.BatchNormalization(name='bn' + prefix + '_branch1')(proj)

    merged = layers.add([main, proj], name='res' + prefix)
    return layers.Activation('relu', name='res' + prefix + '_relu')(merged)

def IdentityBlock(input_tensor, num_output, stage_name, block_name):
    """Bottleneck residual block with an identity shortcut (ResNet-50 style).

    Main path: 1x1 reduce -> 3x3 -> 1x1 expand (no ReLU after the expansion);
    the input itself is added back, so shapes must already match (stride 1).
    """
    reduce_ch, mid_ch, expand_ch = num_output
    prefix = stage_name + block_name

    main = layers.Conv2D(reduce_ch, 1, strides=(1, 1), padding='same',
                         name='res' + prefix + '_branch2a')(input_tensor)
    main = layers.BatchNormalization(name='bn' + prefix + '_branch2a')(main)
    main = layers.Activation('relu', name='res' + prefix + '_branch2a_relu')(main)

    main = layers.Conv2D(mid_ch, 3, strides=(1, 1), padding='same',
                         name='res' + prefix + '_branch2b')(main)
    main = layers.BatchNormalization(name='bn' + prefix + '_branch2b')(main)
    main = layers.Activation('relu', name='res' + prefix + '_branch2b_relu')(main)

    # Expansion without a trailing ReLU: activation follows the add.
    main = layers.Conv2D(expand_ch, 1, strides=(1, 1), padding='same',
                         name='res' + prefix + '_branch2c')(main)
    main = layers.BatchNormalization(name='bn' + prefix + '_branch2c')(main)

    merged = layers.add([main, input_tensor], name='res' + prefix)
    return layers.Activation('relu', name='res' + prefix + '_relu')(merged)

def ResNet50(input_shape, class_num):
    """Build a ResNet-50 classifier as an uncompiled Keras model.

    Args:
        input_shape: Input tensor shape, e.g. (224, 224, 3).
        class_num: Number of output classes for the softmax head.

    Returns:
        keras.Model named 'resnet50'. Layer names follow the Caffe-style
        'resNX_branchY' convention so saved weights remain loadable.
    """
    # 'inputs' rather than 'input' so the builtin input() is not shadowed.
    inputs = keras.Input(shape=input_shape, name='input')

    # conv1: 7x7/2 conv -> BN -> ReLU -> 3x3/2 max pool
    x = layers.Conv2D(64, 7, strides=(2, 2), padding='same', name='conv1')(inputs)
    x = layers.BatchNormalization(name='bn_conv1')(x)
    x = layers.Activation('relu', name='conv1_relu')(x)
    x = layers.MaxPooling2D((3, 3), strides=2, padding='same', name='pool1')(x)

    # conv2_x..conv5_x: one projection ConvBlock ('a') then identity blocks.
    # Block counts 3/4/6/3 match the standard ResNet-50 layout; conv2_x keeps
    # spatial size because pool1 already downsampled.
    for stage, filters, stride, block_names in (
        ('2', (64, 64, 256), (1, 1), 'abc'),
        ('3', (128, 128, 512), (2, 2), 'abcd'),
        ('4', (256, 256, 1024), (2, 2), 'abcdef'),
        ('5', (512, 512, 2048), (2, 2), 'abc'),
    ):
        x = ConvBlock(input_tensor=x, num_output=filters, stride=stride,
                      stage_name=stage, block_name=block_names[0])
        for blk in block_names[1:]:
            x = IdentityBlock(input_tensor=x, num_output=filters,
                              stage_name=stage, block_name=blk)

    # Head: 7x7 average pool, flatten, class_num-way softmax.
    # NOTE(review): the fixed 7x7 pool assumes a 7x7 feature map, i.e. a
    # 224x224 input — confirm before using other input sizes.
    x = layers.AveragePooling2D((7, 7), strides=(1, 1), name='pool5')(x)
    x = layers.Flatten(name='flatten')(x)
    x = layers.Dense(class_num, activation='softmax', name='fc1000')(x)

    model = keras.Model(inputs, x, name='resnet50')
    model.summary()
    return model

if __name__ == '__main__':
    # Smoke test: build the ImageNet-sized (1000-class) model and print its summary.
    model = ResNet50((INPUT_SIZE, INPUT_SIZE, 3), CLASS_NUM)
    print('Done.')
版权声明:本文内容由互联网用户自发贡献,该文观点仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请联系我们举报,一经查实,本站将立刻删除。

发布者:全栈程序员-站长,转载请注明出处:https://javaforall.net/141343.html原文链接:https://javaforall.net

(0)
全栈程序员-站长的头像全栈程序员-站长


相关推荐

  • kafka删除topic中的数据_kafka删除数据

    kafka删除topic中的数据_kafka删除数据删除topic里面的数据这里没有单独的清空数据的命令,这里要达到清空数据的目的只需要以下步骤:一、如果当前topic没有使用过即没有传输过信息:可以彻底删除。二、如果当前topic有使用过即有过传输过信息:并没有真正删除topic只是把这个topic标记为删除(markedfordeletion)。想要彻底删除topic数据要经过下面两个步骤:①:删除topic,重新用创建to…

    2022年10月16日
    4
  • gulp pipe缓存_gulp使用教程

    gulp pipe缓存_gulp使用教程首先,gulp的源码里没有任何一部分是定义pipe的。gulp的pipe方法是来自nodejsstreamAPI的。gulp本身是由一系列vinyl模块组织起来的。pipe方法到底是什么呢?pipe跟他字面意思一样只是一个管道例如我有一堆文件var s=gulp.src(["fileA","fileB","fileC"])src方法实际上是’vinyl-fs’模…

    2025年5月22日
    3
  • arp内网攻击_外网和内网怎么设置

    arp内网攻击_外网和内网怎么设置arpspoof是一款进行arp欺骗的工具,攻击者通过毒化受害者arp缓存,将网关mac替换为攻击者mac,然后攻击者可截获受害者发送和收到的数据包,可获取受害者账户、密码等相关敏感信息。本次测试是在局域网内进行,利用kali截获centos相关数据攻击者ip:192.168.157.129受害者ip:192.168.157.2501、在kali中开启端口转发功能:echo…

    2022年10月7日
    4
  • datagrip mac 激活码_在线激活2022.02.16

    (datagrip mac 激活码)2021最新分享一个能用的的激活码出来,希望能帮到需要激活的朋友。目前这个是能用的,但是用的人多了之后也会失效,会不定时更新的,大家持续关注此网站~https://javaforall.net/100143.htmlIntelliJ2021最新激活注册码,破解教程可免费永久激活,亲测有效,上面是详细链接哦~FZ…

    2022年4月1日
    71
  • 浅谈Mysql共享锁、排他锁、悲观锁、乐观锁及其使用场景

    浅谈Mysql共享锁、排他锁、悲观锁、乐观锁及其使用场景Mysql 共享锁 排他锁 悲观锁 乐观锁及其使用场景一 相关名词 表级锁 锁定整个表 页级锁 锁定一页 行级锁 锁定一行 共享锁 S 锁 MyISAM 叫做读锁 排他锁 X 锁 MyISAM 叫做写锁 悲观锁 抽象性 不真实存在这个锁 乐观锁 抽象性 不真实存在这个锁 二 InnoDB 与 MyISAMMysql 在 5

    2025年11月19日
    4
  • 用百度ocr+微信截图实现文字识别

    用百度ocr+微信截图实现文字识别作用:将图片中的文字识别出来一、调用微信截图dll控件将微信截图插件复制到项目文件,使用ctypes加载(胶水语言就是给力)defcapture():try:dll=ctypes.cdll.LoadLibrary(‘PrScrn.dll’)exceptException:print(“Dllloaderror!”)…

    2022年6月3日
    84

发表回复

您的邮箱地址不会被公开。 必填项已用 * 标注

关注全栈程序员社区公众号