shiGod · Published 2024-12-12

ResNet (really, trust me 😀)

First, the training parameters: batch size 512/256, Huber loss (if you're not familiar with Huber, look it up yourself), the Nadam optimizer, a learning rate of 0.0002, and 300/200/400 epochs.
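
For reference, a minimal training setup matching these settings might look like the sketch below. It uses the ResNet1D18 factory from the res_net.py listing that follows; the Huber delta, the metric, and the x_train/y_train variables are my assumptions, not from the original post:

import tensorflow as tf
from models.res_net import ResNet1D18

model = ResNet1D18(inp=1, num_classes=8)
model.compile(
    optimizer=tf.keras.optimizers.Nadam(learning_rate=0.0002),
    loss=tf.keras.losses.Huber(),  # delta defaults to 1.0
    metrics=['mae'],               # hypothetical metric choice
)
# model.fit(x_train, y_train, batch_size=512, epochs=300)  # hypothetical data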

Put the code below in the models folder and name it res_net.py.

import tensorflow as tf
from tensorflow.keras import layers, models, Model


class BasicBlock(tf.keras.layers.Layer):
    expansion = 1
    def __init__(self, inp, oup, strides=1, **kwargs):
        super(BasicBlock, self).__init__(**kwargs)
        self.conv1 = layers.Conv1D(oup, kernel_size=3, strides=strides, padding='same', use_bias=False)
        self.bn1 = layers.BatchNormalization()
        self.relu = layers.ReLU()
        self.conv2 = layers.Conv1D(oup, kernel_size=3, strides=1, padding='same', use_bias=False)
        self.bn2 = layers.BatchNormalization()

        # projection shortcut when the shape changes; identity otherwise
        if strides != 1 or inp != oup * self.expansion:
            self.shortcut = models.Sequential([
                layers.Conv1D(oup * self.expansion, kernel_size=1, strides=strides, padding='same', use_bias=False),
                layers.BatchNormalization()
            ])
        else:
            self.shortcut = lambda x, training=None: x  # identity shortcut

    def call(self, x, training=False):
        out = self.conv1(x)
        out = self.bn1(out, training=training)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out, training=training)

        shortcut = self.shortcut(x, training=training)
        out += shortcut  # residual addition
        out = self.relu(out)
        return out

class BottleNeck(tf.keras.layers.Layer):
    expansion = 4
    def __init__(self, inp, oup, strides=1, **kwargs):
        super(BottleNeck, self).__init__(**kwargs)
        self.conv1 = layers.Conv1D(oup, kernel_size=1, strides=1, padding='same', use_bias=False)
        self.bn1 = layers.BatchNormalization()
        self.relu1 = layers.ReLU()
        self.conv2 = layers.Conv1D(oup, kernel_size=3, strides=strides, padding='same', use_bias=False)
        self.bn2 = layers.BatchNormalization()
        self.relu2 = layers.ReLU()
        self.conv3 = layers.Conv1D(oup * self.expansion, kernel_size=1, strides=1, padding='same', use_bias=False)
        self.bn3 = layers.BatchNormalization()
        self.relu3 = layers.ReLU()

        # projection shortcut when the shape changes; identity otherwise
        if strides != 1 or inp != oup * self.expansion:
            self.shortcut = models.Sequential([
                layers.Conv1D(oup * self.expansion, kernel_size=1, strides=strides, padding='same', use_bias=False),
                layers.BatchNormalization()
            ])
        else:
            self.shortcut = lambda x, training=None: x  # identity shortcut

    def call(self, x, training=False):
        out = self.conv1(x)
        out = self.bn1(out, training=training)
        out = self.relu1(out)

        out = self.conv2(out)
        out = self.bn2(out, training=training)
        out = self.relu2(out)

        out = self.conv3(out)
        out = self.bn3(out, training=training)

        out += self.shortcut(x, training=training)
        out = self.relu3(out)
        return out

class ResNet(Model):
    def __init__(self, inp, block, num_blocks, num_classes=8):
        super(ResNet, self).__init__()
        # single-channel input; `inp` sets the output width of the stem conv
        self.in_channels = inp
        self.conv1 = layers.Conv1D(inp, kernel_size=7, strides=2, padding='same', use_bias=False)
        self.bn1 = layers.BatchNormalization()
        self.relu = layers.ReLU()
        self.max_pool = layers.MaxPooling1D(pool_size=3, strides=2, padding='same')

        # num_blocks: e.g. [2, 2, 2, 2] gives ResNet-18, [3, 4, 6, 3] gives ResNet-34
        # channel width doubles at each stage: inp, inp*2, inp*4, inp*8
        self.layer1 = self._make_layers(block, inp, num_blocks[0], strides=1)
        self.layer2 = self._make_layers(block, inp * 2, num_blocks[1], strides=2)
        self.layer3 = self._make_layers(block, inp * 4, num_blocks[2], strides=2)
        self.layer4 = self._make_layers(block, inp * 8, num_blocks[3], strides=2)
        self.avg_pool = layers.GlobalAveragePooling1D()
        # write your fully-connected head (or other layers) here ......
        # e.g. self.fc = layers.Dense(num_classes)

    def _make_layers(self, block, out_channels, blocks, strides):
        strides_list = [strides] + [1] * (blocks - 1)  # only the first block in a stage downsamples
        layers_list = []
        for stride in strides_list:
            layers_list.append(block(self.in_channels, out_channels, stride))
            self.in_channels = out_channels * block.expansion
        return models.Sequential(layers_list)
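    # Note on _make_layers: self.in_channels is advanced to out_channels * block.expansion
    # after every block, so each following block (and stage) sees the correct input width.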

    def call(self, x, training=False):
        x = self.conv1(x)
        x = self.bn1(x, training=training)
        x = self.relu(x)
        x = self.max_pool(x)

        x = self.layer1(x, training=training)
        x = self.layer2(x, training=training)
        x = self.layer3(x, training=training)
        x = self.layer4(x, training=training)

        x = self.avg_pool(x)
        # ...... pass x through your head layers here, e.g. x = self.fc(x)
        return x


def ResNet1D18(inp=1, num_classes=8):
    return ResNet(inp, BasicBlock, [2, 2, 2, 2], num_classes)

def ResNet1D34(inp=1, num_classes=8):
    return ResNet(inp, BasicBlock, [3, 4, 6, 3], num_classes)

def ResNet1D50(inp=1, num_classes=8):
    return ResNet(inp, BottleNeck, [3, 4, 6, 3], num_classes)

if __name__ == "__main__":
    model = ResNet1D18(inp=1, num_classes=8)
    model.build(input_shape=(512, 19035, 1))  # batch=512, length=19035, channel=1
    model.summary()
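
As written (with the head left for you to fill in), calling the model returns the pooled features. A quick forward-pass sketch on a toy batch, just to sanity-check shapes (the random input is obviously not real data):

import numpy as np

dummy = np.random.randn(2, 19035, 1).astype('float32')  # toy batch of 2 sequences
feats = ResNet1D18(inp=1, num_classes=8)(dummy, training=False)
print(feats.shape)  # (2, 8): inp * 8 channels after the last stage, then pooled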

Don't make yours exactly the same as mine!

