Dive into DL TF2 -- DenseNet

import tensorflow as tf
print(tf.__version__)

class BottleNeck(tf.keras.layers.Layer):
  def __init__(self, growth_rate, drop_rate):
    super(BottleNeck, self).__init__()
    self.bn1 = tf.keras.layers.BatchNormalization()
    self.conv1 = tf.keras.layers.Conv2D(filters=4 * growth_rate,
                                        kernel_size=(1, 1),
                                        strides=1,
                                        padding="same")
    self.bn2 = tf.keras.layers.BatchNormalization()
    self.conv2 = tf.keras.layers.Conv2D(filters=growth_rate,
                                        kernel_size=(3, 3),
                                        strides=1,
                                        padding="same")
    self.dropout = tf.keras.layers.Dropout(rate=drop_rate)

    self.listLayers = [self.bn1,
                       tf.keras.layers.Activation("relu"),
                       self.conv1,
                       self.bn2,
                       tf.keras.layers.Activation("relu"),
                       self.conv2,
                       self.dropout]

  def call(self, x):
    y = x
    # listLayers is a plain Python list, so iterate over it directly.
    for layer in self.listLayers:
      y = layer(y)
    # Dense connectivity: concatenate the block input and output along the channel axis.
    y = tf.keras.layers.concatenate([x, y], axis=-1)
    return y
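
As a quick sanity check (a small demo added here, not part of the original listing), a BottleNeck keeps the spatial size and appends growth_rate channels to its input:

# Hypothetical demo: output channels = input channels + growth_rate.
demo_block = BottleNeck(growth_rate=32, drop_rate=0.5)
demo_x = tf.random.uniform((4, 8, 8, 3))
print(demo_block(demo_x).shape)   # expected: (4, 8, 8, 3 + 32) = (4, 8, 8, 35)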

# A dense block consists of multiple BottleNecks, each using the same number of output channels (the growth rate).

class DenseBlock(tf.keras.layers.Layer):
  def __init__(self, num_layers, growth_rate, drop_rate=0.5):
    super(DenseBlock, self).__init__()
    self.num_layers = num_layers
    self.growth_rate = growth_rate
    self.drop_rate = drop_rate
    self.listLayers = []
    for _ in range(num_layers):
      self.listLayers.append(BottleNeck(growth_rate=self.growth_rate, drop_rate=self.drop_rate))

  def call(self, x):
    for layer in self.listLayers:
      x = layer(x)
    return x

class TransitionLayer(tf.keras.layers.Layer):
  def __init__(self, out_channels):
    super(TransitionLayer, self).__init__()
    self.bn = tf.keras.layers.BatchNormalization()
    self.conv = tf.keras.layers.Conv2D(filters=out_channels,
                                       kernel_size=(1, 1),
                                       strides=1,
                                       padding="same")
    self.pool = tf.keras.layers.MaxPooling2D(pool_size=(2, 2),
                                             strides=2,
                                             padding="same")

  def call(self, inputs):
    x = self.bn(inputs)
    x = tf.keras.activations.relu(x)
    x = self.conv(x)
    x = self.pool(x)
    return x

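The quick check below uses a tensor Y that was never defined in the original listing; the lines that build it from a small DenseBlock are an assumption added here so the snippet actually runs:

# Assumed setup for Y (not in the original): a 2-layer DenseBlock with growth rate 10
# turns a (4, 8, 8, 3) input into a (4, 8, 8, 23) output.
blk = DenseBlock(num_layers=2, growth_rate=10)
Y = blk(tf.random.uniform((4, 8, 8, 3)))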
# The transition layer halves the spatial size and reduces the channels to 10.
blk = TransitionLayer(10)
print(blk(Y).shape)   # expected: (4, 4, 4, 10)

class DenseNet(tf.keras.Model):
  def __init__(self, num_init_features, growth_rate, block_layers, compression_rate, drop_rate):
    super(DenseNet, self).__init__()
    # Stem: 7x7 convolution followed by 3x3 max pooling, both with stride 2.
    self.conv = tf.keras.layers.Conv2D(filters=num_init_features,
                                       kernel_size=(7, 7),
                                       strides=2,
                                       padding="same")
    self.bn = tf.keras.layers.BatchNormalization()
    self.pool = tf.keras.layers.MaxPooling2D(pool_size=(3, 3),
                                             strides=2,
                                             padding="same")
    # Track the running channel count: each dense block adds growth_rate * num_layers
    # channels, and each transition layer compresses them by compression_rate.
    self.num_channels = num_init_features
    self.dense_block_1 = DenseBlock(num_layers=block_layers[0], growth_rate=growth_rate, drop_rate=drop_rate)
    self.num_channels += growth_rate * block_layers[0]
    self.num_channels = compression_rate * self.num_channels
    self.transition_1 = TransitionLayer(out_channels=int(self.num_channels))
    self.dense_block_2 = DenseBlock(num_layers=block_layers[1], growth_rate=growth_rate, drop_rate=drop_rate)
    self.num_channels += growth_rate * block_layers[1]
    self.num_channels = compression_rate * self.num_channels
    self.transition_2 = TransitionLayer(out_channels=int(self.num_channels))
    self.dense_block_3 = DenseBlock(num_layers=block_layers[2], growth_rate=growth_rate, drop_rate=drop_rate)
    self.num_channels += growth_rate * block_layers[2]
    self.num_channels = compression_rate * self.num_channels
    self.transition_3 = TransitionLayer(out_channels=int(self.num_channels))
    self.dense_block_4 = DenseBlock(num_layers=block_layers[3], growth_rate=growth_rate, drop_rate=drop_rate)

    self.avgpool = tf.keras.layers.GlobalAveragePooling2D()
    self.fc = tf.keras.layers.Dense(units=10, activation=tf.keras.activations.softmax)

  def call(self, inputs):
    x = self.conv(inputs)
    x = self.bn(x)
    x = tf.keras.activations.relu(x)
    x = self.pool(x)

    x = self.dense_block_1(x)
    x = self.transition_1(x)
    x = self.dense_block_2(x)
    x = self.transition_2(x)
    x = self.dense_block_3(x)
    x = self.transition_3(x)
    x = self.dense_block_4(x)

    x = self.avgpool(x)
    x = self.fc(x)
    return x
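
To see how the channel bookkeeping works out for the configuration used in densenet() below (num_init_features=64, growth_rate=32, block_layers=[4, 4, 4, 4], compression_rate=0.5): the stem outputs 64 channels, the first dense block raises this to 64 + 4 * 32 = 192 and the first transition compresses it to 96; the remaining stages give 224 -> 112, then 240 -> 120, and finally 120 + 128 = 248 channels going into the global average pooling layer.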


def densenet():
  return DenseNet(num_init_features=64, growth_rate=32, block_layers=[4, 4, 4, 4], compression_rate=0.5, drop_rate=0.5)
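
The block configuration [4, 4, 4, 4] above is a deliberately small variant that trains quickly. For comparison, a DenseNet-121-style configuration (shown here only as an illustrative sketch, not part of the original post) would look like:

def densenet121_style():
  # Illustrative only: DenseNet-121 uses dense blocks of 6, 12, 24 and 16 bottleneck layers.
  return DenseNet(num_init_features=64, growth_rate=32,
                  block_layers=[6, 12, 24, 16], compression_rate=0.5, drop_rate=0.5)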

mynet = densenet()


# Feed a dummy input through the top-level layers and print the output shape of each.
X = tf.random.uniform(shape=(1, 96, 96, 1))
for layer in mynet.layers:
  X = layer(X)
  print(layer.name, 'output shape:\t', X.shape)


# Load Fashion-MNIST and scale the pixel values to [0, 1].
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.fashion_mnist.load_data()
x_train = x_train.reshape((60000, 28, 28, 1)).astype('float32') / 255
x_test = x_test.reshape((10000, 28, 28, 1)).astype('float32') / 255

mynet.compile(loss='sparse_categorical_crossentropy',
              optimizer=tf.keras.optimizers.Adam(),
              metrics=['accuracy'])

history = mynet.fit(x_train, y_train,
                    batch_size=64,
                    epochs=5,
                    validation_split=0.2)

test_scores = mynet.evaluate(x_test, y_test, verbose=2)
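
As a final, minimal sketch (an addition, not in the original post), the trained model can be used for prediction; the argmax of the softmax output gives the predicted class index:

import numpy as np

# Predict the first five test images and compare against the ground-truth labels.
probs = mynet.predict(x_test[:5])
print(np.argmax(probs, axis=1))
print(y_test[:5])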

 
