@@ -10,12 +10,16 @@
 HEIGHT = 32
 WIDTH = 32
 NUM_CHANNELS = 3
+num_classes = 10
+
+num_gpus = 2
+
 INIT_LR = 1e-3
 num_train_samples = 40000
-bs_per_gpu = 128
-num_gpus = 1
-num_epochs = 10
-num_classes = 10
+bs_per_gpu = 125
+num_epochs = 20
+epochs_drop = 5.0
+
 
 class LRTensorBoard(TensorBoard):
     def __init__(self, log_dir, update_freq, histogram_freq):  # add other arguments to __init__ if you need
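The class goes on to override `on_epoch_end` (visible as the section context of the next hunk), which is where a callback like this typically injects the current learning rate into the metrics TensorBoard records. Its body is not part of this diff; a minimal sketch of the usual pattern, with the implementation details assumed rather than taken from the commit:

```python
from tensorflow import keras
from tensorflow.keras.callbacks import TensorBoard


class LRTensorBoard(TensorBoard):
    """Hypothetical sketch: regular TensorBoard logging plus the current learning rate."""

    def __init__(self, log_dir, update_freq, histogram_freq):
        super().__init__(log_dir=log_dir, update_freq=update_freq,
                         histogram_freq=histogram_freq)

    def on_epoch_end(self, epoch, logs=None):
        logs = logs or {}
        # Read the optimizer's current learning rate and add it to the epoch
        # logs so it shows up as a scalar in TensorBoard.
        logs['lr'] = float(keras.backend.get_value(self.model.optimizer.lr))
        super().on_epoch_end(epoch, logs)
```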
@@ -29,7 +33,7 @@ def on_epoch_end(self, epoch, logs=None):
 
 
 def preprocess(x, y):
-    image = tf.image.per_image_standardization(x)
+    x = tf.image.per_image_standardization(x)
     return x, y
 
 
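This is a real bug fix, not a rename: the old line standardized the image into an unused local `image`, so `preprocess` returned the raw pixels unchanged. The function is presumably mapped over a `tf.data` pipeline; a minimal sketch of that wiring, where the pipeline itself is an assumption and only the function comes from the commit:

```python
import tensorflow as tf


def preprocess(x, y):
    # Normalize each image to zero mean / unit variance. The result must be
    # assigned back to x, otherwise the standardization is silently discarded.
    x = tf.image.per_image_standardization(x)
    return x, y


# Assumed usage: x, y, num_train_samples, bs_per_gpu and num_gpus come from
# the surrounding script.
train_dataset = (tf.data.Dataset.from_tensor_slices((x, y))
                 .map(preprocess)
                 .shuffle(buffer_size=num_train_samples)
                 .batch(bs_per_gpu * num_gpus))
```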
@@ -49,7 +53,7 @@ def augmentation(x, y):
 def schedule(epoch):
     initial_lrate = INIT_LR
     drop = 0.5
-    epochs_drop = 2.0
+
     lrate = initial_lrate * math.pow(drop, math.floor((1 + epoch) / epochs_drop))
     return lrate
 
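With the drop interval promoted to the module-level constant `epochs_drop = 5.0`, the step decay halves the learning rate roughly every five epochs across the new 20-epoch run (1e-3 for the first epochs, then 5e-4, 2.5e-4, and so on). A minimal sketch of how such a function is usually attached to training; the callback wiring is an assumption, it is not shown in this hunk:

```python
import math

import tensorflow as tf

INIT_LR = 1e-3
epochs_drop = 5.0


def schedule(epoch):
    # Halve the rate each time (1 + epoch) passes a multiple of epochs_drop.
    return INIT_LR * math.pow(0.5, math.floor((1 + epoch) / epochs_drop))


# Assumed wiring: hand the schedule to model.fit through a callback.
lr_callback = tf.keras.callbacks.LearningRateScheduler(schedule)
# model.fit(..., epochs=num_epochs, callbacks=[lr_callback, tensorboard_callback])
```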
@@ -58,6 +62,7 @@ def schedule(epoch):
 x_val = x[num_train_samples:, :]
 y_val = y[num_train_samples:, :]
 
+
 x = x[:num_train_samples, :]
 y = y[:num_train_samples, :]
 
@@ -77,6 +82,14 @@ def schedule(epoch):
         optimizer=keras.optimizers.Adam(learning_rate=INIT_LR),
         loss='sparse_categorical_crossentropy',
         metrics=['accuracy'])
+else:
+    mirrored_strategy = tf.distribute.MirroredStrategy()
+    with mirrored_strategy.scope():
+        model = resnet.resnet56(classes=num_classes)
+        model.compile(
+            optimizer=keras.optimizers.Adam(learning_rate=INIT_LR),
+            loss='sparse_categorical_crossentropy',
+            metrics=['accuracy'])
 
 log_dir = "logs/fit/" + datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
 tensorboard_callback = LRTensorBoard(
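The new `else` branch builds and compiles the model inside `MirroredStrategy.scope()`, so its variables are replicated across the two GPUs and every batch is split between them. That also explains the batch-size change at the top of the file: with `bs_per_gpu = 125` and `num_gpus = 2` the global batch is 250, which divides the 40000 training samples into exactly 160 steps per epoch (that rationale is an inference, the commit itself does not state it). A minimal sketch of the assumed training call, reusing names from the surrounding script:

```python
import tensorflow as tf

# Assumed wiring, not shown in the commit: the compiled model, the batched
# datasets and the callbacks all come from the rest of the script.
model.fit(train_dataset,                 # batched with bs_per_gpu * num_gpus = 250
          epochs=num_epochs,             # 20
          validation_data=val_dataset,
          callbacks=[tensorboard_callback,
                     tf.keras.callbacks.LearningRateScheduler(schedule)])
```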