1+ import tensorflow as tf
2+ from tensorflow import keras
3+ from tensorflow .keras import datasets , layers , optimizers , models
4+ from tensorflow .keras import regularizers
5+
6+
class VGG16(models.Model):
    """VGG16-style convolutional network for small images (e.g. CIFAR-10).

    Architecture: five stages of Conv(3x3, same) -> ReLU -> BatchNorm
    (-> Dropout) layers, each stage ending in 2x2 max-pooling, followed by
    a 512-unit dense head and a 10-way softmax classifier.
    """

    def __init__(self, input_shape):
        """Build the layer stack.

        :param input_shape: shape of one input image, e.g. [32, 32, 3]
        """
        super(VGG16, self).__init__()

        # L2 coefficient for all Conv/Dense kernels; 0.0 disables the
        # penalty but keeps the wiring in place for easy tuning.
        weight_decay = 0.000
        self.num_classes = 10

        model = models.Sequential()

        def add_conv(filters, dropout=None, **conv_kwargs):
            """Append one Conv(3x3) -> ReLU -> BatchNorm (-> Dropout) stanza."""
            model.add(layers.Conv2D(
                filters, (3, 3), padding='same',
                kernel_regularizer=regularizers.l2(weight_decay),
                **conv_kwargs))
            model.add(layers.Activation('relu'))
            model.add(layers.BatchNormalization())
            if dropout is not None:
                model.add(layers.Dropout(dropout))

        # Stage 1: 2 x 64 filters
        add_conv(64, dropout=0.3, input_shape=input_shape)
        add_conv(64)
        model.add(layers.MaxPooling2D(pool_size=(2, 2)))

        # Stage 2: 2 x 128 filters
        add_conv(128, dropout=0.4)
        add_conv(128)
        model.add(layers.MaxPooling2D(pool_size=(2, 2)))

        # Stage 3: 3 x 256 filters
        add_conv(256, dropout=0.4)
        add_conv(256, dropout=0.4)
        add_conv(256)
        model.add(layers.MaxPooling2D(pool_size=(2, 2)))

        # Stage 4: 3 x 512 filters
        add_conv(512, dropout=0.4)
        add_conv(512, dropout=0.4)
        add_conv(512)
        model.add(layers.MaxPooling2D(pool_size=(2, 2)))

        # Stage 5: 3 x 512 filters
        add_conv(512, dropout=0.4)
        add_conv(512, dropout=0.4)
        add_conv(512)
        model.add(layers.MaxPooling2D(pool_size=(2, 2)))
        model.add(layers.Dropout(0.5))

        # Classifier head
        model.add(layers.Flatten())
        model.add(layers.Dense(512, kernel_regularizer=regularizers.l2(weight_decay)))
        model.add(layers.Activation('relu'))
        model.add(layers.BatchNormalization())

        model.add(layers.Dropout(0.5))
        model.add(layers.Dense(self.num_classes))
        model.add(layers.Activation('softmax'))

        self.model = model

    def call(self, x, training=None):
        """Forward pass.

        :param x: batch of images matching the constructor's input_shape
        :param training: explicitly toggles BatchNorm/Dropout train-vs-infer
            behavior; defaults to None so Keras infers it from the call
            context, preserving the original implicit behavior.
        :return: softmax class probabilities, shape (batch, 10)
        """
        return self.model(x, training=training)