From b354436c3af7c6a2a9258603b64be797df7072e9 Mon Sep 17 00:00:00 2001 From: Alex Fomenko Date: Mon, 3 Jun 2019 13:52:56 +0300 Subject: [PATCH] Fixed padding in the first convolution layer As stated in the comment, conv1 layer should have 'VALID' padding, but in the code (line 48) it had 'SAME' padding. That was confusing, and led to (?, 28, 28, 256) output shape of the conv2 layer, instead of (?, 27, 27, 256), as described in the original paper. --- alexnet.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/alexnet.py b/alexnet.py index bfdaf49..b01f8a0 100644 --- a/alexnet.py +++ b/alexnet.py @@ -45,7 +45,7 @@ def AlexNet(features, feature_extract=False): s_w = 4 conv1W = tf.Variable(net_data["conv1"][0]) conv1b = tf.Variable(net_data["conv1"][1]) - conv1_in = conv(features, conv1W, conv1b, k_h, k_w, c_o, s_h, s_w, padding="SAME", group=1) + conv1_in = conv(features, conv1W, conv1b, k_h, k_w, c_o, s_h, s_w, padding="VALID", group=1) conv1 = tf.nn.relu(conv1_in) # lrn1