Skip to content

Commit b35a984

Browse files
authored
Removed extra relu
Manual commit to change activation_fn to None by default. Thanks to mlopezantequera for pointing this out.
1 parent b8b53a2 commit b35a984

1 file changed

Lines changed: 2 additions & 1 deletion

File tree

xception.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -162,7 +162,8 @@ def xception_arg_scope(weight_decay=0.00001,
162162
# Set weight_decay for weights in conv2d and separable_conv2d layers.
163163
with slim.arg_scope([slim.conv2d, slim.separable_conv2d],
164164
weights_regularizer=slim.l2_regularizer(weight_decay),
165-
biases_initializer=None):
165+
biases_initializer=None,
166+
activation_fn=None):
166167

167168
# Set parameters for batch_norm. Note: Do not set activation function as it's preset to None already.
168169
with slim.arg_scope([slim.batch_norm],

0 commit comments

Comments (0)