Skip to content

Commit 773b90a

Browse files
committed
Cleaning up
1 parent 979ceb8 commit 773b90a

6 files changed

Lines changed: 336 additions & 260 deletions

File tree

code/autoencoder_model/scripts/config_sigc.py

Lines changed: 8 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -82,26 +82,28 @@
8282
A_TRAIN_RATIO = 1
8383
C_TRAIN_RATIO = 1
8484
RAM_DECIMATE = True
85-
RETRAIN_CLASSIFIER = False
85+
RETRAIN_CLASSIFIER = True
8686
CLASS_TARGET_INDEX = 8
8787
ROT_MAX = 5
8888
SFT_H_MAX = 0.02
8989
SFT_V_MAX = 0.02
9090
ZOOM_MAX = 0.2
9191
BRIGHT_RANGE_L = 0.5
9292
BRIGHT_RANGE_H = 1.5
93+
KL_COEFF = 0
94+
ATTN_COEFF = 10
9395

9496
ped_actions = ['slow down', 'standing', 'walking', 'speed up', 'nod', 'unknown',
9597
'clear path', 'handwave', 'crossing', 'looking', 'no ped']
9698

97-
simple_ped_set = ['standing', 'crossing', 'no ped']
99+
simple_ped_set = ['crossing']
98100

99101
# -------------------------------------------------
100102
# Network configuration:
101103
print ("Loading network/training configuration...")
102104
print ("Config file: " + str(__name__))
103105

104-
BATCH_SIZE = 10
106+
BATCH_SIZE = 15
105107
NB_EPOCHS_CLASS = 30
106108

107109
OPTIM_C = Adam(lr=0.0000002, beta_1=0.5)
@@ -125,9 +127,9 @@
125127

126128
def schedule(epoch_idx):
127129
if (epoch_idx + 1) < lr_schedule[0]:
128-
return 0.00001
130+
return 0.000001
129131
elif (epoch_idx + 1) < lr_schedule[1]:
130-
return 0.000001 # lr_decay_ratio = 10
132+
return 0.0000001 # lr_decay_ratio = 10
131133
elif (epoch_idx + 1) < lr_schedule[2]:
132134
return 0.0000001
133-
return 0.0000001
135+
return 0.000001

code/autoencoder_model/scripts/custom_layers.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@
55
from keras.layers import Layer
66
from keras import backend as K
77
K.set_image_dim_ordering('tf')
8-
from config_aa import KL_COEFF, ATTN_COEFF
8+
from config_sigc import KL_COEFF, ATTN_COEFF
99

1010
# Custom loss layer
1111
class AttnLossLayer(Layer):

0 commit comments

Comments (0)