[breaking] deprecate callbacks=Callbacks() but use a list.
ppwwyyxx committed Jan 25, 2017
1 parent c003b1c commit 243e957
Showing 24 changed files with 68 additions and 71 deletions.
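The change is mechanical and identical across the examples below: drop the Callbacks([...]) wrapper and the explicit StatPrinter(), and pass a plain list. A minimal before/after sketch (a hypothetical config, not one of the files in this commit; df, lr, and Model are placeholders):

    # Before (deprecated): wrap the callbacks and list StatPrinter() yourself.
    config = TrainConfig(
        dataflow=df,
        optimizer=tf.train.AdamOptimizer(lr),
        callbacks=Callbacks([
            StatPrinter(), ModelSaver(),
        ]),
        model=Model(),
    )

    # After: a plain list without StatPrinter(); the default extra_callbacks
    # (see tensorpack/train/config.py below) is expected to supply it.
    config = TrainConfig(
        dataflow=df,
        optimizer=tf.train.AdamOptimizer(lr),
        callbacks=[ModelSaver()],
        model=Model(),
    )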
6 changes: 3 additions & 3 deletions examples/A3C-Gym/train-atari.py
@@ -207,16 +207,16 @@ def get_config():
     return TrainConfig(
         dataflow=dataflow,
         optimizer=tf.train.AdamOptimizer(lr, epsilon=1e-3),
-        callbacks=Callbacks([
-            StatPrinter(), ModelSaver(),
+        callbacks=[
+            ModelSaver(),
             ScheduledHyperParamSetter('learning_rate', [(80, 0.0003), (120, 0.0001)]),
             ScheduledHyperParamSetter('entropy_beta', [(80, 0.005)]),
             ScheduledHyperParamSetter('explore_factor',
                                       [(80, 2), (100, 3), (120, 4), (140, 5)]),
             master,
             StartProcOrThread(master),
             PeriodicCallback(Evaluator(EVAL_EPISODE, ['state'], ['logits']), 2),
-        ]),
+        ],
         session_config=get_default_sess_config(0.5),
         model=M,
         step_per_epoch=STEP_PER_EPOCH,
6 changes: 3 additions & 3 deletions examples/CTC-TIMIT/train-timit.py
@@ -96,14 +96,14 @@ def get_config(ds_train, ds_test):
     return TrainConfig(
         dataflow=ds_train,
         optimizer=tf.train.AdamOptimizer(lr, epsilon=1e-3),
-        callbacks=Callbacks([
-            StatPrinter(), ModelSaver(),
+        callbacks=[
+            ModelSaver(),
             StatMonitorParamSetter('learning_rate', 'error',
                                    lambda x: x * 0.2, 0, 5),
             HumanHyperParamSetter('learning_rate'),
             PeriodicCallback(
                 InferenceRunner(ds_test, [ScalarStats('error')]), 2),
-        ]),
+        ],
         model=Model(),
         step_per_epoch=step_per_epoch,
         max_epoch=70,
6 changes: 3 additions & 3 deletions examples/Char-RNN/char-rnn.py
@@ -110,10 +110,10 @@ def get_config():
     return TrainConfig(
         dataflow=ds,
         optimizer=tf.train.AdamOptimizer(lr),
-        callbacks=Callbacks([
-            StatPrinter(), ModelSaver(),
+        callbacks=[
+            ModelSaver(),
             ScheduledHyperParamSetter('learning_rate', [(25, 2e-4)])
-        ]),
+        ],
         model=Model(),
         step_per_epoch=step_per_epoch,
         max_epoch=50,
6 changes: 3 additions & 3 deletions examples/DeepQNetwork/DQN.py
@@ -177,16 +177,16 @@ def get_config():
     return TrainConfig(
         dataflow=dataset_train,
         optimizer=tf.train.AdamOptimizer(lr, epsilon=1e-3),
-        callbacks=Callbacks([
-            StatPrinter(), ModelSaver(),
+        callbacks=[
+            ModelSaver(),
             ScheduledHyperParamSetter('learning_rate',
                                       [(150, 4e-4), (250, 1e-4), (350, 5e-5)]),
             RunOp(lambda: M.update_target_param()),
             dataset_train,
             PeriodicCallback(Evaluator(EVAL_EPISODE, ['state'], ['Qvalue']), 3),
             # HumanHyperParamSetter('learning_rate', 'hyper.txt'),
             # HumanHyperParamSetter(ObjAttrParam(dataset_train, 'exploration'), 'hyper.txt'),
-        ]),
+        ],
         # save memory for multiprocess evaluator
         session_config=get_default_sess_config(0.6),
         model=M,
6 changes: 3 additions & 3 deletions examples/DoReFa-Net/alexnet-dorefa.py
@@ -236,16 +236,16 @@ def get_config():
     return TrainConfig(
         dataflow=data_train,
         optimizer=tf.train.AdamOptimizer(lr, epsilon=1e-5),
-        callbacks=Callbacks([
-            StatPrinter(), ModelSaver(),
+        callbacks=[
+            ModelSaver(),
             # HumanHyperParamSetter('learning_rate'),
             ScheduledHyperParamSetter(
                 'learning_rate', [(56, 2e-5), (64, 4e-6)]),
             InferenceRunner(data_test,
                             [ScalarStats('cost'),
                              ClassificationError('wrong-top1', 'val-error-top1'),
                              ClassificationError('wrong-top5', 'val-error-top5')])
-        ]),
+        ],
         model=Model(),
         step_per_epoch=10000,
         max_epoch=100,
5 changes: 2 additions & 3 deletions examples/DoReFa-Net/svhn-digit-dorefa.py
@@ -163,12 +163,11 @@ def get_config():
     return TrainConfig(
         dataflow=data_train,
         optimizer=tf.train.AdamOptimizer(lr, epsilon=1e-5),
-        callbacks=Callbacks([
-            StatPrinter(),
+        callbacks=[
             ModelSaver(),
             InferenceRunner(data_test,
                             [ScalarStats('cost'), ClassificationError()])
-        ]),
+        ],
         model=Model(),
         step_per_epoch=step_per_epoch,
         max_epoch=200,
4 changes: 1 addition & 3 deletions examples/GAN/DCGAN-CelebA.py
@@ -109,9 +109,7 @@ def get_config():
     return TrainConfig(
         dataflow=dataset,
         optimizer=tf.train.AdamOptimizer(lr, beta1=0.5, epsilon=1e-3),
-        callbacks=Callbacks([
-            StatPrinter(), ModelSaver(),
-        ]),
+        callbacks=[ModelSaver()],
         session_config=get_default_sess_config(0.5),
         model=Model(),
         step_per_epoch=300,
1 change: 0 additions & 1 deletion examples/GAN/GAN.py
@@ -102,7 +102,6 @@ def run_step(self):
 
 
 class RandomZData(DataFlow):
-
     def __init__(self, shape):
         super(RandomZData, self).__init__()
         self.shape = shape
6 changes: 3 additions & 3 deletions examples/GAN/Image2Image.py
@@ -168,10 +168,10 @@ def get_config():
     return TrainConfig(
         dataflow=dataset,
         optimizer=tf.train.AdamOptimizer(lr, beta1=0.5, epsilon=1e-3),
-        callbacks=Callbacks([
-            StatPrinter(), PeriodicCallback(ModelSaver(), 3),
+        callbacks=[
+            PeriodicCallback(ModelSaver(), 3),
             ScheduledHyperParamSetter('learning_rate', [(200, 1e-4)])
-        ]),
+        ],
         model=Model(),
         step_per_epoch=dataset.size(),
         max_epoch=300,
4 changes: 1 addition & 3 deletions examples/GAN/InfoGAN-mnist.py
@@ -158,9 +158,7 @@ def get_config():
     return TrainConfig(
         dataflow=dataset,
         optimizer=tf.train.AdamOptimizer(lr, beta1=0.5, epsilon=1e-6),
-        callbacks=Callbacks([
-            StatPrinter(), ModelSaver(),
-        ]),
+        callbacks=[ModelSaver()],
         session_config=get_default_sess_config(0.5),
         model=Model(),
         step_per_epoch=500,
6 changes: 3 additions & 3 deletions examples/HED/hed.py
@@ -173,13 +173,13 @@ def get_config():
     return TrainConfig(
         dataflow=dataset_train,
         optimizer=tf.train.AdamOptimizer(lr, epsilon=1e-3),
-        callbacks=Callbacks([
-            StatPrinter(), ModelSaver(),
+        callbacks=[
+            ModelSaver(),
             ScheduledHyperParamSetter('learning_rate', [(30, 6e-6), (45, 1e-6), (60, 8e-7)]),
             HumanHyperParamSetter('learning_rate'),
             InferenceRunner(dataset_val,
                             BinaryClassificationStats('prediction', 'edgemap4d'))
-        ]),
+        ],
         model=Model(),
         step_per_epoch=step_per_epoch,
         max_epoch=100,
7 changes: 3 additions & 4 deletions examples/Inception/inception-bn.py
@@ -160,17 +160,16 @@ def get_config():
     return TrainConfig(
         dataflow=dataset_train,
         optimizer=tf.train.MomentumOptimizer(lr, 0.9),
-        callbacks=Callbacks([
-            StatPrinter(), ModelSaver(),
+        callbacks=[
+            ModelSaver(),
             InferenceRunner(dataset_val, [
                 ClassificationError('wrong-top1', 'val-top1-error'),
                 ClassificationError('wrong-top5', 'val-top5-error')]),
             # HumanHyperParamSetter('learning_rate', 'hyper-googlenet.txt')
             ScheduledHyperParamSetter('learning_rate',
                                       [(8, 0.03), (14, 0.02), (17, 5e-3),
                                        (19, 3e-3), (24, 1e-3), (26, 2e-4),
                                        (30, 5e-5)])
-        ]),
+        ],
         session_config=get_default_sess_config(0.99),
         model=Model(),
         step_per_epoch=step_per_epoch,
6 changes: 3 additions & 3 deletions examples/Inception/inceptionv3.py
@@ -268,8 +268,8 @@ def get_config():
     return TrainConfig(
         dataflow=dataset_train,
         optimizer=tf.train.AdamOptimizer(lr, epsilon=1e-3),
-        callbacks=Callbacks([
-            StatPrinter(), ModelSaver(),
+        callbacks=[
+            ModelSaver(),
             InferenceRunner(dataset_val, [
                 ClassificationError('wrong-top1', 'val-error-top1'),
                 ClassificationError('wrong-top5', 'val-error-top5')]),
@@ -278,7 +278,7 @@ def get_config():
                                        (17, 0.003), (22, 1e-3), (36, 2e-4),
                                        (41, 8e-5), (48, 1e-5), (53, 2e-6)]),
             HumanHyperParamSetter('learning_rate')
-        ]),
+        ],
         session_config=get_default_sess_config(0.9),
         model=Model(),
         step_per_epoch=5000,
6 changes: 3 additions & 3 deletions examples/PennTreebank/PTB-LSTM.py
@@ -126,8 +126,8 @@ def get_config():
         data=train_data,
         model=M,
         optimizer=tf.train.GradientDescentOptimizer(lr),
-        callbacks=Callbacks([
-            StatPrinter(), ModelSaver(),
+        callbacks=[
+            ModelSaver(),
             HyperParamSetterWithFunc(
                 'learning_rate',
                 lambda e, x: x * 0.80 if e > 6 else x),
@@ -139,7 +139,7 @@ def get_config():
                 'validation_perplexity',
                 np.exp(self.trainer.stat_holder.get_stat_now('validation_cost') / SEQ_LEN))),
             RunOp(lambda: M.reset_lstm_state()),
-        ]),
+        ],
         max_epoch=70,
     )
 
6 changes: 3 additions & 3 deletions examples/ResNet/cifar10-resnet.py
@@ -141,13 +141,13 @@ def get_config():
     return TrainConfig(
         dataflow=dataset_train,
         optimizer=tf.train.MomentumOptimizer(lr, 0.9),
-        callbacks=Callbacks([
-            StatPrinter(), ModelSaver(),
+        callbacks=[
+            ModelSaver(),
             InferenceRunner(dataset_test,
                             [ScalarStats('cost'), ClassificationError()]),
             ScheduledHyperParamSetter('learning_rate',
                                       [(1, 0.1), (82, 0.01), (123, 0.001), (300, 0.0002)])
-        ]),
+        ],
         model=Model(n=NUM_UNITS),
         step_per_epoch=step_per_epoch,
         max_epoch=400,
6 changes: 3 additions & 3 deletions examples/ResNet/imagenet-resnet.py
@@ -189,15 +189,15 @@ def get_config():
     return TrainConfig(
         dataflow=dataset_train,
         optimizer=tf.train.MomentumOptimizer(lr, 0.9, use_nesterov=True),
-        callbacks=Callbacks([
-            StatPrinter(), ModelSaver(),
+        callbacks=[
+            ModelSaver(),
             InferenceRunner(dataset_val, [
                 ClassificationError('wrong-top1', 'val-error-top1'),
                 ClassificationError('wrong-top5', 'val-error-top5')]),
             ScheduledHyperParamSetter('learning_rate',
                                       [(30, 1e-2), (60, 1e-3), (85, 1e-4), (95, 1e-5)]),
             HumanHyperParamSetter('learning_rate'),
-        ]),
+        ],
         model=Model(),
         step_per_epoch=5000,
         max_epoch=110,
5 changes: 2 additions & 3 deletions examples/ResNet/svhn-resnet.py
@@ -70,14 +70,13 @@ def get_config():
     return TrainConfig(
         dataflow=dataset_train,
         optimizer=tf.train.MomentumOptimizer(lr, 0.9),
-        callbacks=Callbacks([
-            StatPrinter(),
+        callbacks=[
             ModelSaver(),
             InferenceRunner(dataset_test,
                             [ScalarStats('cost'), ClassificationError()]),
             ScheduledHyperParamSetter('learning_rate',
                                       [(1, 0.1), (20, 0.01), (28, 0.001), (50, 0.0001)])
-        ]),
+        ],
         model=Model(n=18),
         step_per_epoch=step_per_epoch,
         max_epoch=500,
5 changes: 2 additions & 3 deletions examples/SimilarityLearning/mnist-embeddings.py
@@ -141,11 +141,10 @@ def get_config(model):
         dataflow=dataset,
         model=model(),
         optimizer=tf.train.GradientDescentOptimizer(lr),
-        callbacks=Callbacks([
-            StatPrinter(),
+        callbacks=[
             ModelSaver(),
             ScheduledHyperParamSetter('learning_rate', [(10, 1e-5), (20, 1e-6)])
-        ]),
+        ],
         step_per_epoch=step_per_epoch,
         max_epoch=20,
     )
6 changes: 3 additions & 3 deletions examples/SpatialTransformer/mnist-addition.py
@@ -155,12 +155,12 @@ def get_config():
     return TrainConfig(
         dataflow=dataset_train,
         optimizer=tf.train.AdamOptimizer(lr, epsilon=1e-3),
-        callbacks=Callbacks([
-            StatPrinter(), ModelSaver(),
+        callbacks=[
+            ModelSaver(),
             InferenceRunner(dataset_test,
                             [ScalarStats('cost'), ClassificationError()]),
             ScheduledHyperParamSetter('learning_rate', [(200, 1e-4)])
-        ]),
+        ],
         session_config=get_default_sess_config(0.5),
         model=Model(),
         step_per_epoch=step_per_epoch,
6 changes: 3 additions & 3 deletions examples/cifar-convnet.py
@@ -122,12 +122,12 @@ def lr_func(lr):
     return TrainConfig(
         dataflow=dataset_train,
         optimizer=tf.train.AdamOptimizer(lr, epsilon=1e-3),
-        callbacks=Callbacks([
-            StatPrinter(), ModelSaver(),
+        callbacks=[
+            ModelSaver(),
             InferenceRunner(dataset_test, ClassificationError()),
             StatMonitorParamSetter('learning_rate', 'val_error', lr_func,
                                    threshold=0.001, last_k=10),
-        ]),
+        ],
         session_config=sess_config,
         model=Model(cifar_classnum),
         step_per_epoch=step_per_epoch,
5 changes: 2 additions & 3 deletions examples/mnist-convnet.py
@@ -140,14 +140,13 @@ def get_config():
     return TrainConfig(
         dataflow=dataset_train,  # the DataFlow instance for training
         optimizer=tf.train.AdamOptimizer(lr),
-        callbacks=Callbacks([
-            StatPrinter(),  # print statistics in terminal after every epoch
+        callbacks=[
             ModelSaver(),  # save the model after every epoch
             InferenceRunner(  # run inference(for validation) after every epoch
                 dataset_test,  # the DataFlow instance used for validation
                 # Calculate both the cost and the error for this DataFlow
                 [ScalarStats('cross_entropy_loss'), ClassificationError('incorrect')]),
-        ]),
+        ],
         model=Model(),
         step_per_epoch=step_per_epoch,
         max_epoch=100,
6 changes: 3 additions & 3 deletions examples/svhn-digit-convnet.py
@@ -101,11 +101,11 @@ def get_config():
     return TrainConfig(
         dataflow=data_train,
         optimizer=tf.train.AdamOptimizer(lr),
-        callbacks=Callbacks([
-            StatPrinter(), ModelSaver(),
+        callbacks=[
+            ModelSaver(),
             InferenceRunner(data_test,
                             [ScalarStats('cost'), ClassificationError()])
-        ]),
+        ],
         model=Model(),
         step_per_epoch=step_per_epoch,
         max_epoch=350,
13 changes: 10 additions & 3 deletions tensorpack/callbacks/group.py
@@ -59,13 +59,20 @@ def __init__(self, cbs):
         for cb in cbs:
             assert isinstance(cb, Callback), cb.__class__
         # move "StatPrinter" to the last
-        for cb in cbs:
+        # TODO don't need to manually move in the future.
+        found = False
+        for idx, cb in enumerate(cbs):
             if isinstance(cb, StatPrinter):
+                if found:
+                    raise ValueError("Callbacks cannot contain two StatPrinter!")
                 sp = cb
                 cbs.remove(sp)
                 cbs.append(sp)
-                break
-        else:
+                if idx != len(cbs) - 1:
+                    logger.warn("StatPrinter should appear as the last element of callbacks! "
+                                "This is now fixed automatically, but may not work in the future.")
+                found = True
+        if not found:
             raise ValueError("Callbacks must contain StatPrinter for stat and writer to work properly!")
 
         self.cbs = cbs
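Callbacks.__init__ still moves a misplaced StatPrinter to the end, but now warns about it, rejects a duplicated one, and still raises when it is missing. A sketch of the observable behavior (import paths assumed from tensorpack's usual re-exports; not part of this commit):

    from tensorpack.callbacks import Callbacks, ModelSaver, StatPrinter

    grp = Callbacks([StatPrinter(), ModelSaver()])  # misplaced: warns, then moves it last
    assert isinstance(grp.cbs[-1], StatPrinter)

    try:
        Callbacks([ModelSaver()])                   # no StatPrinter: raises ValueError
    except ValueError as e:
        print(e)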
6 changes: 3 additions & 3 deletions tensorpack/train/config.py
@@ -79,9 +79,9 @@ def assert_type(v, tp):
 
         if isinstance(callbacks, Callbacks):
             # keep quiet now because I haven't determined the final API yet.
-            # logger.warn("[Deprecated] API of TrainConfig(callbacks=) has changed!")
-            # logger.warn("[Deprecated] Please change the option 'callbacks=' to a list of "
-            #             "callbacks without StatPrinter().")
+            logger.warn("[Deprecated] API of TrainConfig(callbacks=) has changed!")
+            logger.warn("[Deprecated] Please change the argument 'callbacks=' to a *list* of "
+                        "callbacks without StatPrinter().")
             callbacks = callbacks.cbs[:-1]  # the last one is StatPrinter()
         assert_type(callbacks, list)
         if extra_callbacks is None:
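The slice callbacks.cbs[:-1] is safe because Callbacks.__init__ (group.py above) guarantees StatPrinter ends up last in .cbs, so dropping the final element recovers exactly the user-supplied callbacks, which then go down the new list code path. In sketch form (a paraphrase of the logic above, not new API):

    if isinstance(callbacks, Callbacks):
        # __init__ already moved StatPrinter to the end, so slicing it off
        # recovers the user's own callbacks as a plain list.
        callbacks = callbacks.cbs[:-1]
    assert isinstance(callbacks, list)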
