diff --git a/docs/source/self_supervised_models.rst b/docs/source/self_supervised_models.rst
index f499e1cfd5..dd3e0885c5 100644
--- a/docs/source/self_supervised_models.rst
+++ b/docs/source/self_supervised_models.rst
@@ -165,19 +165,19 @@ CIFAR-10 baseline
      - Hardware
      - LR
    * - `Original `_
-     - `82.00? `_
-     - resnet (depth 18)
+     - `92.00? `_
+     - resnet50
      - LARS
      - 512
      - 1000
      - 1 V100 (32GB)
      - 1.0
    * - Ours
-     - `86.75 `_
+     - `85.68 `_
      - `resnet50 `_
      - `LARS `_
      - 512
-     - 698 (10 hr)
+     - 960 (12 hr)
      - 1 V100 (32GB)
      - 1e-6

@@ -187,7 +187,7 @@ CIFAR-10 pretrained model::

     from pl_bolts.models.self_supervised import SimCLR

-    weight_path = 'https://pl-bolts-weights.s3.us-east-2.amazonaws.com/simclr/simclr-cifar10-v1-exp2_acc_867/epoch%3D698.ckpt'
+    weight_path = 'https://pl-bolts-weights.s3.us-east-2.amazonaws.com/simclr/simclr-cifar10-v1-exp12_87_52/epoch%3D960.ckpt'
     simclr = SimCLR.load_from_checkpoint(weight_path, strict=False)
     simclr.freeze()

diff --git a/pl_bolts/models/self_supervised/simclr/simclr_finetuner.py b/pl_bolts/models/self_supervised/simclr/simclr_finetuner.py
index 4e228ddf7c..03d3c248ae 100644
--- a/pl_bolts/models/self_supervised/simclr/simclr_finetuner.py
+++ b/pl_bolts/models/self_supervised/simclr/simclr_finetuner.py
@@ -44,7 +44,7 @@ def cli_main():  # pragma: no-cover
     dm.val_transforms = SimCLREvalDataTransform(h)

     # finetune
-    tuner = SSLFineTuner(backbone, in_features=2048 * 2 * 2, num_classes=dm.num_classes)
+    tuner = SSLFineTuner(backbone, in_features=2048 * 2 * 2, num_classes=dm.num_classes, hidden_dim=None)
     trainer = pl.Trainer.from_argparse_args(args, early_stop_callback=True)
     trainer.fit(tuner, dm)
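
For context, a minimal end-to-end sketch of how the updated finetuner call could be used outside the CLI script. It assumes the pl_bolts APIs that appear in the diff (``SimCLR.load_from_checkpoint``, ``SSLFineTuner``, the SimCLR transforms) plus ``CIFAR10DataModule`` and a pl_bolts-era Lightning ``Trainer``; the data directory, batch size, and trainer settings are placeholders, and ``hidden_dim=None`` is read here as selecting a plain linear evaluation head rather than the default MLP, per the finetuner change above::

    import pytorch_lightning as pl

    from pl_bolts.datamodules import CIFAR10DataModule
    from pl_bolts.models.self_supervised import SimCLR, SSLFineTuner
    from pl_bolts.models.self_supervised.simclr.transforms import (
        SimCLREvalDataTransform,
        SimCLRTrainDataTransform,
    )

    # pretrained CIFAR-10 checkpoint referenced in the docs change above
    weight_path = 'https://pl-bolts-weights.s3.us-east-2.amazonaws.com/simclr/simclr-cifar10-v1-exp12_87_52/epoch%3D960.ckpt'
    backbone = SimCLR.load_from_checkpoint(weight_path, strict=False)
    backbone.freeze()

    # CIFAR-10 images are 32x32; data dir and batch size are placeholders
    dm = CIFAR10DataModule('.', batch_size=512)
    dm.train_transforms = SimCLRTrainDataTransform(32)
    dm.val_transforms = SimCLREvalDataTransform(32)

    # hidden_dim=None: assumed to attach a single linear layer on top of the
    # frozen encoder features instead of the default MLP head
    tuner = SSLFineTuner(
        backbone,
        in_features=2048 * 2 * 2,
        num_classes=dm.num_classes,
        hidden_dim=None,
    )

    # trainer settings are illustrative only
    trainer = pl.Trainer(gpus=1, max_epochs=90)
    trainer.fit(tuner, dm)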