SimCLR weights (ResNet18, batch size 256, 100 epochs)
config.yaml:
# train options
seed: 42 # Sacred handles seeding automatically when the seed is passed in the config
batch_size: 256
workers: 16
start_epoch: 0
epochs: 100
dataset: "STL10" # STL10
# model options
resnet: "resnet18"
normalize: True
projection_dim: 64 # output size of the projection head; the paper projects "[...] to a 128-dimensional latent space"
# optimizer / loss options
optimizer: "Adam" # or LARS (experimental)
temperature: 0.5 # see Appendix B.7 of the paper (optimal temperature under different batch sizes); used by the NT-Xent loss sketched below
# reload options
model_path: "logs/0" # set to the directory containing `checkpoint_##.tar`
model_num: 40 # set to the checkpoint number (40 loads `checkpoint_40.tar`); see the loading sketch below
# mixed-precision training
fp16: False
fp16_opt_level: "O2" # NVIDIA Apex AMP optimization level; see the mixed-precision sketch below
# logistic regression options (linear evaluation; see the sketch below)
logistic_batch_size: 256
logistic_epochs: 100
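
To make the reload options concrete, here is a minimal sketch of reading `config.yaml` and restoring the checkpoint that `model_path` and `model_num` point to. The `SimCLR` module below (a torchvision ResNet-18 encoder plus a two-layer projection head) and the assumption that the `.tar` file holds a plain state dict are illustrative; the repository's own model class and checkpoint layout may differ.

```python
# Sketch: read config.yaml and restore the checkpoint it points to.
# The SimCLR class here is an assumption (ResNet-18 encoder + 2-layer
# projection head); the repo's own model definition may differ.
import os

import torch
import torch.nn as nn
import torchvision
import yaml


class SimCLR(nn.Module):
    def __init__(self, encoder, n_features, projection_dim):
        super().__init__()
        self.encoder = encoder            # h = f(x), used for downstream tasks
        self.projector = nn.Sequential(   # z = g(h), used by the contrastive loss
            nn.Linear(n_features, n_features),
            nn.ReLU(),
            nn.Linear(n_features, projection_dim),
        )

    def forward(self, x):
        h = self.encoder(x)
        return h, self.projector(h)


with open("config.yaml") as f:
    args = yaml.safe_load(f)

encoder = torchvision.models.resnet18()   # "resnet18" from the config
n_features = encoder.fc.in_features
encoder.fc = nn.Identity()                # drop the classification head
model = SimCLR(encoder, n_features, args["projection_dim"])

# model_path + model_num -> logs/0/checkpoint_40.tar
ckpt = os.path.join(args["model_path"], f"checkpoint_{args['model_num']}.tar")
state = torch.load(ckpt, map_location="cpu")
model.load_state_dict(state)              # assumes the .tar holds a plain state_dict
model.eval()
```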
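The `temperature` value scales the pairwise similarities inside SimCLR's NT-Xent loss. The sketch below shows how that scaling enters the loss, assuming two batches of projections `z_i` and `z_j` from two augmented views of the same images; it is illustrative, not the repository's implementation.

```python
# Sketch of NT-Xent: cosine similarities divided by `temperature`;
# the positive for each row is the other view of the same image.
import torch
import torch.nn.functional as F


def nt_xent(z_i, z_j, temperature=0.5):
    n = z_i.size(0)
    z = F.normalize(torch.cat([z_i, z_j], dim=0), dim=1)  # 2N x projection_dim
    sim = z @ z.t() / temperature                         # 2N x 2N similarity logits
    sim.fill_diagonal_(float("-inf"))                     # exclude self-pairs
    # For row i the positive sits N rows away (the other augmented view).
    targets = torch.cat([torch.arange(n) + n, torch.arange(n)]).to(sim.device)
    return F.cross_entropy(sim, targets)
```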
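The `fp16` / `fp16_opt_level` pair follows NVIDIA Apex AMP conventions ("O2" is one of its optimization levels). A hedged sketch of how such flags are typically wired up is below; Apex must be installed separately, and the model, optimizer, and loss here are placeholders (the config above actually sets `fp16: False`).

```python
# Sketch: wiring fp16 / fp16_opt_level into NVIDIA Apex AMP.
# Model, optimizer, and loss are placeholders; requires a CUDA device
# and https://github.com/NVIDIA/apex installed.
import torch

model = torch.nn.Linear(10, 10).cuda()
optimizer = torch.optim.Adam(model.parameters(), lr=3e-4)

args = {"fp16": True, "fp16_opt_level": "O2"}  # True here only to exercise the branch

if args["fp16"]:
    from apex import amp
    model, optimizer = amp.initialize(model, optimizer, opt_level=args["fp16_opt_level"])

x = torch.randn(4, 10).cuda()
loss = model(x).pow(2).mean()                  # dummy loss

if args["fp16"]:
    with amp.scale_loss(loss, optimizer) as scaled_loss:
        scaled_loss.backward()                 # loss scaling for fp16 stability
else:
    loss.backward()
optimizer.step()
```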
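The logistic regression options drive the linear-evaluation step: the pre-trained encoder is frozen and a single linear classifier is trained on its representations. A minimal sketch, reusing `model`, `n_features`, and `args` from the loading sketch above and assuming you provide a labeled `train_loader` with batch size `logistic_batch_size`:

```python
# Sketch of linear evaluation: frozen encoder, single linear classifier.
# `model`, `n_features`, and `args` come from the loading sketch above;
# `train_loader` is assumed to exist.
import torch
import torch.nn as nn

n_classes = 10                      # STL-10 has 10 classes
classifier = nn.Linear(n_features, n_classes)
optimizer = torch.optim.Adam(classifier.parameters(), lr=3e-4)
criterion = nn.CrossEntropyLoss()

model.eval()                        # encoder stays frozen
for epoch in range(args["logistic_epochs"]):
    for x, y in train_loader:
        with torch.no_grad():
            h, _ = model(x)         # use the representation h, not the projection z
        loss = criterion(classifier(h), y)
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
```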