-
Notifications
You must be signed in to change notification settings - Fork 164
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
1 parent
e36661c
commit c38be08
Showing
6 changed files
with
149 additions
and
5 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,8 +1,21 @@ | ||
# train options
batch_size: 256
workers: 16
start_epoch: 0
epochs: 40

# model options
resnet: "resnet18"
normalize: True
n_out: 64

# loss options
temperature: 0.5

# reload options
model_path: "logs/182" # set to the directory containing `checkpoint_##.tar`
model_num: 40 # set to checkpoint number

# logistic regression options
logistic_batch_size: 256
logistic_epochs: 100
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,4 @@ | ||
import torch.nn as nn | ||
|
||
class LogisticRegression(nn.Module): | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,113 @@ | ||
import torch | ||
import torchvision | ||
import argparse | ||
|
||
from experiment import ex | ||
from model import load_model | ||
from utils import post_config_hook | ||
|
||
|
||
def train(args, loader, simclr_model, model, criterion, optimizer):
    """Train the linear head `model` for one epoch on frozen SimCLR features.

    Args:
        args: namespace with a `device` attribute (torch.device).
        loader: iterable of (x, y) batches.
        simclr_model: frozen encoder whose forward returns (h, z); only the
            representation `h` is fed to the classifier.
        model: trainable classification head.
        criterion: loss function (e.g. CrossEntropyLoss).
        optimizer: optimizer over `model`'s parameters.

    Returns:
        (loss_epoch, accuracy_epoch): sums over batches — divide by
        len(loader) for per-batch averages.
    """
    # Fix: ensure the head is in training mode. A preceding call to test()
    # leaves it in eval mode, and the original never switched it back.
    model.train()

    loss_epoch = 0
    accuracy_epoch = 0
    for step, (x, y) in enumerate(loader):
        optimizer.zero_grad()

        x = x.to(args.device)
        y = y.to(args.device)

        # Features come from the frozen encoder — no gradients needed there.
        with torch.no_grad():
            h, z = simclr_model(x)
            # h: representation (e.g. 512-d); z: projection (e.g. 64-d)

        output = model(h)
        loss = criterion(output, y)

        predicted = output.argmax(1)
        acc = (predicted == y).sum().item() / y.size(0)
        accuracy_epoch += acc

        loss.backward()
        optimizer.step()

        loss_epoch += loss.item()

    return loss_epoch, accuracy_epoch
|
||
def test(args, loader, simclr_model, model, criterion, optimizer):
    """Evaluate the linear head `model` on frozen SimCLR features.

    Args:
        args: namespace with a `device` attribute (torch.device).
        loader: iterable of (x, y) batches.
        simclr_model: frozen encoder whose forward returns (h, z).
        model: classification head to evaluate (switched to eval mode here).
        criterion: loss function used for the reported loss.
        optimizer: unused; kept for signature parity with train().

    Returns:
        (loss_epoch, accuracy_epoch): sums over batches — divide by
        len(loader) for per-batch averages.
    """
    loss_epoch = 0
    accuracy_epoch = 0
    model.eval()
    for x, y in loader:
        x = x.to(args.device)
        y = y.to(args.device)

        # Fix: the entire forward pass is inference, so run it all under
        # no_grad — the original only guarded the encoder and therefore built
        # an autograd graph for the head on every batch; it also called
        # model.zero_grad() each step, which serves no purpose without a
        # backward pass.
        with torch.no_grad():
            h, z = simclr_model(x)
            output = model(h)
            loss = criterion(output, y)

        predicted = output.argmax(1)
        acc = (predicted == y).sum().item() / y.size(0)
        accuracy_epoch += acc

        loss_epoch += loss.item()

    return loss_epoch, accuracy_epoch
|
||
@ex.automain
def main(_run, _log):
    """Linear-evaluation entry point: freezes a pretrained SimCLR encoder and
    trains/evaluates a logistic-regression head on STL-10.

    Invoked by the sacred experiment `ex`; `_run` and `_log` are injected by
    sacred, and the run configuration is read from `_run.config`.
    """
    # Expose the sacred config dict as attribute-style args.
    args = argparse.Namespace(**_run.config)
    args = post_config_hook(args, _run)

    args.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

    root = "./datasets"
    # Reload the pretrained encoder (checkpoint chosen via args, e.g.
    # model_path/model_num) and freeze it for feature extraction.
    simclr_model = load_model(args, reload_model=True)
    simclr_model = simclr_model.to(args.device)
    simclr_model.eval()

    ## Logistic Regression
    # A single linear layer on top of the encoder representation h.
    # NOTE(review): 10 output classes is hard-coded for STL-10.
    model = torch.nn.Sequential(torch.nn.Linear(simclr_model.n_features, 10)).to(args.device)

    optimizer = torch.optim.Adam(model.parameters(), lr=3e-4)
    criterion = torch.nn.CrossEntropyLoss()

    train_dataset = torchvision.datasets.STL10(
        root, split="train", download=True, transform=torchvision.transforms.ToTensor()
    )

    test_dataset = torchvision.datasets.STL10(
        root, split="test", download=True, transform=torchvision.transforms.ToTensor()
    )

    # NOTE(review): shuffle is not enabled on the training loader — confirm
    # whether that is intentional. drop_last=True discards the final partial
    # batch on both loaders.
    train_loader = torch.utils.data.DataLoader(
        train_dataset,
        batch_size=args.logistic_batch_size,
        drop_last=True,
        num_workers=args.workers,
    )

    test_loader = torch.utils.data.DataLoader(
        test_dataset,
        batch_size=args.logistic_batch_size,
        drop_last=True,
        num_workers=args.workers,
    )

    # Train the head; train() returns per-epoch sums over batches, so divide
    # by the number of batches to report averages.
    for epoch in range(args.logistic_epochs):
        loss_epoch, accuracy_epoch = train(args, train_loader, simclr_model, model, criterion, optimizer)
        print(f"Epoch [{epoch}/{args.logistic_epochs}]\t Loss: {loss_epoch / len(train_loader)}\t Accuracy: {accuracy_epoch / len(train_loader)}")

    # final testing
    loss_epoch, accuracy_epoch = test(args, test_loader, simclr_model, model, criterion, optimizer)
    print(f"[FINAL]\t Loss: {loss_epoch / len(test_loader)}\t Accuracy: {accuracy_epoch / len(test_loader)}")
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters