!pip install comet_ml
import comet_ml
import getpass, os
os.environ["COMET_API_KEY"] = getpass.getpass("Paste your COMET API KEY: ")
Paste your COMET API KEY: ··········
experiment = comet_ml.Experiment(project_name="logging-curves")
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
from torch.optim.lr_scheduler import StepLR
class Net(nn.Module):
    """LeNet-style CNN for 10-class classification of 3x32x32 images (CIFAR-10).

    forward() returns per-class log-probabilities (log_softmax), so the
    matching training loss is F.nll_loss.
    """

    def __init__(self):
        super().__init__()
        self.conv1 = nn.Conv2d(3, 6, 5)        # 3x32x32 -> 6x28x28
        self.pool = nn.MaxPool2d(2, 2)         # halves spatial dims
        self.conv2 = nn.Conv2d(6, 16, 5)       # 6x14x14 -> 16x10x10
        self.fc1 = nn.Linear(16 * 5 * 5, 120)  # 16x5x5 after the second pool
        self.fc2 = nn.Linear(120, 84)
        self.fc3 = nn.Linear(84, 10)

    def forward(self, x):
        x = self.pool(F.relu(self.conv1(x)))
        x = self.pool(F.relu(self.conv2(x)))
        # flatten(x, 1) keeps the batch dimension explicit; view(-1, ...) can
        # silently fold a mismatched batch size into the feature dimension.
        x = torch.flatten(x, 1)
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = self.fc3(x)
        return F.log_softmax(x, dim=1)
from sklearn.metrics import precision_recall_curve, roc_curve
N_CLASSES = 10
def train(model, device, train_loader, optimizer, epoch):
    """Run one training epoch: forward pass, NLL loss, backward, step.

    Expects the model to emit log-probabilities (see Net.forward); `epoch`
    is accepted for interface symmetry with test() but not used here.
    """
    model.train()
    for data, target in train_loader:
        data = data.to(device)
        target = target.to(device)
        optimizer.zero_grad()
        loss = F.nll_loss(model(data), target)
        loss.backward()
        optimizer.step()
def test(model, device, test_loader, epoch):
model.eval()
test_loss = 0
correct = 0
class_probabilities = []
targets = []
with torch.no_grad():
for data, target in test_loader:
data, target = data.to(device), target.to(device)
output = model(data)
output_proba = output.exp().squeeze(0).cpu().detach().numpy().tolist()
class_probabilities.extend(output_proba)
targets.extend(F.one_hot(torch.tensor(target), N_CLASSES).cpu().numpy().tolist())
test_loss += F.nll_loss(output, target, reduction='sum').item() # sum up batch loss
pred = output.argmax(dim=1, keepdim=True) # get the index of the max log-probability
correct += pred.eq(target.view_as(pred)).sum().item()
test_loss /= len(test_loader.dataset)
print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(
test_loss, correct, len(test_loader.dataset),
100. * correct / len(test_loader.dataset)))
return np.array(targets), np.array(class_probabilities)
def log_curves(y_true, y_pred, step):
    """Log one-vs-rest PR and ROC curves to Comet for every class.

    y_true: (n_samples, n_classes) one-hot labels; y_pred: matching predicted
    probabilities; step tags each curve with the epoch it came from.
    """
    for cls in range(N_CLASSES):
        labels = y_true[:, cls]
        scores = y_pred[:, cls]
        precision, recall, _ = precision_recall_curve(labels, scores)
        experiment.log_curve(f"pr-curve-class-{cls}", recall, precision, step=step)
        fpr, tpr, _ = roc_curve(labels, scores)
        experiment.log_curve(f"roc-curve-class-{cls}", fpr, tpr, step=step)
RANDOM_STATE = 42
torch.manual_seed(RANDOM_STATE)

# Fall back to CPU so the script still runs on machines without a GPU
# (the original hard-coded "cuda" and crashed otherwise).
use_cuda = torch.cuda.is_available()
device = torch.device("cuda" if use_cuda else "cpu")

# Shuffle only the training set; evaluation should see a stable order
# (the original applied shuffle=True to both loaders via cuda_kwargs).
train_kwargs = {'batch_size': 128, 'shuffle': True}
test_kwargs = {'batch_size': 128, 'shuffle': False}
if use_cuda:
    cuda_kwargs = {'num_workers': 1, 'pin_memory': True}
    train_kwargs.update(cuda_kwargs)
    test_kwargs.update(cuda_kwargs)

# Per-channel CIFAR-10 statistics; the original reused MNIST's single-channel
# (0.1307, 0.3081), which mis-normalizes 3-channel CIFAR images.
transform = transforms.Compose([
    transforms.ToTensor(),
    transforms.Normalize((0.4914, 0.4822, 0.4465), (0.2470, 0.2435, 0.2616))
])

train_data = datasets.CIFAR10('../data', train=True, download=True,
                              transform=transform)
test_data = datasets.CIFAR10('../data', train=False, download=True,
                             transform=transform)
train_loader = torch.utils.data.DataLoader(train_data, **train_kwargs)
test_loader = torch.utils.data.DataLoader(test_data, **test_kwargs)
# (captured output from the original notebook run)
# Files already downloaded and verified
EPOCHS = 10

model = Net().to(device)
optimizer = optim.Adadelta(model.parameters(), lr=1.0)
# NOTE(review): gamma=0.1 stepped every epoch shrinks the lr tenfold per
# epoch (1e-10 by epoch 10) — confirm this aggressive schedule is intended.
scheduler = StepLR(optimizer, step_size=1, gamma=0.1)

# Each epoch: train, evaluate, log per-class PR/ROC curves, then decay lr.
for epoch in range(1, EPOCHS + 1):
    train(model, device, train_loader, optimizer, epoch)
    targets, class_probabilities = test(model, device, test_loader, epoch)
    log_curves(targets, class_probabilities, epoch)
    scheduler.step()