Source code for datasets.seq_cifar10

# Copyright 2022-present, Lorenzo Bonicelli, Pietro Buzzega, Matteo Boschini, Angelo Porrello, Simone Calderara.
# All rights reserved.
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.

from argparse import Namespace
import logging
from typing import Tuple

import torch
import torch.nn.functional as F
import torchvision.transforms as transforms
from PIL import Image
from torchvision.datasets import CIFAR10

from utils.conf import base_path
from datasets.transforms.denormalization import DeNormalize
from datasets.utils.continual_dataset import (ContinualDataset, fix_class_names_order,
                                              store_masked_loaders)
from datasets.utils import set_default_from_args


class TCIFAR10(CIFAR10):
    """Workaround to avoid printing the already downloaded messages."""

    def __init__(self, root, train=True, transform=None,
                 target_transform=None, download=False) -> None:
        self.root = root
        # download only when the data are not already present, so torchvision
        # does not print "Files already downloaded and verified"
        super(TCIFAR10, self).__init__(root, train, transform, target_transform,
                                       download=not self._check_integrity())


class MyCIFAR10(CIFAR10):
    """
    Overrides the CIFAR10 dataset to change the getitem function.
    """

    def __init__(self, root, train=True, transform=None,
                 target_transform=None, download=False) -> None:
        self.not_aug_transform = transforms.Compose([transforms.ToTensor()])
        self.root = root
        super(MyCIFAR10, self).__init__(root, train, transform, target_transform,
                                        download=not self._check_integrity())

    def __getitem__(self, index: int) -> Tuple[Image.Image, int, Image.Image]:
        """
        Gets the requested element from the dataset.

        Args:
            index: index of the element to be returned

        Returns:
            tuple: (image, target, not_aug_image) where target is the index of the
            target class and not_aug_image is the image without augmentation.
        """
        img, target = self.data[index], self.targets[index]

        # to return a PIL Image
        img = Image.fromarray(img, mode='RGB')
        original_img = img.copy()

        not_aug_img = self.not_aug_transform(original_img)

        if self.transform is not None:
            img = self.transform(img)

        if self.target_transform is not None:
            target = self.target_transform(target)

        if hasattr(self, 'logits'):
            return img, target, not_aug_img, self.logits[index]

        return img, target, not_aug_img


class SequentialCIFAR10(ContinualDataset):
    """Sequential CIFAR10 Dataset.

    Attributes:
        NAME (str): name of the dataset.
        SETTING (str): setting of the dataset.
        N_CLASSES_PER_TASK (int): number of classes per task.
        N_TASKS (int): number of tasks.
        N_CLASSES (int): number of classes.
        SIZE (tuple): size of the images.
        MEAN (tuple): mean of the dataset.
        STD (tuple): standard deviation of the dataset.
        TRANSFORM (torchvision.transforms): transformations to apply to the dataset.
    """

    NAME = 'seq-cifar10'
    SETTING = 'class-il'
    N_CLASSES_PER_TASK = 2
    N_TASKS = 5
    N_CLASSES = N_CLASSES_PER_TASK * N_TASKS
    SIZE = (32, 32)
    MEAN, STD = (0.4914, 0.4822, 0.4465), (0.2470, 0.2435, 0.2615)
    TRANSFORM = transforms.Compose(
        [transforms.RandomCrop(32, padding=4),
         transforms.RandomHorizontalFlip(),
         transforms.ToTensor(),
         transforms.Normalize(MEAN, STD)])
    TEST_TRANSFORM = transforms.Compose([transforms.ToTensor(), transforms.Normalize(MEAN, STD)])

    def __init__(self, args, transform_type: str = 'weak'):
        super().__init__(args)

        assert transform_type in ['weak', 'strong'], "Transform type must be either 'weak' or 'strong'."

        if transform_type == 'strong':
            logging.info("Using strong augmentation for CIFAR10")
            self.TRANSFORM = transforms.Compose(
                [transforms.RandomCrop(32, padding=4),
                 transforms.RandomHorizontalFlip(),
                 transforms.ColorJitter(brightness=0.2, contrast=0.2, saturation=0.2, hue=0.1),
                 transforms.ToTensor(),
                 transforms.Normalize(SequentialCIFAR10.MEAN, SequentialCIFAR10.STD)])

    def get_data_loaders(self) -> Tuple[torch.utils.data.DataLoader, torch.utils.data.DataLoader]:
        """Returns the train and test data loaders for the current task."""
        transform = self.TRANSFORM

        train_dataset = MyCIFAR10(base_path() + 'CIFAR10', train=True,
                                  download=True, transform=transform)
        test_dataset = TCIFAR10(base_path() + 'CIFAR10', train=False,
                                download=True, transform=self.TEST_TRANSFORM)

        train, test = store_masked_loaders(train_dataset, test_dataset, self)
        return train, test

    @staticmethod
    def get_transform():
        transform = transforms.Compose(
            [transforms.ToPILImage(), SequentialCIFAR10.TRANSFORM])
        return transform
[docs] @set_default_from_args("backbone") def get_backbone(): return "resnet18"

    @staticmethod
    def get_loss():
        return F.cross_entropy

    @staticmethod
    def get_normalization_transform():
        transform = transforms.Normalize(SequentialCIFAR10.MEAN, SequentialCIFAR10.STD)
        return transform

    @staticmethod
    def get_denormalization_transform():
        transform = DeNormalize(SequentialCIFAR10.MEAN, SequentialCIFAR10.STD)
        return transform

    @set_default_from_args('n_epochs')
    def get_epochs(self):
        return 50

    @set_default_from_args('batch_size')
    def get_batch_size(self):
        return 32

    def get_class_names(self):
        """Returns the CIFAR-10 class names, reordered to match the class order used in this run."""
        if self.class_names is not None:
            return self.class_names
        classes = CIFAR10(base_path() + 'CIFAR10', train=True, download=True).classes
        classes = fix_class_names_order(classes, self.args)
        self.class_names = classes
        return self.class_names
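

# --- Usage sketch (illustrative, not part of the original module) -------------
# A minimal, hedged example of what the dataset classes above return: indexing
# MyCIFAR10 yields (augmented image, label, un-augmented tensor), the extra
# tensor being what rehearsal methods typically store in their buffers.  The
# data root './data/CIFAR10' is an assumption for illustration only; Mammoth
# normally resolves it through utils.conf.base_path().  Building a full
# SequentialCIFAR10 additionally requires the argparse Namespace produced by
# Mammoth's own argument parser, so only the bare dataset class is exercised.
if __name__ == '__main__':
    demo_train = MyCIFAR10('./data/CIFAR10', train=True, download=True,
                           transform=SequentialCIFAR10.TRANSFORM)
    img, target, not_aug_img = demo_train[0]
    # img and not_aug_img are 3x32x32 tensors; target is an integer class index
    print(img.shape, target, not_aug_img.shape)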