# All pretraining samples belong to known classes, so every type flag is 0.
types = torch.full((labels.size(0),), 0, dtype=torch.long)
logger.debug(f"Creating dataset for session 0 (pretraining). There are {len(samples)} samples, and {len(labels)} labels. There are {labels.unique().size(0)} different classes")
logger.debug(f"Classes in Pretraining Dataset: {labels.unique(sorted=True)}")
# NOTE(review): we are "cheating" here — the number of novel classes per
# session is assumed known ahead of time; confirm this matches the benchmark protocol.
model.head.expand(args.dataset.novel_inc)
# BUG FIX: the original read `assertmodel.head...` (no space after `assert`),
# which Python parses as a reference to an undefined name `assertmodel` —
# a NameError at runtime instead of the intended sanity check.
assert model.head.fc.out_features == args.dataset.known + session * args.dataset.novel_inc, \
    f"Head has {model.head.fc.out_features} features, expected {args.dataset.known+session*args.dataset.novel_inc}"
# freeze the weights for the existing classes. We are only training unknown samples (EG: 50 (known) + (2 (session) - 1) * 10 (novel_inc) = 60 classes have been seen in cl session 2)