LoRA Prompt

Classes

class models.lora_prototype_utils.lora_prompt.Model(args, seq_dataset, backbone)[source]

Bases: Module

build_optimizer_args(lr_params, lr_classifier=None, wd_params=0, wd_classifier=0)[source]
compute_classifier_reg_loss(cls_ref, do_backward)[source]
compute_fisher_cls_generative_hooks(_, param_resolution_dict_cls, generative_dataloader, debug_mode=False)[source]
compute_reg_loss(do_backward, do_loss_computation)[source]
ensemble(mode=True)[source]
forward(x, train=True, return_all=False, return_features=False, use_lora=True, task_weights=None)[source]
get_params_of_classifier()[source]
get_params_to_tune()[source]
init_tuner(args, seq_dataset)[source]
set_current_task(task_id)[source]
train(mode=True)[source]
update_fisher(dataset, generative_dataloader=None, debug_mode=False)[source]

Functions

models.lora_prototype_utils.lora_prompt.get_fisher_caller(args)[source]