FULL TUNER

Classes

class models.lora_prototype_utils.tuners.full_tuner.FullTuner(args, device, seq_dataset, embed_dim, mlp_ratio)

Bases: Module

build_lora_config()
compute_fisher_loss(fisher_dict, do_backward, do_loss_computation=False)
create_BA(fin, fout, transpose=False)
ensemble(mode)
fisher_loss_v1(fisher_dict)
fisher_loss_v2(fisher_dict, do_backward, beta_iel)
get_current_optimizing_parameters()
get_lora_config()
get_lora_matrices(train=True, task_weights=None)
get_lora_matrices_by_layer(layer_idx, train)
get_params_to_tune()
set_current_task(current_task)
set_requires_grad_to_by_task(task_id, mode)
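
The listing above gives only the signatures. The following is a minimal usage sketch, not documented behaviour: the per-task workflow, the argument values (embed_dim=768, mlp_ratio=4.0, lr=1e-3), and the names experiment_args, seq_dataset, and fisher_dict are assumptions standing in for objects provided by the surrounding framework.

    # Hypothetical usage sketch for FullTuner; workflow and values are
    # assumptions inferred from the method names, not documented behaviour.
    import torch
    from models.lora_prototype_utils.tuners.full_tuner import FullTuner

    device = 'cuda' if torch.cuda.is_available() else 'cpu'

    # `experiment_args` and `seq_dataset` are assumed to be supplied by the
    # surrounding experiment setup (argument namespace and continual benchmark).
    tuner = FullTuner(experiment_args, device, seq_dataset,
                      embed_dim=768, mlp_ratio=4.0)

    for task_id in range(seq_dataset.N_TASKS):  # N_TASKS assumed on the benchmark
        # Point the tuner at the current task and enable gradients only for
        # the parameter group associated with that task.
        tuner.set_current_task(task_id)
        tuner.set_requires_grad_to_by_task(task_id, True)

        # Optimize only the parameters the tuner currently exposes.
        optimizer = torch.optim.SGD(tuner.get_current_optimizing_parameters(),
                                    lr=1e-3)  # illustrative value

        # Per-layer LoRA matrices to be injected into the backbone during
        # the forward pass.
        lora_matrices = tuner.get_lora_matrices(train=True)

        # Optional Fisher-based regularization; `fisher_dict` is assumed to
        # hold per-parameter Fisher information estimates.
        fisher_loss = tuner.compute_fisher_loss(fisher_dict,
                                                do_backward=False,
                                                do_loss_computation=True)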