# LoRA Tuner

## Classes

**class** `models.lora_prototype_utils.tuners.lora_tuner.LoRATuner(args, device, seq_dataset, embed_dim, mlp_ratio)`

Bases: `Module`

Methods:

- `build_lora_config()`
- `compute_fisher_loss(fisher_dict, do_backward, do_loss_computation=False)`
- `create_AB(fin, fout, r1=None, r2=None, enable_op=True, transpose=False)`
- `ensemble(mode)`
- `fisher_loss_v1(fisher_dict)`
- `fisher_loss_v2(fisher_dict, do_backward, beta_iel)`
- `get_current_optimizing_parameters()`
- `get_current_optimizing_parameters_names()`
- `get_lora_config()`
- `get_lora_matrices(train=True, task_weights=None)`
- `get_lora_matrices_by_layer(layer_idx, train, task_weights=None)`
- `get_params_to_tune()`
- `get_weights()`
- `set_current_task(current_task)`
- `set_requires_grad_to_by_task(task_id, mode)`