{
"architectures": [
"GenBioModel"
],
"auto_map": {
"AutoConfig": "modeling_genbio.GenBioConfig",
"AutoModel": "modeling_genbio.GenBioModel"
},
"hparams": {
"_class_path": "genbio_finetune.tasks.SequenceClassification",
"_instantiator": "lightning.pytorch.cli.instantiate_module",
"adapter": {
"class_path": "genbio_finetune.models.MLPAdapter",
"dict_kwargs": {
"hidden_sizes": [
64
]
},
"init_args": {
"bias": true,
"dropout": 0.0
}
},
"backbone": {
"class_path": "genbio_finetune.models._proteinfm_debug",
"init_args": {
"lora_alpha": 32,
"lora_dropout": 0.1,
"lora_modules_to_save": null,
"lora_r": 16,
"lora_target_modules": [
"query",
"value",
"key",
"dense",
"router"
],
"lora_use_rslora": false,
"max_length": null,
"model_init_args": {
"add_pooling_layer": false
},
"save_peft_only": true,
"use_peft": false
}
},
"batch_size": 2,
"lr_scheduler": {
"class_path": "genbio_finetune.lr_schedulers.CosineWithWarmup",
"init_args": {
"last_epoch": -1,
"num_warmup_steps": null,
"verbose": "deprecated",
"warmup_ratio": 0.01
}
},
"n_classes": 2,
"optimizer": {
"class_path": "torch.optim.AdamW",
"init_args": {
"amsgrad": false,
"betas": [
0.9,
0.95
],
"capturable": false,
"differentiable": false,
"eps": 1e-08,
"foreach": null,
"fused": null,
"lr": 0.001,
"maximize": false,
"weight_decay": 0.01
}
},
"reset_optimizer_states": false,
"strict_loading": true,
"use_legacy_adapter": false
},
"model_type": "genbio",
"torch_dtype": "float32",
"transformers_version": "4.38.0.dev0"
}