{
  "architectures": [
    "GenBioModel"
  ],
  "auto_map": {
    "AutoConfig": "modeling_genbio.GenBioConfig",
    "AutoModel": "modeling_genbio.GenBioModel"
  },
  "hparams": {
    "_class_path": "genbio_finetune.tasks.SequenceClassification",
    "_instantiator": "lightning.pytorch.cli.instantiate_module",
    "adapter": "genbio_finetune.models.LinearCLSAdapter",
    "backbone": {
      "class_path": "genbio_finetune.models.dummy",
      "init_args": {
        "config_overwrites": null,
        "from_scratch": false,
        "lora_alpha": 32,
        "lora_dropout": 0.1,
        "lora_r": 16,
        "max_length": null,
        "model_init_args": null,
        "save_peft_only": true,
        "use_peft": false
      }
    },
    "batch_size": 128,
    "lr_scheduler": null,
    "n_classes": 2,
    "optimizer": {
      "class_path": "torch.optim.AdamW",
      "init_args": {
        "amsgrad": false,
        "betas": [
          0.9,
          0.999
        ],
        "capturable": false,
        "differentiable": false,
        "eps": 1e-08,
        "foreach": null,
        "fused": null,
        "lr": 0.001,
        "maximize": false,
        "weight_decay": 0.01
      }
    },
    "reset_optimizer_states": false,
    "strict_loading": true,
    "use_legacy_adapter": false
  },
  "model_type": "genbio",
  "torch_dtype": "float32",
  "transformers_version": "4.38.0"
}
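
Usage note (not part of the file above): a config that declares an `auto_map` like this is normally loaded through the Transformers Auto classes with `trust_remote_code=True`, so that the `modeling_genbio.py` module shipped alongside the config provides `GenBioConfig` and `GenBioModel`. A minimal sketch, assuming a placeholder repo path:

```python
# Minimal sketch: loading a checkpoint whose config.json carries an auto_map
# pointing at custom classes in modeling_genbio.py. The path below is a
# placeholder, not taken from the config.
from transformers import AutoConfig, AutoModel

repo = "path/to/genbio-checkpoint"  # placeholder repo id or local directory

config = AutoConfig.from_pretrained(repo, trust_remote_code=True)  # resolves to GenBioConfig
model = AutoModel.from_pretrained(repo, trust_remote_code=True)    # resolves to GenBioModel
```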