{
"architectures": [
"FIMMJP"
],
"auto_map": {
"AutoConfig": "mjp.FIMMJPConfig",
"AutoModel": "mjp.FIMMJP"
},
"initial_distribution_decoder": {
"dropout": 0,
"hidden_act": {
"name": "torch.nn.SELU"
},
"hidden_layers": [
128,
128
],
"in_features": 2049,
"initialization_scheme": "lecun_normal",
"name": "fim.models.blocks.base.MLP"
},
"intensity_matrix_decoder": {
"dropout": 0,
"hidden_act": {
"name": "torch.nn.SELU"
},
"hidden_layers": [
128,
128
],
"in_features": 2049,
"initialization_scheme": "lecun_normal",
"name": "fim.models.blocks.base.MLP"
},
"model_type": "fimmjp",
"n_states": 6,
"path_attention": {
"embed_dim": 512,
"kv_dim": 128,
"n_heads": 1,
"n_queries": 16,
"name": "fim.models.blocks.MultiHeadLearnableQueryAttention"
},
"pos_encodings": {
"name": "fim.models.blocks.positional_encodings.DeltaTimeEncoding"
},
"torch_dtype": "float32",
"transformers_version": "4.46.0",
"ts_encoder": {
"name": "fim.models.blocks.base.RNNEncoder",
"rnn": {
"batch_first": true,
"bidirectional": true,
"hidden_size": 256,
"name": "torch.nn.LSTM"
}
},
"use_adjacency_matrix": false
}