Sadjad Alikhani committed on
Commit e6b18cb · verified · 1 Parent(s): 417e856

Update config.json

Files changed (1):
  1. config.json (+3 -3)
config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "architectures": ["LWM"],
   "model_type": "custom",
-  "hidden_size": 64, # Replace with your actual model's hidden size (e.g., D_MODEL)
-  "num_attention_heads": 12, # Replace with your model's number of attention heads
-  "num_hidden_layers": 12 # Replace with your model's number of layers
+  "hidden_size": 64,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12
 }
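
For reference, the practical effect of this change: standard JSON has no comment syntax, so the previous revision's inline "# Replace with ..." notes made config.json unparseable. A minimal sketch (assuming the updated config.json is in the current working directory) that checks the file now loads:

import json

# Load the updated config.json; the prior revision's "#" comments would have
# raised json.JSONDecodeError here, since JSON does not allow comments.
with open("config.json") as f:
    config = json.load(f)

print(config["hidden_size"])          # 64
print(config["num_attention_heads"])  # 12
print(config["num_hidden_layers"])    # 12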