{
  "data_root_dir": "/iliad/group/datasets/OXE_OCTO",
  "hf_token": ".hf_token",
  "image_aug": false,
  "is_resume": true,
  "pretrained_checkpoint": "runs/prism-qwen25-dinosiglip-224px+0_5b+mx-bridge+n1+b16+x7--bridge_vq_extra_tokenizer_lowlr/checkpoints/step-305000-epoch-18-loss=0.5682.pt",
  "resume_epoch": 18,
  "resume_step": 305000,
  "run_id": "prism-qwen25-dinosiglip-224px+0_5b+mx-bridge+n1+b16+x7--bridge_vq_extra_tokenizer_evenlowerlr",
  "run_id_note": "bridge_vq_extra_tokenizer_evenlowerlr",
  "run_root_dir": "runs",
  "save_interval": 2500,
  "seed": 7,
  "trackers": [
    "jsonl",
    "wandb"
  ],
  "vla": {
    "action_tokenizer": "bridge_vq_extra_action_tokenizer",
    "base_vlm": "prism-qwen25-extra-dinosiglip-224px+0_5b",
    "data_mix": "bridge_dataset",
    "enable_gradient_checkpointing": true,
    "enable_mixed_precision_training": true,
    "epochs": 1000,
    "expected_world_size": 8,
    "freeze_llm_backbone": false,
    "freeze_vision_backbone": false,
    "global_batch_size": 128,
    "image_sequence_len": 1,
    "learning_rate": 1e-06,
    "lr_scheduler_type": "constant",
    "max_grad_norm": 1.0,
    "max_steps": null,
    "per_device_batch_size": 16,
    "reduce_in_full_precision": true,
    "save_every_n_steps": 25000,
    "shuffle_buffer_size": 256000,
    "train_strategy": "fsdp-full-shard",
    "type": "prism-qwen25-dinosiglip-224px+0_5b+mx-bridge",
    "unfreeze_last_llm_layer": false,
    "use_wrist_image": false,
    "vla_id": "prism-qwen25-dinosiglip-224px+0_5b+mx-bridge",
    "warmup_ratio": 0.0,
    "weight_decay": 0.0
  },
  "wandb_entity": null,
  "wandb_project": "prismatic"
}