wrkspace-backup-ttl / zero3_bf16_moe.json
{
"zero_optimization": {
"stage": 3,
"offload_param": {
"device": "cpu",
"pin_memory": true
},
"offload_optimizer": {
"device": "cpu"
},
"overlap_comm": true,
"contiguous_gradients": true,
"override_module_apply": false
},
"moe": {
"enabled": true,
"ep_size": 2, # each expert is sharded across 2 GPUs
"moe_experts": [256] # DeepSeek-V3 uses 256 routed experts per MoE layer :contentReference[oaicite:0]{index=0}
},
"bf16": {
"enabled": true,
"gradient_checkpointing": true,
"train_micro_batch_size_per_gpu": 1,
"steps_per_print": 50
}
}
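
Notes on the "moe" block: "ep_size": 2 shards each expert-parallel group across 2 GPUs, and the single entry in "moe_experts" configures 256 routed experts per MoE layer, the count DeepSeek-V3 uses. In many DeepSpeed setups these values are also passed to the MoE layer in model code rather than only read from the JSON; below is a minimal sketch under that assumption, using deepspeed.moe.layer.MoE. The hidden size and the ExpertFFN module are illustrative placeholders, not part of this repo.

# Minimal sketch: an MoE block matching the config above (ep_size=2, 256 experts).
# hidden_size and ExpertFFN are placeholders; use the real model's dimensions and FFN.
import torch.nn as nn
from deepspeed.moe.layer import MoE

hidden_size = 1024  # placeholder

class ExpertFFN(nn.Module):
    """A single expert: a plain feed-forward block (illustrative)."""
    def __init__(self, d_model):
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(d_model, 4 * d_model),
            nn.GELU(),
            nn.Linear(4 * d_model, d_model),
        )

    def forward(self, x):
        return self.net(x)

# 256 routed experts per MoE layer, sharded across expert-parallel groups of 2 GPUs.
moe_layer = MoE(
    hidden_size=hidden_size,
    expert=ExpertFFN(hidden_size),
    num_experts=256,   # matches "moe_experts": [256]
    ep_size=2,         # matches "ep_size": 2
    k=1,               # top-1 routing; the config does not pin down the router's top-k
)

Note that ep_size must divide the data-parallel world size, so this setting assumes a launch with at least 2 GPUs.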
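
To train with this file, it would typically be handed to deepspeed.initialize. Below is a minimal sketch, assuming a Hugging Face causal LM and DeepSpeedCPUAdam, the optimizer DeepSpeed pairs with "offload_optimizer": {"device": "cpu"}; the model name, sequence length, and learning rate are placeholders. Gradient checkpointing, which this setup relies on, is enabled on the model object rather than through the DeepSpeed JSON.

# Minimal sketch: wiring zero3_bf16_moe.json into a training step via deepspeed.initialize.
# The model choice, batch shape, and learning rate are illustrative placeholders.
import deepspeed
import torch
from deepspeed.ops.adam import DeepSpeedCPUAdam
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained("gpt2")  # placeholder model
model.gradient_checkpointing_enable()  # checkpointing is set on the model, not in the JSON

# DeepSpeedCPUAdam pairs with "offload_optimizer": {"device": "cpu"} in the config.
optimizer = DeepSpeedCPUAdam(model.parameters(), lr=1e-5)

engine, optimizer, _, _ = deepspeed.initialize(
    model=model,
    optimizer=optimizer,
    config="zero3_bf16_moe.json",
)

# One bf16 training step under ZeRO-3 with parameter and optimizer offload to CPU.
input_ids = torch.randint(0, engine.module.config.vocab_size, (1, 128), device=engine.device)
loss = engine(input_ids=input_ids, labels=input_ids).loss
engine.backward(loss)
engine.step()

Launched through the deepspeed CLI (for example, deepspeed --num_gpus 2 on the training script), the per-GPU micro-batch size of 1 and steps_per_print of 50 from the config apply without further code.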