
[Figure: W&B plot from Saforem2's workspace comparing four GPT33B runs on ThetaGPU. Shared config: world_size: 32, env.SP_TYPE: megatron, micro_batch_size: 1, env.GAS: 1, global_batch_size: 1, zero_stage: 1, env.MPSIZE: 32, env.PPSIZE: 1, env.SPSIZE: 1, use_flash_attn: true. The runs differ only in seq_length: 300000, 288000, 200000, 192000. Y-axis range: 0.000 to 0.010.]