Kastan's group workspace
Group: Aug-05__14:16
All 20 runs listed below crashed. They appear to come from the same launch (every run carries the tag SLURM=513930) and share identical tags and configuration, given after the table; only the runtimes differ. Run names and notes were not preserved in this export.

| Run | State   | User   | Created       | Runtime |
|-----|---------|--------|---------------|---------|
| 1   | Crashed | kastan | Aug-05__14:16 | 27m 37s |
| 2   | Crashed | kastan | Aug-05__14:16 | 28m     |
| 3   | Crashed | kastan | Aug-05__14:16 | 27m 34s |
| 4   | Crashed | kastan | Aug-05__14:16 | 27m 53s |
| 5   | Crashed | kastan | Aug-05__14:16 | 27m 44s |
| 6   | Crashed | kastan | Aug-05__14:16 | 27m 58s |
| 7   | Crashed | kastan | Aug-05__14:16 | 27m 51s |
| 8   | Crashed | kastan | Aug-05__14:16 | 27m 45s |
| 9   | Crashed | kastan | Aug-05__14:16 | 27m 44s |
| 10  | Crashed | kastan | Aug-05__14:16 | 28m 1s  |
| 11  | Crashed | kastan | Aug-05__14:16 | 27m 51s |
| 12  | Crashed | kastan | Aug-05__14:16 | 27m 37s |
| 13  | Crashed | kastan | Aug-05__14:16 | 27m 35s |
| 14  | Crashed | kastan | Aug-05__14:16 | 27m 53s |
| 15  | Crashed | kastan | Aug-05__14:16 | 27m 53s |
| 16  | Crashed | kastan | Aug-05__14:16 | 27m 59s |
| 17  | Crashed | kastan | Aug-05__14:16 | 27m 59s |
| 18  | Crashed | kastan | Aug-05__14:16 | 27m 52s |
| 19  | Crashed | kastan | Aug-05__14:16 | 27m 52s |
| 20  | Crashed | kastan | Aug-05__14:16 | 27m 55s |
Showing runs 1-20 of 32.

Shared tags (identical on every run): BATCH_SIZE1280, NUM_EPOCHS=60, NUM_MICRO_BATCHES=8, SLURM=513930, TP=16, WORLD_SIZE=32

Shared configuration (identical across the 20 runs shown):

| Parameter | Value |
|-----------|-------|
| BATCH_SIZE | 1280 |
| LEARNING_RATE | 0.00015 |
| LOG_PATH | ./gpt2_2.5d_tp16_bs1280_lr0.00015_accum1_clip_grad1.0/ |
| NUM_EPOCHS | 60 |
| SEQ_LENGTH | 1024 |
| TOTAL_BATCH_SIZE | 1280 |
| VOCAB_SIZE | 50304 |
| WARMUP_EPOCHS | 21 |
| WEIGHT_DECAY | 0.01 |
| clip_grad_norm | 1 |
| conda_env_name | col_ai_quant |
| data_dir | /u/kastanday/LLM-Distributed-Quantization/datasets/small-gpt-dataset.json |
| gradient_accumulation | 1 |
| model.checkpoint | false |
| model.decoder_dtype | torch.bfloat16 |
| model.embed_dtype | torch.bfloat16 |
| model.head_dtype | torch.bfloat16 |
| model.layernorm_dtype | torch.bfloat16 |
| model.max_position_embeddings | 1024 |
| model.vocab_size | 50304 |
| model_dtypes.decoder_dtype | torch.bfloat16 |
| model_dtypes.embed_dtype | torch.bfloat16 |
| model_dtypes.head_dtype | torch.bfloat16 |
| model_dtypes.layernorm_dtype | torch.bfloat16 |
| num_gpus_per_node | 4 |
| optimizer.lr | 0.00015 |
| optimizer.weight_decay | 0.01 |
| parallel.pipeline | 2 |
| quant_gpt2_8B | titans.model.quant_gpt.quant_gpt.quant_gpt2_8B |
| total_gpus | 32 |
| NUM_MICRO_BATCHES | 8 |
| TENSOR_PARALLEL_MODE | 2.5d |
| TENSOR_PARALLEL_SIZE | 16 |
| loss.type | titans.loss.lm_loss.gpt_lmloss.GPTLMLoss |

Unset in every run ("-" in the original table): fp16.mode, gpt2_8B, gpt2_large, gpt2_medium, gpt2_xl, quant_gpt2_micro, quant_gpt2_small, quant_gpt2_xl, MICRO_BATCH_SIZE, PIPELINE_SIZE, and the Sweep column.
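The column names (parallel.pipeline, TENSOR_PARALLEL_MODE, fp16.mode, clip_grad_norm) follow Colossal-AI's config-file conventions, and the conda env is named col_ai_quant, so these runs were presumably driven by a Colossal-AI style config roughly like the sketch below. This is a reconstruction from the table, not the actual file; in particular, the 2.5-D tesseract depth is not recorded in the export and is assumed here.

```python
# Hypothetical Colossal-AI style config reconstructed from the shared values above.
# NOT the original file: the 2.5-D `depth` is an assumption (it must satisfy
# size = depth * q**2; depth=1 with q=4 is one valid choice for size=16).

BATCH_SIZE = 1280
NUM_EPOCHS = 60
SEQ_LENGTH = 1024
NUM_MICRO_BATCHES = 8

parallel = dict(
    pipeline=2,                                  # parallel.pipeline
    tensor=dict(mode='2.5d', size=16, depth=1),  # depth assumed, not in the export
)

optimizer = dict(lr=0.00015, weight_decay=0.01)  # optimizer.lr / optimizer.weight_decay
clip_grad_norm = 1.0

# fp16 is left unset (fp16.mode is blank in the table); the model instead runs
# its embed/decoder/head/layernorm components in torch.bfloat16.
```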
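The parallel layout and batch split implied by these values can be checked with a few lines of arithmetic. The sketch below is illustrative only: the variable names mirror the table, and the micro-batch size (blank in the export) is inferred as BATCH_SIZE / NUM_MICRO_BATCHES.

```python
# Sanity-check the layout implied by the shared configuration above.
# All values are copied from the table; nothing is read from W&B.

total_batch_size = 1280      # TOTAL_BATCH_SIZE
num_micro_batches = 8        # NUM_MICRO_BATCHES
pipeline_stages = 2          # parallel.pipeline
tensor_parallel_size = 16    # TENSOR_PARALLEL_SIZE (mode: 2.5d)
gpus_per_node = 4            # num_gpus_per_node

world_size = pipeline_stages * tensor_parallel_size
assert world_size == 32      # matches total_gpus and the WORLD_SIZE=32 tag

data_parallel_size = 32 // world_size                     # 1: no data parallelism here
nodes = world_size // gpus_per_node                       # 8 nodes of 4 GPUs each
micro_batch_size = total_batch_size // num_micro_batches  # 160 (inferred; column was blank)

print(f"{nodes=} {data_parallel_size=} {micro_batch_size=}")
```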
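Finally, loss.type points at titans.loss.lm_loss.gpt_lmloss.GPTLMLoss. That class is not shown in the export, but a GPT language-model loss is conventionally next-token cross-entropy over shifted logits, along these lines (a generic sketch, not the titans implementation):

```python
import torch
import torch.nn.functional as F

def gpt_lm_loss(logits: torch.Tensor, input_ids: torch.Tensor) -> torch.Tensor:
    """Next-token cross-entropy: predict token t+1 from positions <= t."""
    shift_logits = logits[:, :-1, :].contiguous()  # drop the prediction after the last token
    shift_labels = input_ids[:, 1:].contiguous()   # drop the first token as a label
    return F.cross_entropy(shift_logits.view(-1, shift_logits.size(-1)),
                           shift_labels.view(-1))

# In these runs the logits would have shape (micro_batch, SEQ_LENGTH, VOCAB_SIZE),
# i.e. (160, 1024, 50304) under the inferred micro-batch size above.
```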