Kastan's group workspace
Group: Aug-05__13:58
16 runs in this group, all in state "Crashed". No Name or Notes values appear in the export, so the runs are numbered here in display order.

Run  State    User    Created        Runtime  Sweep
  1  Crashed  kastan  Aug-05__13:58  28m 3s   -
  2  Crashed  kastan  Aug-05__13:58  28m 16s  -
  3  Crashed  kastan  Aug-05__13:58  28m 13s  -
  4  Crashed  kastan  Aug-05__13:58  28m 33s  -
  5  Crashed  kastan  Aug-05__13:58  28m 14s  -
  6  Crashed  kastan  Aug-05__13:58  28m 10s  -
  7  Crashed  kastan  Aug-05__13:58  28m 26s  -
  8  Crashed  kastan  Aug-05__13:58  28m 17s  -
  9  Crashed  kastan  Aug-05__13:58  28m 14s  -
 10  Crashed  kastan  Aug-05__13:58  28m 26s  -
 11  Crashed  kastan  Aug-05__13:58  28m 37s  -
 12  Crashed  kastan  Aug-05__13:58  28m 26s  -
 13  Crashed  kastan  Aug-05__13:58  28m 17s  -
 14  Crashed  kastan  Aug-05__13:58  28m 33s  -
 15  Crashed  kastan  Aug-05__13:58  28m 10s  -
 16  Crashed  kastan  Aug-05__13:58  28m 19s  -

Tags (identical for every run):
BATCH_SIZE32, MICRO_BATCH_SIZE=4, NUM_EPOCHS=20, NUM_MICRO_BATCHES=8, PP=2, SLURM=513923, TP=4, WORLD_SIZE=16

Config (identical for every run; "-" means no value was logged):
BATCH_SIZE: 32
LEARNING_RATE: 0.00015
LOG_PATH: -
NUM_EPOCHS: 20
SEQ_LENGTH: 1024
TOTAL_BATCH_SIZE: 32
VOCAB_SIZE: 50304
WARMUP_EPOCHS: 1
WEIGHT_DECAY: 0.01
clip_grad_norm: 1
conda_env_name: col_ai_quant
data_dir: /u/kastanday/LLM-Distributed-Quantization/datasets/small-gpt-dataset.json
fp16.mode: AMP_TYPE.NAIVE
gpt2_8B: -
gpt2_large: -
gpt2_medium: titans.model.gpt.gpt.gpt2_medium
gpt2_xl: -
gradient_accumulation: 1
model.checkpoint: true
model.decoder_dtype: -
model.embed_dtype: -
model.head_dtype: -
model.layernorm_dtype: -
model.max_position_embeddings: 1024
model.vocab_size: 50304
model_dtypes.decoder_dtype: -
model_dtypes.embed_dtype: -
model_dtypes.head_dtype: -
model_dtypes.layernorm_dtype: -
num_gpus_per_node: 4
optimizer.lr: 0.00015
optimizer.weight_decay: 0.01
parallel.pipeline: 2
quant_gpt2_8B: -
quant_gpt2_micro: -
quant_gpt2_small: -
quant_gpt2_xl: -
total_gpus: 16
MICRO_BATCH_SIZE: 4
NUM_MICRO_BATCHES: 8
PIPELINE_SIZE: 2
TENSOR_PARALLEL_MODE: 2d
TENSOR_PARALLEL_SIZE: 4
loss.type: titans.loss.lm_loss.gpt_lmloss.GPTLMLoss
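How the numbers fit together (an inference from the table, not something logged by the runs): MICRO_BATCH_SIZE 4 x NUM_MICRO_BATCHES 8 gives the per-step BATCH_SIZE / TOTAL_BATCH_SIZE of 32; the 16 GPUs (num_gpus_per_node 4, so presumably 4 nodes) are split into TENSOR_PARALLEL_SIZE 4 x PIPELINE_SIZE 2 = 8 GPUs per model replica, leaving a data-parallel degree of 16 / 8 = 2. The WORLD_SIZE=16, TP=4, PP=2 tags match this layout.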
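The conda env name (col_ai_quant), the titans.* module paths, and settings such as AMP_TYPE.NAIVE and the '2d' tensor-parallel mode point to a Colossal-AI Titans training setup. As a rough, hypothetical sketch only (the actual config file is not part of this export; the file layout and the AMP_TYPE import are assumptions), the shared values above would map onto a Colossal-AI-style config.py along these lines:

    # Hypothetical reconstruction of the shared run config as a Colossal-AI
    # config.py. Values are copied from the table above; the layout and the
    # AMP_TYPE import are assumptions, not part of the W&B export.
    from colossalai.amp import AMP_TYPE

    BATCH_SIZE = 32              # = MICRO_BATCH_SIZE * NUM_MICRO_BATCHES
    MICRO_BATCH_SIZE = 4
    NUM_MICRO_BATCHES = 8
    NUM_EPOCHS = 20
    WARMUP_EPOCHS = 1
    SEQ_LENGTH = 1024
    VOCAB_SIZE = 50304
    LEARNING_RATE = 0.00015
    WEIGHT_DECAY = 0.01

    TENSOR_PARALLEL_SIZE = 4
    TENSOR_PARALLEL_MODE = '2d'
    PIPELINE_SIZE = 2

    # 4 GPUs/node x 4 nodes = 16 GPUs; TP 4 x PP 2 leaves data parallelism of 2
    parallel = dict(
        pipeline=PIPELINE_SIZE,
        tensor=dict(size=TENSOR_PARALLEL_SIZE, mode=TENSOR_PARALLEL_MODE),
    )

    fp16 = dict(mode=AMP_TYPE.NAIVE)   # fp16.mode: AMP_TYPE.NAIVE
    clip_grad_norm = 1.0               # clip_grad_norm: 1
    gradient_accumulation = 1          # gradient_accumulation: 1

    optimizer = dict(lr=LEARNING_RATE, weight_decay=WEIGHT_DECAY)

Launched across the 16 ranks, a config like this would split the gpt2_medium decoder (titans.model.gpt.gpt.gpt2_medium) over two pipeline stages and shard each stage's layers 4-way with 2D tensor parallelism; model.checkpoint: true presumably enables activation checkpointing on top of that.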