Hlasse's workspace: Runs

374 runs (374 visualized). Table columns: Name, State, Notes, User, Tags, Created, Runtime, Sweep, plus the logged config fields _n_gpu, _name_or_path, adafactor, adam_beta1, adam_beta2, adam_epsilon, add_cross_attention, architectures, attention_head_size, attention_probs_dropout_prob, auto_find_batch_size, bf16, bf16_full_eval, bos_token_id, chunk_size_feed_forward, config_name, conv_act, conv_kernel_size, dagw_dfm_weight, danews_weight, data_seed, dataloader_drop_last, dataloader_num_workers, dataloader_pin_memory, dataset_name, ddp_bucket_cap_mb, ddp_find_unused_parameters, debug, deepspeed, directionality, disable_tqdm, diversity_penalty, do_eval, do_predict, do_sample, do_train, dtype, early_stopping, embedding_size, encoder_no_repeat_ngram_size, eos_token_id, eval_accumulation_steps, eval_batch_size and eval_delay. Name and Created values are not present in this excerpt; Notes and Sweep are empty ("-") for every row shown.

Rows shown (State · User · Tags · Runtime):

 1. Finished · saattrupdan · - · 1mo 13d 18h 25m 53s · no config fields logged
 2. Crashed  · kenevoldsen · - · 10mo 2d 15h 29m 17s · mixed config values (listed below)
 3. Crashed  · kenevoldsen · mlm, pytorch · 2d 21h 5m 13s · DanskBERT MLM config (listed below)
 4. Crashed  · kenevoldsen · mlm, pytorch · 2d 1h 19m 32s · DanskBERT MLM config
 5. Crashed  · kenevoldsen · mlm, pytorch · 5d 2h 15m 48s · DanskBERT MLM config
 6. Failed   · kenevoldsen · mlm, pytorch · 33s · partial config (see note below)
 7. Failed   · kenevoldsen · mlm, pytorch · 32s · partial config
 8. Crashed  · kenevoldsen · mlm, pytorch · 5d 16h 57m 32s · DanskBERT MLM config
 9. Failed   · kenevoldsen · mlm, pytorch · 5s · partial config
10. Failed   · kenevoldsen · mlm, pytorch · 7s · partial config
11. Crashed  · kenevoldsen · mlm, pytorch · 4h 59m 57s · DanskBERT MLM config
12. Killed   · kenevoldsen · mlm, pytorch · 2m 42s · DanskBERT MLM config

Config for run 2 (several cells hold multiple or averaged values, reproduced as shown in the table):

_n_gpu: 2.78195
_name_or_path: ["/home/kenneth/github/danish-foundation-models/default-models-configs/large-deberta-v2-32000-config.json", "/home/kenneth/github/danish-foundation-models/default-models-configs/small-deberta-v2-32000-config_test.json", "/home/kenneth/github/danish-foundation-models/default-models-configs/small-roberta-32000-config.json", "/home/ucloud/danish-foundation-models/default-models-configs/base-deberta-v2-32000-config.json", "/home/ucloud/danish-foundation-models/default-models-configs/small-deberta-v2-32000-config.json", "/home/ucloud/danish-foundation-models/default-models-configs/small-roberta-32000-config.json", "Maltehb/aelaectra-danish-electra-small-cased", "NbAiLab/nb-bert-large", "jonfd/electra-small-nordic", "vesteinn/DanskBERT"]
adafactor: false
adam_beta1: 0.9
adam_beta2: 0.98201
adam_epsilon: 8.9508e-7
add_cross_attention: false
architectures: ["BertForMaskedLM", "BertForTokenClassification", "DebertaV2Model", "ElectraForPreTraining", "RobertaForMaskedLM", "XLMRobertaForMaskedLM"]
attention_head_size: 64
attention_probs_dropout_prob: 0.1
auto_find_batch_size: [false, true]
bf16: false
bf16_full_eval: false
bos_token_id: 0.78146
chunk_size_feed_forward: 0
config_name: ["/home/kenneth/github/danish-foundation-models/default-models-configs/large-deberta-v2-32000-config.json", "/home/kenneth/github/danish-foundation-models/default-models-configs/large-roberta-config.json", "/home/kenneth/github/danish-foundation-models/default-models-configs/small-deberta-v2-32000-config.json", "/home/kenneth/github/danish-foundation-models/default-models-configs/small-deberta-v2-32000-config_test.json", "/home/kenneth/github/danish-foundation-models/default-models-configs/small-deberta-v2-config.json", "/home/kenneth/github/danish-foundation-models/default-models-configs/small-roberta-32000-config.json", "/home/ucloud/danish-foundation-models/default-models-configs/base-deberta-v2-32000-config.json", "/home/ucloud/danish-foundation-models/default-models-configs/small-deberta-v2-32000-config.json", "/home/ucloud/danish-foundation-models/default-models-configs/small-roberta-32000-config.json", "~/danish-foundation-models/default-models-configs/small-deberta-v2-32000-config.json"]
conv_act: gelu
conv_kernel_size: 3
dagw_dfm_weight: 0.13158
danews_weight: 0.20749
data_seed: None
dataloader_drop_last: false
dataloader_num_workers: 0
dataloader_pin_memory: true
dataset_name: ["dcc-v1", "dcc_v1.1.0"]
ddp_bucket_cap_mb: None
ddp_find_unused_parameters: None
debug: ["", "[]"]
deepspeed: None
directionality: bidi
disable_tqdm: false
diversity_penalty: 0
do_eval: [false, true]
do_predict: false
do_sample: false
do_train: [false, true]
dtype: float32
early_stopping: false
embedding_size: 128
encoder_no_repeat_ngram_size: 0
eos_token_id: 2
eval_accumulation_steps: None
eval_batch_size: 333.09023
eval_delay: 0

DanskBERT MLM config, shared by runs 3-5, 8, 11 and 12 (fields not listed are empty):

_n_gpu: 4
_name_or_path: vesteinn/DanskBERT
adafactor: false
adam_beta1: 0.9
adam_beta2: 0.98
adam_epsilon: 0.000001
add_cross_attention: false
architectures: ["XLMRobertaForMaskedLM"]
attention_probs_dropout_prob: 0.1
auto_find_batch_size: false
bf16: false
bf16_full_eval: false
bos_token_id: 0
chunk_size_feed_forward: 0
dagw_dfm_weight: 0.2
danews_weight: 0.25
data_seed: None
dataloader_drop_last: false
dataloader_num_workers: 0
dataloader_pin_memory: true
dataset_name: dcc_v1.1.0
ddp_bucket_cap_mb: None
ddp_find_unused_parameters: None
debug: []
deepspeed: None
disable_tqdm: false
diversity_penalty: 0
do_eval: true
do_predict: false
do_sample: false
do_train: true
early_stopping: false
encoder_no_repeat_ngram_size: 0
eos_token_id: 2
eval_accumulation_steps: None
eval_batch_size: 256
eval_delay: 0

Partial config for the Failed runs (6, 7, 9 and 10): only a subset of the training arguments was logged (adafactor: false, adam_beta1: 0.9, adam_beta2: 0.98, adam_epsilon: 0.000001, auto_find_batch_size: false, bf16: false, bf16_full_eval: false, dagw_dfm_weight: 0.2, danews_weight: 0.25, dataloader_drop_last: false, dataloader_num_workers: 0, dataloader_pin_memory: true, dataset_name: dcc_v1.1.0, eval_delay: 0); the model-config fields are empty, consistent with these runs failing within seconds.
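A table like this can also be pulled programmatically. The snippet below is a minimal sketch using the W&B public API; the entity/project path "hlasse/danish-foundation-models" is a placeholder assumption (the real path behind this workspace is not visible in the table), and only standard attributes of the public Run object are used.

import wandb

api = wandb.Api()

# Placeholder path: the actual entity/project is not shown in the table.
runs = api.runs("hlasse/danish-foundation-models")

for run in runs:
    runtime_s = run.summary.get("_runtime")  # wall-clock seconds, if logged
    print(
        run.state,                    # Finished / Crashed / Failed / Killed
        ",".join(run.tags) or "-",    # e.g. "mlm,pytorch"
        runtime_s,
        run.config.get("_name_or_path"),
        run.config.get("adam_beta2"),
    )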
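Most of the config columns map onto Hugging Face transformers objects. The sketch below shows, under stated assumptions, how the shared DanskBERT MLM values could be reproduced: model-side fields via AutoConfig, trainer-side fields via TrainingArguments. The output_dir and the per-device batch-size split are assumptions (the table only logs a total eval_batch_size of 256 across _n_gpu of 4), and dagw_dfm_weight, danews_weight and dataset_name look like project-specific data-mixing options rather than standard TrainingArguments fields.

from transformers import AutoConfig, TrainingArguments

# Model-side fields in the table (_name_or_path, architectures,
# bos_token_id/eos_token_id, attention_probs_dropout_prob) come from
# the checkpoint's own config.
config = AutoConfig.from_pretrained("vesteinn/DanskBERT")

# Trainer-side fields as logged for the DanskBERT MLM runs.
args = TrainingArguments(
    output_dir="outputs",             # assumption: not logged in the table
    do_train=True,
    do_eval=True,
    adam_beta1=0.9,
    adam_beta2=0.98,
    adam_epsilon=1e-6,
    per_device_eval_batch_size=64,    # assumption: 256 total / 4 GPUs
    eval_delay=0,
    dataloader_drop_last=False,
    dataloader_num_workers=0,
    dataloader_pin_memory=True,
    bf16=False,
    bf16_full_eval=False,
    disable_tqdm=False,
)

The dataset mixing weights (dagw_dfm_weight, danews_weight) and dataset_name would then be consumed by the project's own data-loading code rather than by the Trainer itself.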