Darinka's group workspace
Group: test_code_carbon_9
Columns:
Name
Created
Runtime
End Time
ID
Notes
Updated
Tags
configs.decision
configs.embedding
configs.regex
configs.scoring
embedder_config.batch_size
embedder_config.model_name
embedder_config.tokenizer_config.padding
embedder_config.tokenizer_config.truncation
embedder_config.use_cache
k
metrics.decision
metrics.embedding
metrics.regex
metrics.scoring
pipeline_metrics.decision_accuracy
pipeline_metrics.decision_f1
pipeline_metrics.decision_precision
pipeline_metrics.decision_recall
pipeline_metrics.decision_roc_auc
decision_accuracy
decision_f1
decision_precision
decision_recall
decision_roc_auc
emissions/cloud_provider
emissions/cloud_region
emissions/codecarbon_version
emissions/country_iso_code
emissions/country_name
emissions/cpu_count
emissions/cpu_energy
emissions/cpu_model
emissions/cpu_power
emissions/duration
emissions/emissions
emissions/emissions_rate
emissions/energy_consumed
emissions/gpu_count
emissions/gpu_energy
emissions/gpu_model
emissions/gpu_power
emissions/latitude
emissions/longitude
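The emissions/* columns above match the per-run fields that CodeCarbon records (CPU/GPU energy and power, total energy_consumed, emissions in kg CO2-eq, geolocation, and so on). Below is a minimal, hypothetical sketch of how such measurements are typically produced with CodeCarbon's EmissionsTracker; the project name is taken from the group name above, and the measured workload is a placeholder, not AutoIntent's actual integration.

```python
from codecarbon import EmissionsTracker


def run_scoring_module() -> None:
    """Placeholder for the workload being measured (e.g. fitting one scoring module)."""
    sum(i * i for i in range(1_000_000))


tracker = EmissionsTracker(project_name="test_code_carbon_9")  # project name assumed from the group above
tracker.start()
try:
    run_scoring_module()
finally:
    emissions_kg = tracker.stop()  # total emissions in kg CO2-eq for the tracked block

# Detailed fields (cpu_energy, gpu_power, duration, country_name, ...) are written to
# CodeCarbon's output file (emissions.csv by default) and can then be logged per run.
print(f"emissions: {emissions_kg} kg CO2-eq")
```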
Runs (1-15 of 15):

Run 2o1rk9xn | Runtime 2s | Created Apr 08 '25 16:15 | Updated Apr 08 '25 16:15
configs.decision:
[{"module_dump_dir":"/home/darinka/AutoIntent/autointent/experiments/runs/test_code_carbon_9/modules_dumps/NodeType.decision/threshold/comb_0","metrics":{"decision_accuracy":0.625},"metric_name":"decision_accuracy","module_name":"threshold","metric_value":0.625,"module_params":{}}]
configs.embedding: []
configs.regex: []
configs.scoring:
[{"module_name":"linear","metric_value":0.9166666666666669,"module_params":{"embedder_config":{"default_prompt":null,"tokenizer_config":{"truncation":true,"padding":true,"max_length":null},"use_cache":false,"batch_size":32,"model_name":"BAAI/bge-reranker-base","query_prompt":null,"cluster_prompt":null,"passage_prompt":null,"classifier_prompt":null,"similarity_fn_name":"cosine","device":null,"sts_prompt":null}},"module_dump_dir":"/home/darinka/AutoIntent/autointent/experiments/runs/test_code_carbon_9/modules_dumps/NodeType.scoring/linear/comb_0","metrics":{"scoring_roc_auc":0.9166666666666669,"scoring_accuracy":0.5,"scoring_precision":0.25},"metric_name":"scoring_roc_auc"},{"metrics":{"scoring_roc_auc":1,"scoring_accuracy":1,"scoring_precision":1},"metric_name":"scoring_roc_auc","module_name":"knn","metric_value":1,"module_params":{"k":3,"embedder_config":{"tokenizer_config":{"padding":true,"max_length":null,"truncation":true},"device":null,"use_cache":false,"batch_size":32,"model_name":"sentence-transformers/all-MiniLM-L6-v2","cluster_prompt":null,"default_prompt":null,"classifier_prompt":null,"similarity_fn_name":"cosine","sts_prompt":null,"query_prompt":null,"passage_prompt":null}},"module_dump_dir":"/home/darinka/AutoIntent/autointent/experiments/runs/test_code_carbon_9/modules_dumps/NodeType.scoring/knn/comb_0"},{"metrics":{"scoring_roc_auc":0.4166666666666667,"scoring_accuracy":0.5,"scoring_precision":0.3333333333333333},"metric_name":"scoring_roc_auc","module_name":"description","metric_value":0.4166666666666667,"module_params":{"encoder_type":"cross","embedder_config":{"device":null,"batch_size":32,"sts_prompt":null,"query_prompt":null,"passage_prompt":null,"tokenizer_config":{"max_length":null,"truncation":true,"padding":true},"classifier_prompt":null,"similarity_fn_name":"dot","use_cache":false,"model_name":"BAAI/bge-reranker-base","cluster_prompt":null,"default_prompt":null},"cross_encoder_config":"BAAI/bge-reranker-base"},"module_dump_dir":"/home/darinka/AutoIntent/autointent/experiments/runs/test_code_carbon_9/modules_dumps/NodeType.scoring/description/comb_0"},{"metrics":{"scoring_roc_auc":0.5,"scoring_accuracy":0.25,"scoring_precision":0.125},"metric_name":"scoring_roc_auc","module_name":"description","metric_value":0.5,"module_params":{"encoder_type":"bi","embedder_config":{"sts_prompt":null,"cluster_prompt":null,"default_prompt":null,"passage_prompt":null,"tokenizer_config":{"truncation":true,"padding":true,"max_length":null},"batch_size":32,"query_prompt":null,"classifier_prompt":null,"similarity_fn_name":"dot","device":null,"use_cache":false,"model_name":"BAAI/bge-reranker-base"},"cross_encoder_config":"BAAI/bge-reranker-base"},"module_dump_dir":"/home/darinka/AutoIntent/autointent/experiments/runs/test_code_carbon_9/modules_dumps/NodeType.scoring/description/comb_1"}]
metrics.decision: [0.625]
metrics.embedding: []
metrics.regex: []
metrics.scoring: [0.9166666666666669, 1, 0.4166666666666667, 0.5]
pipeline_metrics: decision_accuracy 0.66667, decision_f1 0.65333, decision_precision 0.56667, decision_recall 0.8, decision_roc_auc 0.86
decision_accuracy 0.66667, decision_f1 0.65333, decision_precision 0.56667, decision_recall 0.8, decision_roc_auc 0.86
(all other columns empty)
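The configs.scoring cell above is a JSON list of module trials, each carrying module_name, metric_name, metric_value, the full metrics dict, module_params, and module_dump_dir. A small, hypothetical sketch of picking the best trial from such a cell (the file name is illustrative; the structure is exactly the one shown above):

```python
import json

# Hypothetical file holding the configs.scoring cell shown above.
with open("configs_scoring.json") as f:
    trials = json.load(f)

# Each trial reports one target metric; pick the trial with the highest value.
best = max(trials, key=lambda trial: trial["metric_value"])
print(best["module_name"], best["metric_name"], best["metric_value"])
# For the run above this prints: knn scoring_roc_auc 1
print(best["module_dump_dir"])  # where that module's artifacts were dumped
```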
Run 5mr8nev0 | Runtime 3s | Created Apr 08 '25 16:15 | Updated Apr 08 '25 16:15
decision_accuracy: 0.625
emissions: cpu_count 28, cpu_energy 7.5370e-8, cpu_power 42.5, duration -1744125461.01563, emissions 3.7543e-8, emissions_rate 0.0000074814, energy_consumed 8.5125e-8, gpu_count 1, gpu_energy 0, gpu_power 0, latitude 55.7487, longitude 37.6187
(all other columns empty)
Run 0x8nj567 | Runtime 5s | Created Apr 08 '25 16:15 | Updated Apr 08 '25 16:15
embedder_config: batch_size 32, model_name BAAI/bge-reranker-base, tokenizer padding true, tokenizer truncation true, use_cache false
emissions: cpu_count 28, cpu_energy 0.00003087, cpu_power 42.5, duration -1744125458.40648, emissions 0.000026418, emissions_rate 0.000010108, energy_consumed 0.0000599, gpu_count 1, gpu_energy 0.000022462, gpu_power 31.02899, latitude 55.7487, longitude 37.6187
(all other columns empty)
Run 7e5rwb4k | Runtime 4s | Created Apr 08 '25 16:15 | Updated Apr 08 '25 16:15
embedder_config: batch_size 32, model_name BAAI/bge-reranker-base, tokenizer padding true, tokenizer truncation true, use_cache false
emissions: cpu_count 28, cpu_energy 0.000029415, cpu_power 42.5, duration -1744125458.52972, emissions 0.000030192, emissions_rate 0.000012123, energy_consumed 0.000068457, gpu_count 1, gpu_energy 0.000032767, gpu_power 47.38073, latitude 55.7487, longitude 37.6187
(all other columns empty)
Run fk4k7sd6 | Runtime 4s | Created Apr 08 '25 16:15 | Updated Apr 08 '25 16:15
embedder_config: batch_size 32, model_name sentence-transformers/all-MiniLM-L6-v2, tokenizer padding true, tokenizer truncation true, use_cache false
k: 3
emissions: cpu_count 28, cpu_energy 0.000021323, cpu_power 42.5, duration -1744125459.21492, emissions 0.000014803, emissions_rate 0.0000082012, energy_consumed 0.000033564, gpu_count 1, gpu_energy 0.000007697, gpu_power 15.36612, latitude 55.7487, longitude 37.6187
(all other columns empty)
Run xcbe70bf | Runtime 5s | Created Apr 08 '25 16:15 | Updated Apr 08 '25 16:15
embedder_config: batch_size 32, model_name BAAI/bge-reranker-base, tokenizer padding true, tokenizer truncation true, use_cache false
emissions: cpu_count 28, cpu_energy 0.000042873, cpu_power 42.5, duration -1744125457.39026, emissions 0.000032151, emissions_rate 0.0000088574, energy_consumed 0.000072899, gpu_count 1, gpu_energy 0.000020877, gpu_power 20.70444, latitude 55.7487, longitude 37.6187
(all other columns empty)
Run c4zc45wp | Runtime 6s | Created Apr 08 '25 16:14 | Updated Apr 08 '25 16:14
embedder_config: batch_size 32, model_name BAAI/bge-reranker-base, tokenizer padding true, tokenizer truncation true, use_cache false
emissions: cpu_count 28, cpu_energy 0.000045456, cpu_power 42.5, duration -1744125457.17113, emissions 0.000045525, emissions_rate 0.000011827, energy_consumed 0.00010322, gpu_count 1, gpu_energy 0.000048066, gpu_power 44.95737, latitude 55.7487, longitude 37.6187
(all other columns empty)
Run wt3ogpfw | Runtime 5s | Created Apr 08 '25 16:14 | Updated Apr 08 '25 16:14
embedder_config: batch_size 32, model_name sentence-transformers/all-MiniLM-L6-v2, tokenizer padding true, tokenizer truncation true, use_cache false
k: 3
emissions: cpu_count 28, cpu_energy 0.000030445, cpu_power 42.5, duration -1744125458.44232, emissions 0.000026778, emissions_rate 0.000010388, energy_consumed 0.000060716, gpu_count 1, gpu_energy 0.000023778, gpu_power 33.22154, latitude 55.7487, longitude 37.6187
(all other columns empty)
Run gsq5fkbu | Runtime 6s | Created Apr 08 '25 16:13 | Updated Apr 08 '25 16:13
embedder_config: batch_size 32, model_name BAAI/bge-reranker-base, tokenizer padding true, tokenizer truncation true, use_cache false
emissions: cpu_count 28, cpu_energy 0.000043602, cpu_power 42.5, duration -1744125457.33022, emissions 0.000032313, emissions_rate 0.000008757, energy_consumed 0.000073266, gpu_count 1, gpu_energy 0.000020359, gpu_power 19.85197, latitude 55.7487, longitude 37.6187
(all other columns empty)
Run itsnv57j | Runtime 2s | Created Apr 08 '25 15:59 | Updated Apr 08 '25 15:59
configs.decision:
[{"metrics":{"decision_accuracy":0.625},"metric_name":"decision_accuracy","module_name":"threshold","metric_value":0.625,"module_params":{},"module_dump_dir":"/home/darinka/AutoIntent/autointent/experiments/runs/test_code_carbon_9/modules_dumps/NodeType.decision/threshold/comb_0"}]
configs.embedding: []
configs.regex: []
configs.scoring:
[{"module_dump_dir":"/home/darinka/AutoIntent/autointent/experiments/runs/test_code_carbon_9/modules_dumps/NodeType.scoring/linear/comb_0","metrics":{"scoring_roc_auc":0.9166666666666669,"scoring_accuracy":0.5,"scoring_precision":0.25},"metric_name":"scoring_roc_auc","module_name":"linear","metric_value":0.9166666666666669,"module_params":{"embedder_config":{"sts_prompt":null,"tokenizer_config":{"max_length":null,"truncation":true,"padding":true},"classifier_prompt":null,"similarity_fn_name":"cosine","device":null,"use_cache":false,"batch_size":32,"model_name":"BAAI/bge-reranker-base","query_prompt":null,"cluster_prompt":null,"default_prompt":null,"passage_prompt":null}}},{"module_params":{"k":3,"embedder_config":{"device":null,"use_cache":false,"batch_size":32,"sts_prompt":null,"query_prompt":null,"cluster_prompt":null,"passage_prompt":null,"tokenizer_config":{"padding":true,"max_length":null,"truncation":true},"model_name":"sentence-transformers/all-MiniLM-L6-v2","default_prompt":null,"classifier_prompt":null,"similarity_fn_name":"cosine"}},"module_dump_dir":"/home/darinka/AutoIntent/autointent/experiments/runs/test_code_carbon_9/modules_dumps/NodeType.scoring/knn/comb_0","metrics":{"scoring_roc_auc":1,"scoring_accuracy":1,"scoring_precision":1},"metric_name":"scoring_roc_auc","module_name":"knn","metric_value":1},{"metrics":{"scoring_precision":0.3333333333333333,"scoring_roc_auc":0.4166666666666667,"scoring_accuracy":0.5},"metric_name":"scoring_roc_auc","module_name":"description","metric_value":0.4166666666666667,"module_params":{"cross_encoder_config":"BAAI/bge-reranker-base","encoder_type":"cross","embedder_config":{"use_cache":false,"batch_size":32,"cluster_prompt":null,"default_prompt":null,"passage_prompt":null,"classifier_prompt":null,"similarity_fn_name":"cosine","device":null,"model_name":"BAAI/bge-reranker-base","sts_prompt":null,"query_prompt":null,"tokenizer_config":{"padding":true,"max_length":null,"truncation":true}}},"module_dump_dir":"/home/darinka/AutoIntent/autointent/experiments/runs/test_code_carbon_9/modules_dumps/NodeType.scoring/description/comb_0"},{"module_name":"description","metric_value":0.5,"module_params":{"encoder_type":"bi","embedder_config":{"tokenizer_config":{"padding":true,"max_length":null,"truncation":true},"similarity_fn_name":"cosine","model_name":"BAAI/bge-reranker-base","sts_prompt":null,"passage_prompt":null,"classifier_prompt":null,"device":null,"use_cache":false,"batch_size":32,"query_prompt":null,"cluster_prompt":null,"default_prompt":null},"cross_encoder_config":"BAAI/bge-reranker-base"},"module_dump_dir":"/home/darinka/AutoIntent/autointent/experiments/runs/test_code_carbon_9/modules_dumps/NodeType.scoring/description/comb_1","metrics":{"scoring_roc_auc":0.5,"scoring_accuracy":0.25,"scoring_precision":0.125},"metric_name":"scoring_roc_auc"}]
metrics.decision: [0.625]
metrics.embedding: []
metrics.regex: []
metrics.scoring: [0.9166666666666669, 1, 0.4166666666666667, 0.5]
pipeline_metrics: decision_accuracy 0.66667, decision_f1 0.65333, decision_precision 0.56667, decision_recall 0.8, decision_roc_auc 0.86
decision_accuracy 0.66667, decision_f1 0.65333, decision_precision 0.56667, decision_recall 0.8, decision_roc_auc 0.86
(all other columns empty)
Run 8zl0o0mm | Runtime 2s | Created Apr 08 '25 15:59 | Updated Apr 08 '25 15:59
decision_accuracy: 0.625
emissions: cpu_count 28, cpu_energy 6.4319e-8, cpu_power 42.5, duration -1744125461.01657, emissions 3.2711e-8, emissions_rate 0.0000077946, energy_consumed 7.4169e-8, gpu_count 1, gpu_energy 0, gpu_power 0, latitude 55.7487, longitude 37.6187
(all other columns empty)
Run cuftzu71 | Runtime 5s | Created Apr 08 '25 15:59 | Updated Apr 08 '25 15:59
embedder_config: batch_size 32, model_name BAAI/bge-reranker-base, tokenizer padding true, tokenizer truncation true, use_cache false
emissions: cpu_count 28, cpu_energy 0.000030003, cpu_power 42.5, duration -1744125458.48005, emissions 0.000020931, emissions_rate 0.0000082404, energy_consumed 0.000047459, gpu_count 1, gpu_energy 0.000011056, gpu_power 15.67081, latitude 55.7487, longitude 37.6187
(all other columns empty)
Run 34zx4f1m | Runtime 5s | Created Apr 08 '25 15:58 | Updated Apr 08 '25 15:58
embedder_config: batch_size 32, model_name BAAI/bge-reranker-base, tokenizer padding true, tokenizer truncation true, use_cache false
emissions: cpu_count 28, cpu_energy 0.000030393, cpu_power 42.5, duration -1744125458.44763, emissions 0.000031041, emissions_rate 0.000012066, energy_consumed 0.000070382, gpu_count 1, gpu_energy 0.000033505, gpu_power 46.87864, latitude 55.7487, longitude 37.6187
(all other columns empty)
Run u6hyzr5q | Runtime 9s | Created Apr 08 '25 15:58 | Updated Apr 08 '25 15:58
embedder_config: batch_size 32, model_name sentence-transformers/all-MiniLM-L6-v2, tokenizer padding true, tokenizer truncation true, use_cache false
k: 3
emissions: cpu_count 28, cpu_energy 0.000073687, cpu_power 42.5, duration -1744125454.77904, emissions 0.00005528, emissions_rate 0.0000088584, energy_consumed 0.00012534, gpu_count 1, gpu_energy 0.000035927, gpu_power 20.72625, latitude 55.7487, longitude 37.6187
(all other columns empty)
Run qm9k12nz | Runtime 20s | Created Apr 08 '25 15:58 | Updated Apr 08 '25 15:58
embedder_config: batch_size 32, model_name BAAI/bge-reranker-base, tokenizer padding true, tokenizer truncation true, use_cache false
emissions: cpu_count 28, cpu_energy 0.00020865, cpu_power 42.5, duration -1744125443.34896, emissions 0.00013107, emissions_rate 0.0000074169, energy_consumed 0.00029719, gpu_count 1, gpu_energy 0.000044002, gpu_power 8.96378, latitude 55.7487, longitude 37.6187
(all other columns empty)
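To compare energy use across the runs in this table, one option is to export it (for example as CSV with the column names listed at the top) and aggregate the emissions columns. A rough sketch assuming such an export exists under a hypothetical file name:

```python
import pandas as pd

# Hypothetical CSV export of the runs table above, using the same column names.
df = pd.read_csv("test_code_carbon_9_runs.csv")

# Total emissions (kg CO2-eq) and energy (kWh) per embedder model; runs that did not
# log an embedder model (e.g. the decision-node runs) are dropped by the groupby.
per_model = (
    df.dropna(subset=["emissions/emissions"])
      .groupby("embedder_config.model_name")[["emissions/emissions", "emissions/energy_consumed"]]
      .sum()
      .sort_values("emissions/emissions", ascending=False)
)
print(per_model)
```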