Batu's workspace
Runs: 1,796
Name
State
Notes
User
Tags
Created
Runtime
Sweep
activation
add_node_id
aggregation_method
batch_size
edge_type
hidden_dim
input_dim
learning_rate
linear_input
linear_output
model_type
node_id
num_layers
residual
self_loop
activation_first_layer
dropout
h_feats
seed
architecture
dropout_percentage
env_name
k
l2_regularization
loss
max_epochs
obs
observation
scale_by_10
sweep
goal_connection_type
id_one_hot
node_observation_specification
observation_specification.agent_pos.slice_start
observation_specification.agent_pos.slice_stop
observation_specification.full.slice_start
observation_specification.full.slice_stop
observation_specification.goal_pos.slice_start
observation_specification.goal_pos.slice_stop
observation_specification.vector.slice_start
observation_specification.vector.slice_stop
observation_specification.velocity.slice_start
observation_specification.velocity.slice_stop
typeID_one_hot
Crashed
-
batu
6d 23h 21m 44s
-
torch.nn.functional.relu
false
node
128
goal_to_agent
128
23
0.0052174
true
true
__main__.GCNLayer
-
3
false
true
false
true
-
-
-
-
Jump
-
-
-
30
pos
-
-
JumpGraphTest
["base","random"]
true
abs_pos
13
16
-
-
16
19
-
-
9
12
true
Finished
-
batu
1h 4m 51s
-
torch.nn.functional.relu
-
-
146.28571
-
-
128.7619
0.001
-
-
-
-
3
-
-
-
-
512
3120.19048
["BaselineNoGraph(\n (dropout): Dropout(p=0, inplace=False)\n (layers): ModuleList(\n (0): Linear(in_features=19, out_features=512, bias=True)\n (1): Linear(in_features=512, out_features=512, bias=True)\n (2): Linear(in_features=512, out_features=3, bias=True)\n )\n (input_layer): Linear(in_features=19, out_features=512, bias=True)\n (hidden_layers): ModuleList(\n (0): Linear(in_features=512, out_features=512, bias=True)\n )\n (output_layer): Linear(in_features=512, out_features=3, bias=True)\n)","BaselineNoGraph(\n (dropout): Dropout(p=0, inplace=False)\n (layers): ModuleList(\n (0): Linear(in_features=505, out_features=512, bias=True)\n (1): Linear(in_features=512, out_features=512, bias=True)\n (2): Linear(in_features=512, out_features=3, bias=True)\n )\n (input_layer): Linear(in_features=505, out_features=512, bias=True)\n (hidden_layers): ModuleList(\n (0): Linear(in_features=512, out_features=512, bias=True)\n )\n (output_layer): Linear(in_features=512, out_features=3, bias=True)\n)","BaselineNoGraph(\n (dropout): Dropout(p=0, inplace=False)\n (layers): ModuleList(\n (0): Linear(in_features=6, out_features=512, bias=True)\n (1): Linear(in_features=512, out_features=512, bias=True)\n (2): Linear(in_features=512, out_features=3, bias=True)\n )\n (input_layer): Linear(in_features=6, out_features=512, bias=True)\n (hidden_layers): ModuleList(\n (0): Linear(in_features=512, out_features=512, bias=True)\n )\n (output_layer): Linear(in_features=512, out_features=3, bias=True)\n)","BaselineNoGraph(\n (dropout): Dropout(p=0, inplace=False)\n (layers): ModuleList(\n (0): Linear(in_features=9, out_features=512, bias=True)\n (1): Linear(in_features=512, out_features=512, bias=True)\n (2): Linear(in_features=512, out_features=3, bias=True)\n )\n (input_layer): Linear(in_features=9, out_features=512, bias=True)\n (hidden_layers): ModuleList(\n (0): Linear(in_features=512, out_features=512, bias=True)\n )\n (output_layer): Linear(in_features=512, out_features=3, bias=True)\n)"]
0
Jump
-
0
torch.nn.functional.mse_loss
50
-
pos
-
InputSweep
-
-
-
13
16
0
505
16
19
0
19
9
12
-
Finished
-
batu
42m 1s
-
-
-
-
-
-
-
-
-
-
-
KNN Regression
-
-
-
-
-
-
-
-
-
-
-
2.77778
-
-
-
-
-
-
-
-
-
-
1.6875
4.6875
-
-
4.6875
7.6875
-
-
0
3
-
Finished
-
batu
1h 11m 14s
-
torch.nn.functional.relu
-
-
512
-
-
8.0625
0.001
-
-
-
-
3
-
-
-
-
512
3713.4375
["BaselineNoGraph(\n (dropout): Dropout(p=0, inplace=False)\n (layers): ModuleList(\n (0): Linear(in_features=6, out_features=512, bias=True)\n (1): Linear(in_features=512, out_features=512, bias=True)\n (2): Linear(in_features=512, out_features=3, bias=True)\n )\n (input_layer): Linear(in_features=6, out_features=512, bias=True)\n (hidden_layers): ModuleList(\n (0): Linear(in_features=512, out_features=512, bias=True)\n )\n (output_layer): Linear(in_features=512, out_features=3, bias=True)\n)","BaselineNoGraph(\n (dropout): Dropout(p=0, inplace=False)\n (layers): ModuleList(\n (0): Linear(in_features=9, out_features=512, bias=True)\n (1): Linear(in_features=512, out_features=512, bias=True)\n (2): Linear(in_features=512, out_features=3, bias=True)\n )\n (input_layer): Linear(in_features=9, out_features=512, bias=True)\n (hidden_layers): ModuleList(\n (0): Linear(in_features=512, out_features=512, bias=True)\n )\n (output_layer): Linear(in_features=512, out_features=3, bias=True)\n)"]
0
Jump
-
0
torch.nn.functional.mse_loss
50
-
pos
-
SizeSweep
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Finished
-
batu
1m 1s
-
torch.nn.functional.relu
-
-
512
-
-
6
0.001
-
-
-
-
3
-
-
-
-
512
4752
BaselineNoGraph(\n (dropout): Dropout(p=0, inplace=False)\n (layers): ModuleList(\n (0): Linear(in_features=6, out_features=512, bias=True)\n (1): Linear(in_features=512, out_features=512, bias=True)\n (2): Linear(in_features=512, out_features=3, bias=True)\n )\n (input_layer): Linear(in_features=6, out_features=512, bias=True)\n (hidden_layers): ModuleList(\n (0): Linear(in_features=512, out_features=512, bias=True)\n )\n (output_layer): Linear(in_features=512, out_features=3, bias=True)\n)
0
Jump
-
0
torch.nn.functional.mse_loss
50
-
pos
-
SizeSweep
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Finished
-
batu
2h 33s
-
-
-
-
-
-
-
-
-
-
-
KNN Regression
-
-
-
-
-
-
-
-
-
-
-
4.66667
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Crashed
-
batu
7d 1h 11m 40s
-
torch.nn.functional.relu
false
node
429.41935
goal_to_agent
128
23
0.0088387
true
true
__main__.GCNLayer
-
3.46774
[false,true]
true
false
true
-
-
-
-
Jump
-
-
-
94.6129
pos
-
-
JumpGraphTest
-
-
abs_pos
13
16
-
-
16
19
-
-
9
12
-
Finished
-
batu
51m 25s
-
torch.nn.functional.relu
false
node
286.31579
goal_to_agent
128
23
0.0066842
true
true
__main__.GCNLayer
-
4.89474
[false,true]
true
false
true
-
-
-
-
Jump
-
-
-
30
pos
-
-
JumpGraphTest
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Crashed
-
batu
1d 20h 18m 28s
-
torch.nn.functional.relu
[false,true]
node
252.40237
goal_to_agent
159.48387
9.6568
0.004847
true
true
__main__.GCNLayer
-
4.13018
[false,true]
[false,true]
false
true
814.50467
4840.95794
["BaselineNoGraph(\n (dropout): Dropout(p=0.005, inplace=False)\n (layers): ModuleList(\n (0): Linear(in_features=6, out_features=1024, bias=True)\n (1): Linear(in_features=1024, out_features=1024, bias=True)\n (2): Linear(in_features=1024, out_features=1024, bias=True)\n (3): Linear(in_features=1024, out_features=1024, bias=True)\n (4): Linear(in_features=1024, out_features=3, bias=True)\n )\n (input_layer): Linear(in_features=6, out_features=1024, bias=True)\n (hidden_layers): ModuleList(\n (0): Linear(in_features=1024, out_features=1024, bias=True)\n (1): Linear(in_features=1024, out_features=1024, bias=True)\n (2): Linear(in_features=1024, out_features=1024, bias=True)\n )\n (output_layer): Linear(in_features=1024, out_features=3, bias=True)\n)","BaselineNoGraph(\n (dropout): Dropout(p=0.005, inplace=False)\n (layers): ModuleList(\n (0): Linear(in_features=6, out_features=1024, bias=True)\n (1): Linear(in_features=1024, out_features=1024, bias=True)\n (2): Linear(in_features=1024, out_features=1024, bias=True)\n (3): Linear(in_features=1024, out_features=3, bias=True)\n )\n (input_layer): Linear(in_features=6, out_features=1024, bias=True)\n (hidden_layers): ModuleList(\n (0): Linear(in_features=1024, out_features=1024, bias=True)\n (1): Linear(in_features=1024, out_features=1024, bias=True)\n )\n (output_layer): Linear(in_features=1024, out_features=3, bias=True)\n)","BaselineNoGraph(\n (dropout): Dropout(p=0.005, inplace=False)\n (layers): ModuleList(\n (0): Linear(in_features=6, out_features=1024, bias=True)\n (1): Linear(in_features=1024, out_features=1024, bias=True)\n (2): Linear(in_features=1024, out_features=3, bias=True)\n )\n (input_layer): Linear(in_features=6, out_features=1024, bias=True)\n (hidden_layers): ModuleList(\n (0): Linear(in_features=1024, out_features=1024, bias=True)\n )\n (output_layer): Linear(in_features=1024, out_features=3, bias=True)\n)","BaselineNoGraph(\n (dropout): Dropout(p=0.005, inplace=False)\n (layers): ModuleList(\n (0): Linear(in_features=6, out_features=128, bias=True)\n (1): Linear(in_features=128, out_features=128, bias=True)\n (2): Linear(in_features=128, out_features=128, bias=True)\n (3): Linear(in_features=128, out_features=128, bias=True)\n (4): Linear(in_features=128, out_features=3, bias=True)\n )\n (input_layer): Linear(in_features=6, out_features=128, bias=True)\n (hidden_layers): ModuleList(\n (0): Linear(in_features=128, out_features=128, bias=True)\n (1): Linear(in_features=128, out_features=128, bias=True)\n (2): Linear(in_features=128, out_features=128, bias=True)\n )\n (output_layer): Linear(in_features=128, out_features=3, bias=True)\n)","BaselineNoGraph(\n (dropout): Dropout(p=0.005, inplace=False)\n (layers): ModuleList(\n (0): Linear(in_features=6, out_features=128, bias=True)\n (1): Linear(in_features=128, out_features=128, bias=True)\n (2): Linear(in_features=128, out_features=128, bias=True)\n (3): Linear(in_features=128, out_features=3, bias=True)\n )\n (input_layer): Linear(in_features=6, out_features=128, bias=True)\n (hidden_layers): ModuleList(\n (0): Linear(in_features=128, out_features=128, bias=True)\n (1): Linear(in_features=128, out_features=128, bias=True)\n )\n (output_layer): Linear(in_features=128, out_features=3, bias=True)\n)","BaselineNoGraph(\n (dropout): Dropout(p=0.005, inplace=False)\n (layers): ModuleList(\n (0): Linear(in_features=6, out_features=128, bias=True)\n (1): Linear(in_features=128, out_features=128, bias=True)\n (2): Linear(in_features=128, out_features=3, bias=True)\n )\n 
(input_layer): Linear(in_features=6, out_features=128, bias=True)\n (hidden_layers): ModuleList(\n (0): Linear(in_features=128, out_features=128, bias=True)\n )\n (output_layer): Linear(in_features=128, out_features=3, bias=True)\n)","BaselineNoGraph(\n (dropout): Dropout(p=0.005, inplace=False)\n (layers): ModuleList(\n (0): Linear(in_features=6, out_features=2048, bias=True)\n (1): Linear(in_features=2048, out_features=2048, bias=True)\n (2): Linear(in_features=2048, out_features=2048, bias=True)\n (3): Linear(in_features=2048, out_features=3, bias=True)\n )\n (input_layer): Linear(in_features=6, out_features=2048, bias=True)\n (hidden_layers): ModuleList(\n (0): Linear(in_features=2048, out_features=2048, bias=True)\n (1): Linear(in_features=2048, out_features=2048, bias=True)\n )\n (output_layer): Linear(in_features=2048, out_features=3, bias=True)\n)","BaselineNoGraph(\n (dropout): Dropout(p=0.005, inplace=False)\n (layers): ModuleList(\n (0): Linear(in_features=6, out_features=2048, bias=True)\n (1): Linear(in_features=2048, out_features=2048, bias=True)\n (2): Linear(in_features=2048, out_features=3, bias=True)\n )\n (input_layer): Linear(in_features=6, out_features=2048, bias=True)\n (hidden_layers): ModuleList(\n (0): Linear(in_features=2048, out_features=2048, bias=True)\n )\n (output_layer): Linear(in_features=2048, out_features=3, bias=True)\n)","BaselineNoGraph(\n (dropout): Dropout(p=0.005, inplace=False)\n (layers): ModuleList(\n (0): Linear(in_features=6, out_features=512, bias=True)\n (1): Linear(in_features=512, out_features=512, bias=True)\n (2): Linear(in_features=512, out_features=3, bias=True)\n )\n (input_layer): Linear(in_features=6, out_features=512, bias=True)\n (hidden_layers): ModuleList(\n (0): Linear(in_features=512, out_features=512, bias=True)\n )\n (output_layer): Linear(in_features=512, out_features=3, bias=True)\n)","BaselineNoGraph(\n (dropout): Dropout(p=0.005, inplace=False)\n (layers): ModuleList(\n (0): Linear(in_features=6, out_features=512, bias=True)\n (1): Linear(in_features=512, out_features=512, bias=True)\n (2): Linear(in_features=512, out_features=512, bias=True)\n (3): Linear(in_features=512, out_features=3, bias=True)\n )\n (input_layer): Linear(in_features=6, out_features=512, bias=True)\n (hidden_layers): ModuleList(\n (0): Linear(in_features=512, out_features=512, bias=True)\n (1): Linear(in_features=512, out_features=512, bias=True)\n )\n (output_layer): Linear(in_features=512, out_features=3, bias=True)\n)"]
0.0053738
Jump
-
0
torch.nn.functional.mse_loss
35.35503
pos
["full","pos","vector"]
true
["JumpEnvDebugGraph","JumpGraphTest","SizeSweep"]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Finished
-
batu
2h 47m 1s
-
torch.nn.functional.relu
-
-
128
-
-
505
0.001
-
-
-
-
4
-
-
-
-
256
5198.86813
-
0.2033
-
-
0.029154
torch.nn.functional.mse_loss
31.62637
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Finished
-
batu
2h 2m 7s
-
torch.nn.functional.relu
-
-
128
-
-
505
0.00098875
-
-
-
-
3.975
-
-
-
-
253
5306.3
-
0.17595
-
-
0.028114
torch.nn.functional.mse_loss
28.60759
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Finished
-
batu
15h 17m 39s
-
["torch.nn.functional.leaky_relu","torch.nn.functional.relu","torch.nn.functional.tanh"]
-
-
128
-
-
503.08812
0.0034276
-
-
-
-
2.96169
-
-
-
-
207.69349
4837.97701
-
-
-
-
-
["torch.nn.functional.cross_entropy","torch.nn.functional.kl_div","torch.nn.functional.l1_loss","torch.nn.functional.mse_loss"]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Finished
-
batu
9h 43m 23s
-
torch.nn.functional.relu
-
-
128
-
-
140.5
0.0051625
-
-
-
-
3.875
-
-
-
-
288
4688.25
-
-
-
-
-
torch.nn.functional.mse_loss
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Finished
-
batu
9h 18m 48s
-
["torch.nn.functional.leaky_relu","torch.nn.functional.relu","torch.nn.functional.tanh"]
-
-
128
-
-
459.4375
0.0058094
-
-
-
-
3.125
-
-
-
-
202
4185.84375
-
-
-
-
-
["torch.nn.functional.cross_entropy","torch.nn.functional.kl_div","torch.nn.functional.l1_loss","torch.nn.functional.mse_loss"]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Crashed
-
batu
1d 11h 45m 40s
-
["torch.nn.functional.relu","torch.nn.functional.tanh"]
-
-
128
-
-
373.68421
0.032263
-
-
-
-
3.78947
-
-
-
-
445.47368
6184.68421
-
-
-
-
-
["torch.nn.functional.cross_entropy","torch.nn.functional.kl_div","torch.nn.functional.l1_loss"]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Crashed
-
batu
29m 56s
-
torch.nn.functional.relu
true
node
116
goal_to_agent
76
5
0.01
true
true
__main__.GCNLayer
-
3
false
true
false
true
32
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Finished
-
batu
2h 36m 34s
-
torch.nn.functional.relu
true
node
128
goal_to_agent
57.6
5
0.01
true
true
__main__.GCNLayer
-
3
false
true
false
true
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Crashed
-
batu
11h 47m 42s
-
["torch.nn.functional.relu","torch.nn.functional.tanh"]
[false,true]
node
128
["agent_to_goal","bi_directional","goal_to_agent"]
57.06667
3.27027
0.0092
true
true
__main__.GCNLayer
-
3
[false,true]
[false,true]
false
[false,true]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Finished
-
batu
17h 22m 58s
-
["torch.nn.functional.relu","torch.nn.functional.tanh"]
[false,true]
node
138.34944
goal_to_agent
40.26766
4.02642
0.0037535
true
true
__main__.GCNLayer
-
3
[false,true]
true
[false,true]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
Finished
-
batu
1h 16m 58s
-
relu
[false,true]
node
139.29412
goal_to_agent
128
3.875
0.0028
true
true
__main__.GCNLayer
-
3
[false,true]
true
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
1-20 of 32
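
The architecture cells above are PyTorch module reprs of the BaselineNoGraph baseline. As a reading aid, the following is a minimal sketch of a module whose printed repr has the same shape. Only the attribute names and layer sizes come from the logged reprs; the constructor signature, the forward pass, and whether input_layer/hidden_layers/output_layer share weights with layers in the original are assumptions.

import torch
import torch.nn as nn
import torch.nn.functional as F


class BaselineNoGraph(nn.Module):
    """MLP baseline (no graph structure), reconstructed from the logged repr.

    Assumptions: constructor signature, forward pass, and weight sharing
    between `layers` and the `input_layer`/`hidden_layers`/`output_layer`
    aliases. Layer shapes and attribute names match the logged repr.
    """

    def __init__(self, input_dim=6, h_feats=512, output_dim=3,
                 num_layers=3, dropout=0.0, activation=F.relu):
        super().__init__()
        self.activation = activation
        self.dropout = nn.Dropout(p=dropout)
        # Flat list of all Linear layers, as in the logged repr:
        # input -> hidden (x num_layers-2) -> output.
        self.layers = nn.ModuleList(
            [nn.Linear(input_dim, h_feats)]
            + [nn.Linear(h_feats, h_feats) for _ in range(num_layers - 2)]
            + [nn.Linear(h_feats, output_dim)]
        )
        # The repr also exposes the same layers under separate names; here
        # they alias the entries of `layers` (the original may duplicate them).
        self.input_layer = self.layers[0]
        self.hidden_layers = nn.ModuleList(self.layers[1:-1])
        self.output_layer = self.layers[-1]

    def forward(self, x):
        for layer in self.layers[:-1]:
            x = self.dropout(self.activation(layer(x)))
        return self.layers[-1](x)

Instantiating BaselineNoGraph(input_dim=6, h_feats=512, num_layers=3, dropout=0.0) and calling print(model) reproduces the layer shapes of the SizeSweep rows above (in_features=6 corresponds to the "pos" observation; the InputSweep rows use 9, 19, and 505 input features instead).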
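The graph runs log model_type __main__.GCNLayer together with flags such as hidden_dim, self_loop, residual, and activation, but the class itself lives in the training script and is not logged. The sketch below is only a hypothetical illustration of how those flags could map onto a dense-adjacency graph convolution; every identifier is an assumption, and flags such as linear_input, linear_output, add_node_id, and aggregation_method are not modeled.

import torch
import torch.nn as nn
import torch.nn.functional as F


class GCNLayerSketch(nn.Module):
    """Hypothetical GCN-style layer over a dense adjacency matrix."""

    def __init__(self, in_dim, out_dim, self_loop=True, residual=False,
                 activation=F.relu):
        super().__init__()
        self.linear = nn.Linear(in_dim, out_dim)
        self.self_loop = self_loop
        # Residual connections only make sense when dimensions match.
        self.residual = residual and in_dim == out_dim
        self.activation = activation

    def forward(self, h, adj):
        # h: (num_nodes, in_dim); adj: (num_nodes, num_nodes) 0/1 adjacency,
        # where adj[i, j] = 1 means node i aggregates from node j.
        if self.self_loop:
            adj = adj + torch.eye(adj.size(0), device=adj.device)
        # Symmetric degree normalisation, as in a standard GCN.
        deg = adj.sum(dim=1).clamp(min=1)
        norm = deg.rsqrt()
        adj = norm.unsqueeze(1) * adj * norm.unsqueeze(0)
        out = self.activation(self.linear(adj @ h))
        return out + h if self.residual else out


# Example with hyperparameters from one JumpGraphTest run:
# input_dim=23, hidden_dim=128, and a single goal_to_agent edge.
layer = GCNLayerSketch(23, 128, self_loop=True, residual=False)
h = torch.randn(2, 23)                    # two nodes: agent (0), goal (1)
adj = torch.tensor([[0., 1.],             # agent receives from goal
                    [0., 0.]])
out = layer(h, adj)                       # -> (2, 128)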
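The observation_specification.*.slice_start / slice_stop columns suggest that each named field (agent_pos, goal_pos, velocity, full, vector) is a half-open [start, stop) window into the flat observation vector. A minimal sketch of applying such a spec, with hypothetical helper names and example indices taken from one JumpGraphTest row:

from dataclasses import dataclass

import torch


@dataclass
class SliceSpec:
    """[slice_start, slice_stop) window into the flat observation."""
    slice_start: int
    slice_stop: int


def split_observation(obs, spec):
    """Split the last dimension of a flat observation into named slices."""
    return {name: obs[..., s.slice_start:s.slice_stop]
            for name, s in spec.items()}


# Indices from a JumpGraphTest row: agent_pos=[13,16), goal_pos=[16,19),
# velocity=[9,12); input_dim=23 for the graph runs.
spec = {
    "agent_pos": SliceSpec(13, 16),
    "goal_pos": SliceSpec(16, 19),
    "velocity": SliceSpec(9, 12),
}
obs = torch.randn(23)
parts = split_observation(obs, spec)
assert parts["agent_pos"].shape[-1] == 3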