2023-06-02 08:09:40
Deleted files from: /home/ubuntu/mtrl/logs/90f2497ff4cee27c0d30fbc66e6ba205f94808ba4ea16e057df58e73_issue_None_seed_1/buffer
2023-06-02 08:09:44
[2023-06-02 08:09:43,973][default_logger][INFO] - {
  "name": "PAL_not_shared",
  "num_seeds": 5,
  "setup": {
    "seed": 1,
    "setup": "metaworld",
    "base_path": "/home/ubuntu/mtrl",
    "save_dir": "${setup.base_path}/logs/${setup.id}",
    "device": "cuda:0",
    "id": "90f2497ff4cee27c0d30fbc66e6ba205f94808ba4ea16e057df58e73_issue_None_seed_1",
    "description": "Sample Task",
    "tags": null,
    "git": {"commit_id": "b112a394c7c5e732c7a04da188a39c17d628378a", "has_uncommitted_changes": null, "issue_id": null},
    "date": "2023-06-01 19:39:45",
    "slurm_id": "-1",
    "debug": {"should_enable": false}
  },
  "experiment": {
    "name": "metaworld",
    "builder": {"_target_": "mtrl.experiment.${experiment.name}.Experiment"},
    "init_steps": 1500,
    "num_train_steps": 200000,
    "eval_freq": 10000,
    "num_eval_episodes": 10,
    "should_resume": true,
    "save": {"model": {"retain_last_n": 1}, "buffer": {"should_save": true, "size_per_chunk": 10000, "num_samples_to_save": -1}},
    "save_dir": "${setup.save_dir}",
    "save_video": false,
    "envs_to_exclude_during_training": null
  },
  "agent": {
    "name": "state_sac",
    "encoder_feature_dim": 50,
    "num_layers": 0,
    "num_filters": 0,
    "builder": {
      "_target_": "mtrl.agent.sac.Agent",
      "actor_cfg": "${agent.actor}",
      "critic_cfg": "${agent.critic}",
      "multitask_cfg": "${agent.multitask}",
      "alpha_optimizer_cfg": "${agent.optimizers.alpha}",
      "actor_optimizer_cfg": "${agent.optimizers.actor}",
      "critic_optimizer_cfg": "${agent.optimizers.critic}",
      "discount": 0.99,
      "init_temperature": 1.0,
      "actor_update_freq": 1,
      "critic_tau": 0.005,
      "critic_target_update_freq": 1,
      "encoder_tau": 0.05
    },
    "actor": {
      "_target_": "mtrl.agent.components.actor.Actor",
      "num_layers": 3,
      "hidden_dim": 400,
      "log_std_bounds": [-20, 2],
      "encoder_cfg": "${agent.encoder}",
      "multitask_cfg": "${agent.multitask}"
    },
    "critic": {
      "_target_": "mtrl.agent.components.critic.Critic",
      "hidden_dim": "${agent.actor.hidden_dim}",
      "num_layers": "${agent.actor.num_layers}",
      "encoder_cfg": "${agent.encoder}",
      "multitask_cfg": "${agent.multitask}"
    },
    "encoder": {
      "type_to_select": "moe",
      "identity": {"type": "identity", "feature_dim": "${agent.encoder_feature_dim}"},
      "feedforward": {"type": "feedforward", "hidden_dim": 50, "num_layers": 2, "feature_dim": "${agent.encoder_feature_dim}", "should_tie_encoders": true},
      "film": {"type": "film", "hidden_dim": 50, "num_layers": 2, "feature_dim": "${agent.encoder_feature_dim}", "should_tie_encoders": true},
      "moe": {
        "type": "moe",
        "encoder_cfg": {"type": "feedforward", "hidden_dim": 50, "num_layers": 2, "feature_dim": "${agent.encoder_feature_dim}", "should_tie_encoders": true},
        "num_experts": 4,
        "task_id_to_encoder_id_cfg": {
          "mode": "attention",
          "num_envs": "${env.num_envs}",
          "gate": {"embedding_dim": 50, "hidden_dim": 50, "num_layers": 2, "temperature": 1.0, "should_use_soft_attention": false, "topk": 2, "task_encoder_cfg": {"should_use_task_encoding": true, "should_detach_task_encoding": true}},
          "attention": {"embedding_dim": 50, "hidden_dim": 50, "num_layers": 2, "temperature": 1.0, "should_use_soft_attention": true, "task_encoder_cfg": {"should_use_task_encoding": true, "should_detach_task_encoding": true}},
          "cluster": {"env_name": "${env.name}", "task_description": "${env.description}", "ordered_task_list": "${env.ordered_task_list}", "mapping_cfg": "${agent.task_to_encoder_cluster}", "num_eval_episodes": "${experiment.num_eval_episodes}", "batch_size": "${replay_buffer.batch_size}"},
          "identity": {"num_eval_episodes": "${experiment.num_eval_episodes}", "batch_size": "${replay_buffer.batch_size}"},
          "ensemble": {"num_eval_episodes": "${experiment.num_eval_episodes}", "batch_size": "${replay_buffer.batch_size}"}
        }
      },
      "factorized_moe": {"type": "fmoe", "encoder_cfg": "${agent.encoder.feedforward}", "num_factors": 2, "num_experts_per_factor": [5, 5]},
      "pixel": {"type": "pixel", "feature_dim": "${agent.encoder_feature_dim}", "num_filters": "${agent.num_filters}", "num_layers": "${agent.num_layers}"}
    },
    "transition_model": {
      "_target_": "mtrl.agent.components.transition_model.make_transition_model",
      "transition_cfg": {"type": "", "feature_dim": "${agent.encoder_feature_dim}", "layer_width": 512},
      "multitask_cfg": "${agent.multitask}"
    },
    "mask": {"num_tasks": "${env.num_envs}", "num_eval_episodes": "${experiment.num_eval_episodes}", "batch_size": "${replay_buffer.batch_size}"},
    "multitask": {
      "num_envs": 10,
      "should_use_disentangled_alpha": true,
      "should_use_task_encoder": true,
      "should_use_multi_head_policy": false,
      "should_use_pal": true,
      "should_use_disjoint_policy": false,
      "pal_cfg": {"pal_dim": 100, "shared_projection": false, "use_residual_connections": false},
      "task_encoder_cfg": {
        "model_cfg": {
          "_target_": "mtrl.agent.components.task_encoder.TaskEncoder",
          "pretrained_embedding_cfg": {"should_use": true, "path_to_load_from": "/home/ubuntu/mtrl/metadata/task_embedding/roberta_small/${env.name}.json", "ordered_task_list": "${env.ordered_task_list}"},
          "num_embeddings": "${agent.multitask.num_envs}",
          "embedding_dim": 50,
          "hidden_dim": 50,
          "num_layers": 2,
          "output_dim": 50
        }, [wandb: long log line truncated]