-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathfinal2.sh
32 lines (31 loc) · 8.11 KB
/
final2.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
#!/usr/bin/env bash
# final2.sh — launch 2-GPU distributed training for the GNT + DiNOv2-feature experiment.
#
# Requires: CUDA devices 0 and 1, train.py and configs/transibr_full.txt in the
# working directory.
#
# NOTE(review): torch.distributed.launch is deprecated in recent PyTorch;
# consider `torchrun --nnodes=1 --nproc_per_node=2 ...` (drops --use_env) when
# upgrading — confirm the installed torch version before switching.
set -uo pipefail
# (deliberately no `set -e`: a failed training attempt must not abort the retry loop below)

# Earlier baseline run (GNT generalization experiment, no DiNO field) — kept for reference:
# CUDA_VISIBLE_DEVICES=0,1 python3 -W ignore -m torch.distributed.launch \
#   --nnodes=1 --node_rank=0 --nproc_per_node=2 --use_env --master_port=1234 \
#   train.py --config configs/transibr_full.txt \
#   --expname transibr_gnt_gen_expt --folder_name DiNOFeats

# The original script repeated the identical command 11 times as a crude
# crash-restart mechanism (each rerun presumably resumes from the latest
# checkpoint — TODO confirm train.py resume behavior). An explicit bounded
# loop preserves that behavior: run up to MAX_ATTEMPTS times, never abort on
# a non-zero exit.
readonly MAX_ATTEMPTS=11

for (( attempt = 1; attempt <= MAX_ATTEMPTS; attempt++ )); do
  printf 'training attempt %d/%d\n' "$attempt" "$MAX_ATTEMPTS" >&2
  CUDA_VISIBLE_DEVICES=0,1 python3 -W ignore -m torch.distributed.launch \
    --nnodes=1 --node_rank=0 --nproc_per_node=2 --use_env --master_port=1234 \
    train.py \
    --config configs/transibr_full.txt \
    --expname transibr_gnt_dinov2_ft_viewindependent_512rays_trainall \
    --dinofield --dino_dim 64 \
    --folder_name DiNOFeats_v2 \
    || printf 'attempt %d exited non-zero; continuing\n' "$attempt" >&2
done