# cfg.yaml
dataset:
  return_mode: "default"
  data_aug_mode: null
  data_aug_cfg:
    mixup_alpha: 0.2
  test_subset_name: ["iid", "ood"]
  in_len: 10   # number of input (context) frames
  out_len: 20  # number of frames to predict
  layout: "THWC"
  static_layout: "CHW"
  val_ratio: 0.1
  train_val_split_seed: null
  highresstatic_expand_t: false
  mesostatic_expand_t: false
  meso_crop: null
  fp16: false
layout:
  in_len: 10
  out_len: 20
  layout: "NTHWC"
  img_height: 128
  img_width: 128
optim:
  total_batch_size: 32   # target effective batch size (presumably reached by accumulating micro batches)
  micro_batch_size: 2    # per-step batch size
  seed: 0
  method: "adamw"
  lr: 0.0005             # peak learning rate
  wd: 1.0e-05            # weight decay
  gradient_clip_val: 1.0
  max_epochs: 200
  # scheduler
  lr_scheduler_mode: "cosine"
  min_lr_ratio: 1.0e-3
  warmup_min_lr_ratio: 0.0
  warmup_percentage: 0.2
  # early stopping
  early_stop: true
  early_stop_mode: "min"
  early_stop_patience: 5
  save_top_k: 5
logging:
  logging_prefix: "Cuboid_EarthNet"
  monitor_lr: true
  monitor_device: false
  track_grad_norm: -1
  use_wandb: false
trainer:
  check_val_every_n_epoch: 5
  log_step_ratio: 0.001
  precision: 32
vis:
  train_example_data_idx_list: [0, ]
  val_example_data_idx_list: [0, ]
  test_example_data_idx_list: [0, 40, 80, 120, 160, 200, 240, 280, 320, 360, 400]
  eval_example_only: false
model:
  input_shape: [10, 128, 128, 4]   # (T_in, H, W, C), matching in_len and layout "THWC"
  target_shape: [20, 128, 128, 4]  # (T_out, H, W, C), matching out_len
  base_units: 256
  # block_units: null
  scale_alpha: 1.0
  enc_depth: [1, 1]
  dec_depth: [1, 1]
  enc_use_inter_ffn: true
  dec_use_inter_ffn: true
  dec_hierarchical_pos_embed: false
  downsample: 2
  downsample_type: "patch_merge"
  upsample_type: "upsample"
  num_global_vectors: 8
  use_dec_self_global: false
  dec_self_update_global: true
  use_dec_cross_global: false
  use_global_vector_ffn: false
  use_global_self_attn: true
  separate_global_qkv: true
  global_dim_ratio: 1
  self_pattern: "axial"
  cross_self_pattern: "axial"
  cross_pattern: "cross_1x1"
  dec_cross_last_n_frames: null
  attn_drop: 0.1
  proj_drop: 0.1
  ffn_drop: 0.1
  num_heads: 4
  ffn_activation: "gelu"
  gated_ffn: false
  norm_layer: "layer_norm"
  padding_type: "zeros"
  pos_embed_type: "t+hw"
  use_relative_pos: true
  self_attn_use_final_proj: true
  checkpoint_level: 0
  initial_downsample_type: "stack_conv"
  initial_downsample_activation: "leaky"
  initial_downsample_stack_conv_num_layers: 2
  initial_downsample_stack_conv_dim_list: [64, 256]
  initial_downsample_stack_conv_downscale_list: [2, 2]
  initial_downsample_stack_conv_num_conv_list: [2, 2]
  attn_linear_init_mode: "0"
  ffn_linear_init_mode: "0"
  conv_init_mode: "0"
  down_up_linear_init_mode: "0"
  norm_init_mode: "0"
  # different from CuboidTransformerModel, no arg `dec_use_first_self_attn=False`
  auxiliary_channels: 7
  unet_dec_cross_mode: "both"
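
# ---------------------------------------------------------------------------
# Usage note (illustrative sketch, not part of the original config): a file
# like this is typically parsed into a nested config object with OmegaConf
# and handed to the training script. The file path "cfg.yaml" and the merge
# with defaults below are assumptions for illustration only.
#
#   from omegaconf import OmegaConf
#
#   cfg = OmegaConf.load("cfg.yaml")          # parse this YAML into a DictConfig
#   # cfg = OmegaConf.merge(defaults, cfg)    # optionally overlay on top of script defaults
#   print(cfg.optim.lr, cfg.model.base_units) # nested keys are available as attributes
# ---------------------------------------------------------------------------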