params.py
"""
********************************************************************************
all your params
********************************************************************************
"""
import tensorflow as tf
# network structure
f_in = 3
f_out = 1
width = 2 ** 8 # 2 ** 6 = 64, 2 ** 8 = 256
depth = 5
# training setting
n_epch = int(1e4)
n_btch = 2 ** 12 # 2 ** 6 = 64, 2 ** 8 = 256
c_tol = 1e-8
# initializers
w_init = "Glorot"
b_init = "zeros"
act = "tanh"
# optimization
lr0 = 1e-2
gam = 1e-2
lrd_exp = tf.keras.optimizers.schedules.ExponentialDecay(
    initial_learning_rate=lr0, decay_steps=n_epch, decay_rate=gam, staircase=False)
lrd_cos = tf.keras.optimizers.schedules.CosineDecay(
    initial_learning_rate=lr0, decay_steps=n_epch, alpha=gam)
lr = lrd_cos
opt = "Adam"
f_scl = "minmax" # "minmax" or "mean"
laaf = True
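# Sketch (not part of the original config): both schedule objects above are
# tf.keras LearningRateSchedule instances, so they can be called with a step
# index to inspect the decayed learning rate, e.g. when choosing between
# lrd_exp and lrd_cos:
#   for step in (0, n_epch // 2, n_epch):
#       print(step, float(lrd_exp(step)), float(lrd_cos(step)))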
# system params
c = 1.
# weights
w_ini = 1.
w_bnd = 1.
w_pde = 1.
# boundary condition
BC = "Neu" # "Dir" for Dirichlet, "Neu" for Neumann
# rarely changed params
f_mntr = 10
r_seed = 1234
def params():
    """Return all configuration values as a single tuple."""
    return (
        f_in, f_out, width, depth,
        w_init, b_init, act,
        lr, opt,
        f_scl, laaf, c,
        w_ini, w_bnd, w_pde, BC,
        f_mntr, r_seed,
        n_epch, n_btch, c_tol,
    )
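

# Sanity-check sketch (an addition, not in the original file): running this
# module directly prints every value returned by params(), in the same order
# as the return statement above; a training script would typically unpack
# that tuple instead.
if __name__ == "__main__":
    for value in params():
        print(value)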