model.py
"""
Model class contained encoder, decoder and head modules.
"""
import torch.nn as nn
from cgcnn_modules import CrystalGraphConvNet
from nwr_gae_modules import GNNAutoDecoder


class CrystalGraph(nn.Module):
    def __init__(self, crystal_gnn_config, head_output_dim, drop_rate,
                 decoder_sample_size, device):
        super().__init__()
        # CGCNN encoder: graph convolutions over atom and neighbour features.
        self.encoder = CrystalGraphConvNet(
            orig_atom_fea_len=crystal_gnn_config['orig_atom_fea_len'],
            atom_fea_len=crystal_gnn_config['atom_fea_len'],
            nbr_fea_len=crystal_gnn_config['nbr_fea_len'],
            n_conv=crystal_gnn_config['n_conv']
        )
        # NWR-GAE decoder: reconstructs node neighbourhoods, providing the
        # self-supervised loss used to pre-train the encoder.
        self.decoder = GNNAutoDecoder(
            input_dim=crystal_gnn_config['orig_atom_fea_len'],
            hidden_dim=crystal_gnn_config['atom_fea_len'],
            layer_num=crystal_gnn_config['n_conv'] - 1,
            sample_size=decoder_sample_size,
            device=device,
            degree_lambda=1e-4,
            lambda_loss1=1e-6,
            lambda_loss2=1
        )
        # Prediction head: two-layer MLP applied to the pooled crystal feature.
        self.head = nn.Sequential(
            nn.Linear(crystal_gnn_config['atom_fea_len'], crystal_gnn_config['atom_fea_len']),
            nn.Dropout(p=drop_rate),
            nn.ReLU(),
            nn.Linear(crystal_gnn_config['atom_fea_len'], head_output_dim)
        )
        self.softmax = nn.Softmax(dim=1)

    def pretrain(self, atom_fea, nbr_fea, nbr_fea_idx, degree, crystal_atom_idx):
        """Self-supervised step: encode, then return the decoder's reconstruction loss."""
        self.encoder.set_pre_train(True)
        outputs_per_layer = self.encoder(atom_fea, nbr_fea, nbr_fea_idx, crystal_atom_idx)
        loss = self.decoder(degree, nbr_fea_idx, outputs_per_layer)
        return loss

    def forward(self, atom_fea, nbr_fea, nbr_fea_idx, crystal_atom_idx):
        """Supervised forward pass: encode and project through the head (logits)."""
        self.encoder.set_pre_train(False)
        feature = self.encoder(atom_fea, nbr_fea, nbr_fea_idx, crystal_atom_idx)
        # feature = F.normalize(self.head(feature), dim=1)
        feature = self.head(feature)
        return feature

    def predict(self, atom_fea, nbr_fea, nbr_fea_idx, crystal_atom_idx):
        """Class probabilities: softmax over the head's logits."""
        feature = self.forward(atom_fea, nbr_fea, nbr_fea_idx, crystal_atom_idx)
        prob = self.softmax(feature)
        return prob