-
Notifications
You must be signed in to change notification settings - Fork 4
/
Copy pathmake_encoder.py
30 lines (24 loc) · 1.04 KB
/
make_encoder.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
import os.path
import torch
import collections
import numpy as np
import matplotlib.pyplot as plt
from torch.autograd import Variable
from define_network import Compression_encoder,AutoEncoder
from sample_set import Sample_set
if __name__ == '__main__':
    # Truncate a trained autoencoder: copy the encoder half of its weights
    # into a standalone Compression_encoder and save that smaller model.
    #
    # Reads : ./conv_autoencoder.pth      (full AutoEncoder state dict)
    # Writes: ./compression_encoder.pth   (encoder-only state dict)
    path_ = os.path.abspath('.')
    fname = path_ + '/conv_autoencoder.pth'

    # Load the trained full autoencoder.
    ae = AutoEncoder()
    ae.load_state_dict(torch.load(fname))

    ce = Compression_encoder()

    # Copy every parameter the compression encoder expects from the trained
    # autoencoder. This generalizes the previous hard-coded copies of
    # 'encoder.{0,2,4}.{weight,bias}': any encoder layout whose keys match
    # between the two modules works, and missing keys still fail loudly in
    # load_state_dict below, exactly as before.
    ae_state = ae.state_dict()  # materialize once instead of six times
    new_dict = collections.OrderedDict(
        (key, ae_state[key]) for key in ce.state_dict() if key in ae_state
    )
    ce.load_state_dict(new_dict)

    torch.save(ce.state_dict(), path_ + '/compression_encoder.pth')