-- model.lua (forked from fh295/semanticCNN)
--
-- Copyright (c) 2014, Facebook, Inc.
-- All rights reserved.
--
-- This source code is licensed under the BSD-style license found in the
-- LICENSE file in the root directory of this source tree. An additional grant
-- of patent rights can be found in the PATENTS file in the same directory.
--
require 'nn'
require 'cunn'
require 'optim'
--[[
   1. Create Model
   2. Create Criterion
   3. Convert model to CUDA
]]--
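
-- The options table `opt` is built by the calling driver script (e.g. via
-- torch.CmdLine) before this file runs. The table below is an illustrative
-- sketch only, not the project's actual defaults: the field names are exactly
-- the ones this script reads, but every value is an example.
opt = opt or {
   retrain = 'none',    -- path to a previously saved model, or 'none'
   netType = 'alexnet', -- selects models/<netType>.lua (example value)
   nGPU    = 1,         -- GPUs used by createModel / loadDataParallel
   backend = 'cudnn',   -- 'cudnn' or 'nn'
   crit    = 'sem',     -- 'sem', 'class', 'softsem', or anything else for MSE
   margin  = 0.1,       -- margin for nn.CosineEmbeddingCriterion (example value)
}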
-- 1. Create Network
-- 1.1 If preloading option is set, preload weights from existing models appropriately
if opt.retrain ~= 'none' then
   assert(paths.filep(opt.retrain), 'File not found: ' .. opt.retrain)
   print('Loading model from file: ' .. opt.retrain)
   model = loadDataParallel(opt.retrain, opt.nGPU) -- defined in util.lua
else
   paths.dofile('models/' .. opt.netType .. '.lua')
   print('=> Creating model from file: models/' .. opt.netType .. '.lua')
   model = createModel(opt.nGPU) -- for the model creation code, check the models/ folder
   if opt.backend == 'cudnn' then
      require 'cudnn'
      cudnn.convert(model, cudnn) -- swap nn modules for their cudnn equivalents
   elseif opt.backend ~= 'nn' then
      error('Unsupported backend: ' .. opt.backend)
   end
end
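
-- For reference, each file under models/ is expected to define createModel(nGPU)
-- and return an nn module. The function below is a minimal hypothetical sketch
-- (not this repo's actual architecture) illustrating that contract; a real model
-- file would also handle multi-GPU wrapping when nGPU > 1.
local function exampleCreateModel(nGPU)
   local net = nn.Sequential()
   net:add(nn.SpatialConvolution(3, 16, 5, 5))  -- toy conv layer (assumes 3x32x32 inputs)
   net:add(nn.ReLU(true))
   net:add(nn.SpatialMaxPooling(2, 2, 2, 2))
   net:add(nn.View(-1):setNumInputDims(3))      -- flatten each example
   net:add(nn.Linear(16 * 14 * 14, 300))        -- project to an embedding-sized output
   return net                                   -- nGPU handling omitted for brevity
end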
-- 2. Create Criterion based on word embeddings
if opt.crit == 'sem' then
   -- cosine embedding loss with margin opt.margin (targets are word embeddings)
   criterion = nn.CosineEmbeddingCriterion(opt.margin)
elseif opt.crit == 'class' or opt.crit == 'softsem' then
   -- negative log-likelihood classification loss (expects log-probabilities)
   criterion = nn.ClassNLLCriterion()
else
   -- fall back to mean squared error against the target embeddings
   criterion = nn.MSECriterion()
end
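
-- Rough sketch (not this repo's actual training loop) of how each criterion
-- above expects to be called; `output` is the network output and `target` the
-- word-embedding vector or class index supplied by the data loader.
local function exampleCriterionForward(crit, output, target)
   if torch.type(crit) == 'nn.CosineEmbeddingCriterion' then
      -- pair of vectors plus a +1/-1 similarity label (+1 = matching pair)
      return crit:forward({output, target}, 1)
   elseif torch.type(crit) == 'nn.ClassNLLCriterion' then
      -- output must be log-probabilities (e.g. from nn.LogSoftMax), target a class index
      return crit:forward(output, target)
   else
      -- nn.MSECriterion: regress the network output onto the target embedding
      return crit:forward(output, target)
   end
end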
print('=> Model')
print(model)
print('=> Criterion')
print(criterion)
-- 3. Convert model to CUDA
print('==> Converting model to CUDA')
-- model is converted to CUDA in the init script itself
-- model = model:cuda()
criterion:cuda()
collectgarbage()
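
-- Usage note (a sketch, not this fork's exact layout): in the upstream
-- Facebook imagenet-style setup, a driver script loads this file after the
-- options and the util.lua helpers are in place, roughly:
--
--   paths.dofile('util.lua')   -- provides loadDataParallel, among others
--   paths.dofile('model.lua')  -- this file: defines `model` and `criterion`
--   paths.dofile('train.lua')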