CosineEmbeddingCriterion.lua
local CosineEmbeddingCriterion, parent = torch.class('nn.CosineEmbeddingCriterion', 'nn.Criterion')

function CosineEmbeddingCriterion:__init(margin)
   parent.__init(self)
   margin = margin or 0
   self.margin = margin
   self.gradInput = {torch.Tensor(), torch.Tensor()}
end
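
-- Note: the margin only affects dissimilar pairs (y == -1); see updateOutput below.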
function CosineEmbeddingCriterion:updateOutput(input, y)
   local input1, input2 = input[1], input[2]
   -- cosine similarity: w1 = <input1, input2>, w2 = ||input1||, w3 = ||input2||
   self.w1 = input1:dot(input2)
   self.w22 = input1:dot(input1)
   self.w2 = math.sqrt(self.w22)
   self.w32 = input2:dot(input2)
   self.w3 = math.sqrt(self.w32)
   self.output = self.w1/self.w2/self.w3
   if y == -1 then
      -- dissimilar pair: penalize only a similarity that exceeds the margin
      self.output = math.max(0, self.output - self.margin)
   else
      -- similar pair: loss is 1 - cos(input1, input2)
      self.output = 1 - self.output
   end
   return self.output
end
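
-- Worked example with illustrative (not original) numbers: for input1 = (1, 0) and
-- input2 = (0, 1), cos = 0, so the loss is 1 - 0 = 1 when y = 1, and
-- max(0, 0 - margin) = 0 when y = -1 for any margin >= 0.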

function CosineEmbeddingCriterion:updateGradInput(input, y)
   local v1 = input[1]
   local v2 = input[2]
   local gw1 = input[1].new()
   local gw2 = input[2].new()
   gw1:resizeAs(v1)
   gw2:resizeAs(v2)
   gw1:zero()
   gw2:zero()
   -- gradient of cos(v1, v2) w.r.t. each input; left at zero when the loss is clamped to 0
   if self.output > 0 then
      gw1:add(1/(self.w2*self.w3), v2)
      gw1:add(-self.w1/(self.w22*self.w2*self.w3), v1)
      gw2:add(1/(self.w2*self.w3), v1)
      gw2:add(-self.w1/(self.w32*self.w2*self.w3), v2)
   end
   -- for similar pairs (y == 1) the loss is 1 - cos, so the gradient flips sign
   if y == 1 then
      gw1:mul(-1)
      gw2:mul(-1)
   end
   self.gradInput = {gw1, gw2}
   return self.gradInput
end
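
--[[
Usage sketch (an assumed example, not part of the original file): the criterion
takes a table of two 1D tensors and a target y equal to 1 for a similar pair or
-1 for a dissimilar pair. The margin value 0.3 below is arbitrary.

   require 'nn'

   local crit = nn.CosineEmbeddingCriterion(0.3)
   local x1 = torch.randn(5)
   local x2 = torch.randn(5)

   local loss = crit:forward({x1, x2}, 1)      -- 1 - cos(x1, x2)
   local grads = crit:backward({x1, x2}, 1)    -- {dloss/dx1, dloss/dx2}
]]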