Commit: Change softmax iterator, also make example compile

mratsim committed Jan 3, 2021
1 parent c69835a commit 1910009
Showing 4 changed files with 18 additions and 15 deletions.
2 changes: 1 addition & 1 deletion examples/ex06_shakespeare_generator.nim
@@ -56,7 +56,7 @@ const
 #
 # ################################################################
 
-func strToTensor(str: string|TaintedString): Tensor[PrintableIdx] =
+proc strToTensor(str: string|TaintedString): Tensor[PrintableIdx] =
   result = newTensor[PrintableIdx](str.len)
 
   # For each x in result, map the corresponding char index
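Note on the change above: in Nim, `func` is shorthand for a `proc` annotated `{.noSideEffect.}`, so a body the compiler cannot prove free of side effects is rejected under `func` but accepted as a plain `proc`, which is presumably why the example did not compile. A minimal standalone sketch of the distinction (a generic illustration, not the commit's actual error):

var hits = 0  # global mutable state

proc countedDouble(x: int): int =
  inc hits     # side effect: legal in a `proc`
  2 * x

# The same body declared as a `func` fails to compile:
# func countedDouble(x: int): int =
#   inc hits   # Error: 'countedDouble' can have side effects
#   2 * x

echo countedDouble(21)  # prints 42, with hits == 1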
21 changes: 11 additions & 10 deletions src/arraymancer/laser/strided_iteration/foreach_common.nim
@@ -46,7 +46,6 @@ proc aliasTensor(id: int, tensor: NimNode): tuple[alias: NimNode, isVar: NimNode
     for i in 2 ..< tensor.len:
       t.add tensor[i]
 
-  var alias = ""
   let isVar = block:
     # Handle slicing cases like foo[0..<1, 0..<2]
     # that do not return `var` but are technically `var`
@@ -57,16 +56,18 @@ proc aliasTensor(id: int, tensor: NimNode): tuple[alias: NimNode, isVar: NimNode
     else:
       quote do: isVar(`t`)
 
-  while t.kind in {nnkDotExpr, nnkBracketExpr}:
-    if t[0].kind notin {nnkIdent, nnkSym}:
-      error "Expected a field name but found \"" & t[0].repr()
-    alias.add $t[0]
-    if t.kind == nnkBracketExpr and validIdentifier(t[1].repr()):
-      alias.add $t[1]
-    alias.add "_"
-    t = t[1]
+  proc flattenedName(node: NimNode): string =
+    if node.kind in {nnkDotExpr, nnkBracketExpr}:
+      result = flattenedName(node[0])
+      result &= '_'
+      result &= flattenedName(node[1])
+    elif node.kind in {nnkIdent, nnkSym}:
+      result = $node
+    else:
+      error "Expected a field name or dot expression or array access but found \"" &
+        node.repr()
 
-  alias &= $t
+  let alias = flattenedName(t)
 
   return (newIdentNode($alias & "_alias" & $id & '_'), isVar)
 
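The recursive `flattenedName` helper replaces the old `while` loop: it walks dot and bracket expressions left to right, so `foo.bar[i]` flattens to "foo_bar_i" before the unique "_alias<id>_" suffix is appended. A standalone sketch of the scheme (the demo macro is illustrative, not part of the commit):

import std/macros

proc flattenedName(node: NimNode): string =
  # Identifiers map to their name; `a.b` and `a[b]` map to the
  # operands' flattened names joined by '_'.
  if node.kind in {nnkDotExpr, nnkBracketExpr}:
    result = flattenedName(node[0])
    result &= '_'
    result &= flattenedName(node[1])
  elif node.kind in {nnkIdent, nnkSym}:
    result = $node
  else:
    error "Expected a field name or dot expression or array access but found \"" &
      node.repr()

macro showAlias(x: untyped): untyped =
  # The argument is untyped, so the identifiers need not be declared.
  echo flattenedName(x) & "_alias0_"  # printed at compile time
  result = newEmptyNode()

showAlias(foo)         # foo_alias0_
showAlias(foo.bar)     # foo_bar_alias0_
showAlias(foo.bar[i])  # foo_bar_i_alias0_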
5 changes: 3 additions & 2 deletions src/arraymancer/nn_primitives/nnp_softmax.nim
@@ -29,10 +29,11 @@ proc softmax*[T](input: Tensor[T]): Tensor[T] {.noInit.} =
   let batch_size = input.shape[0]
   result = zeros_like(input)
 
+  # TODO: the performance of nested parallel regions is highly suspect
   for i in 0||(batch_size-1):
     let (max, sumexp) = input[i, _].streaming_max_sumexp
 
     var res_slice = result[i, _]
 
-    apply2_inline(res_slice, input[i, _]):
-      stable_softmax(y, max, sumexp)
+    forEachSerial r in res_slice, inpi in input[i, _]:
+      r = stable_softmax(inpi, max, sumexp)
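Context for the loop: `0||(batch_size-1)` is Nim's OpenMP-backed parallel iterator, so rows are already distributed across threads; `forEachSerial` traverses each row without opening another parallel region, which is presumably what the new TODO about nested parallel regions refers to. The two helpers implement the numerically stable softmax: a fused pass produces the row maximum and the sum of shifted exponentials, then every element becomes exp(x - max) / sumexp. A plain-Nim sketch of the assumed semantics (not the library implementation):

import std/[math, sequtils]

proc streamingMaxSumexp(xs: seq[float]): tuple[max, sumexp: float] =
  # Single pass: keep a running maximum and rescale the running
  # sum of exponentials whenever the maximum changes.
  result.max = -Inf
  result.sumexp = 0.0
  for x in xs:
    if x <= result.max:
      result.sumexp += exp(x - result.max)
    else:
      result.sumexp = result.sumexp * exp(result.max - x) + 1.0
      result.max = x

proc stableSoftmax(x, max, sumexp: float): float =
  exp(x - max) / sumexp

let logits = @[1.0, 2.0, 3.0]
let (mx, se) = streamingMaxSumexp(logits)
echo logits.mapIt(stableSoftmax(it, mx, se))
# ~ @[0.09003, 0.24473, 0.66524], summing to 1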
5 changes: 3 additions & 2 deletions src/arraymancer/nn_primitives/nnp_softmax_cross_entropy.nim
@@ -185,13 +185,14 @@ proc sparse_softmax_cross_entropy_backward*[T; Idx: SomeNumber or byte or char o
   for i, truth_idx in enumerate(target):
     result[i, int(truth_idx)] = -1
 
+  # TODO: the performance of nested parallel regions is highly suspect
   for i in 0||(batch_size-1):
     let (max, sumexp) = cached_tensor[i, _].streaming_max_sumexp
 
     var res_slice = result[i, _]
 
-    apply2_inline(res_slice, cached_tensor[i, _]):
-      grad * (stable_softmax(y, max, sumexp) + x) / T(batch_size)
+    forEachSerial r in res_slice, ci in cached_tensor[i, _]:
+      r = grad * (stable_softmax(ci, max, sumexp) + r) / T(batch_size)
 
 
 # ################################################
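The earlier loop in this proc pre-fills result[i, int(truth_idx)] = -1, so the update r = grad * (stable_softmax(ci, max, sumexp) + r) / T(batch_size) leaves each row holding grad * (softmax(x) - onehot(target)) / batch_size, the standard softmax cross-entropy gradient; the switch to `forEachSerial` also makes that accumulation explicit, since the old implicit `x` of `apply2_inline` is now the named binding `r`. A self-contained sketch of the computed quantity over plain seqs (assumed semantics, not the library code):

import std/math

proc softmaxXentBackward(grad: float,
                         logits: seq[seq[float]],
                         target: seq[int]): seq[seq[float]] =
  # dL/dx[i][j] = grad * (softmax(x[i])[j] - (if j == target[i]: 1.0 else: 0.0)) / batch_size
  let batchSize = logits.len
  result = newSeq[seq[float]](batchSize)
  for i in 0 ..< batchSize:
    result[i] = newSeq[float](logits[i].len)
    result[i][target[i]] = -1.0                 # pre-filled one-hot term
    var mx = -Inf
    for x in logits[i]:
      mx = max(mx, x)
    var sumexp = 0.0
    for x in logits[i]:
      sumexp += exp(x - mx)
    for j in 0 ..< logits[i].len:
      let sm = exp(logits[i][j] - mx) / sumexp  # stable softmax
      result[i][j] = grad * (sm + result[i][j]) / batchSize.float

echo softmaxXentBackward(1.0, @[@[1.0, 2.0, 3.0]], @[2])
# ~ @[@[0.09003, 0.24473, -0.33476]]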
