From 27092b757f591abbaac4ba56379e872d12c42f16 Mon Sep 17 00:00:00 2001 From: Tatsunosuke Shimada Date: Wed, 16 Dec 2015 15:06:42 +0900 Subject: [PATCH 1/3] -m 'lower camel and snake case fix; unused variable' --- scripts/rnn.lua | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/scripts/rnn.lua b/scripts/rnn.lua index 5cd7649..1692c4f 100644 --- a/scripts/rnn.lua +++ b/scripts/rnn.lua @@ -14,7 +14,7 @@ function RNN:__init(params) self.use_dropout = params['dropout'] self.max_grad = params['maxGrad'] self.dropoutPred = params['dropoutPred'] - self.max_steps = params['maxSteps'] + self.max_steps = params['max_steps'] self.n_input = self.n_questions * 2 self.compressedSensing = params['compressedSensing'] @@ -86,7 +86,7 @@ function RNN:zeroGrad(n_steps) self.layer:zeroGradParameters() end -function RNN:update(n_steps, rate) +function RNN:update(rate) self.start:updateParameters(rate) self.layer:updateParameters(rate) end @@ -293,4 +293,4 @@ function RNN:getPredictionTruth(batch) -- for dropout self.layer:training() return predictionTruths -end \ No newline at end of file +end From 5b2c5b0dd23bdd16b8e2baf20d50a76a422912e0 Mon Sep 17 00:00:00 2001 From: Tatsunosuke Shimada Date: Wed, 16 Dec 2015 15:07:55 +0900 Subject: [PATCH 2/3] dir creation for initial user (dirty hack) --- scripts/trainSynthetic.lua | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/trainSynthetic.lua b/scripts/trainSynthetic.lua index c682354..2b9f927 100644 --- a/scripts/trainSynthetic.lua +++ b/scripts/trainSynthetic.lua @@ -29,6 +29,7 @@ function run() } local name = "result_c" .. CONCEPT_NUM .. "_v" .. VERSION + lfs.mkdir(paths.dirname(outputRoot)) lfs.mkdir(outputRoot) lfs.mkdir(outputRoot .. 
"models") @@ -147,4 +148,4 @@ function semiSortedMiniBatches(dataset, mini_batch_size, trimToBatchSize) return shuffledBatches end -run() \ No newline at end of file +run() From 536fe720e7518bd02372bb60e27fbe52de0f0fc6 Mon Sep 17 00:00:00 2001 From: Tatsunosuke Shimada Date: Wed, 16 Dec 2015 17:01:29 +0900 Subject: [PATCH 3/3] adjusted to Assist --- scripts/rnn.lua | 4 ++-- scripts/trainSynthetic.lua | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/rnn.lua b/scripts/rnn.lua index 1692c4f..b893d8d 100644 --- a/scripts/rnn.lua +++ b/scripts/rnn.lua @@ -14,7 +14,7 @@ function RNN:__init(params) self.use_dropout = params['dropout'] self.max_grad = params['maxGrad'] self.dropoutPred = params['dropoutPred'] - self.max_steps = params['max_steps'] + self.max_steps = params['maxSteps'] self.n_input = self.n_questions * 2 self.compressedSensing = params['compressedSensing'] @@ -86,7 +86,7 @@ function RNN:zeroGrad(n_steps) self.layer:zeroGradParameters() end -function RNN:update(rate) +function RNN:update(n_steps, rate) self.start:updateParameters(rate) self.layer:updateParameters(rate) end diff --git a/scripts/trainSynthetic.lua b/scripts/trainSynthetic.lua index 2b9f927..7b7d732 100644 --- a/scripts/trainSynthetic.lua +++ b/scripts/trainSynthetic.lua @@ -24,7 +24,7 @@ function run() n_hidden = n_hidden, n_questions = data.n_questions, max_grad = 100, - max_steps = data.n_questions, + maxSteps = data.n_questions, --modelDir = outputRoot .. '/models/result_c5_v0_98' } @@ -73,7 +73,7 @@ function trainMiniBatch(rnn, data, init_rate, decay_rate, mini_batch_size, blob_ miniErr = miniErr + err miniTests = miniTests + tests if done % mini_batch_size == 0 then - rnn:update(rate) + rnn:update(nil, rate) rnn:zeroGrad() print(i/#miniBatches, sumErr/numTests) miniErr = 0