From f828368c4d1526ac04e666053b6aa9d7800e5f32 Mon Sep 17 00:00:00 2001 From: ProGamerGov Date: Thu, 22 Feb 2018 18:21:01 -0700 Subject: [PATCH] Improve the Adam optimizer's quality Source: https://github.com/jcjohnson/neural-style/wiki/Fine-Tuning-The-Adam-Optimizer Examples: https://github.com/jcjohnson/neural-style/issues/428#issuecomment-342691522 https://i.imgur.com/iIRLvKZ.png These parameters have also been tested by numerous other individuals with their own customized sets of normal Neural-Style parameter values. These individuals found that the new Adam parameters produced output far more like their L-BFGS results than the old Adam parameters did. --- neural_style.lua | 2 ++ 1 file changed, 2 insertions(+) diff --git a/neural_style.lua b/neural_style.lua index adc7621..34c9a1d 100644 --- a/neural_style.lua +++ b/neural_style.lua @@ -233,6 +233,8 @@ local function main(params) elseif params.optimizer == 'adam' then optim_state = { learningRate = params.learning_rate, + beta1 = 0.99, + epsilon = 1e-1, } else error(string.format('Unrecognized optimizer "%s"', params.optimizer))