From abff8e05bc62c81568527e73b00b085e7d05aad1 Mon Sep 17 00:00:00 2001
From: Jeshua Lacock
Date: Tue, 15 Dec 2015 12:48:25 -0700
Subject: [PATCH] Re-seed image if needed

If the total loss is unchanged from the previous iteration, the image is
re-seeded; otherwise optim.lbfgs aborts with a "function value changing
less than tolX" message and the run fails. Tested with many images and it
has worked every time. Note that the issue is more likely to reproduce
the larger the output image size is; try 1024-1568.
---
 neural_style.lua | 62 ++++++++++++++++++++++++++++++++++++++++++--------------------
 1 file changed, 42 insertions(+), 20 deletions(-)

diff --git a/neural_style.lua b/neural_style.lua
index 7fd5aa5..4d8d48e 100644
--- a/neural_style.lua
+++ b/neural_style.lua
@@ -49,6 +49,9 @@ function nn.SpatialConvolutionMM:accGradParameters()
 end
 
 local function main(params)
+
+  local lastTotalLoss = 0.0
+
   if params.gpu >= 0 then
     require 'cutorch'
     require 'cunn'
@@ -198,27 +201,36 @@ local function main(params)
   end
   collectgarbage()
   
-  -- Initialize the image
-  if params.seed >= 0 then
-    torch.manualSeed(params.seed)
-  end
-  local img = nil
-  if params.init == 'random' then
-    img = torch.randn(content_image:size()):float():mul(0.001)
-  elseif params.init == 'image' then
-    img = content_image_caffe:clone():float()
-  else
-    error('Invalid init type')
-  end
-  if params.gpu >= 0 then
-    img = img:cuda()
-  end
+  local y
+  local dy
+
+  local function initImage(params)
+    -- Initialize the image
+    if params.seed >= 0 then
+      torch.manualSeed(params.seed)
+    end
+    local img = nil
+    if params.init == 'random' then
+      img = torch.randn(content_image:size()):float():mul(0.001)
+    elseif params.init == 'image' then
+      img = content_image_caffe:clone():float()
+    else
+      error('Invalid init type')
+    end
+    if params.gpu >= 0 then
+      img = img:cuda()
+    end
 
-  -- Run it through the network once to get the proper size for the gradient
-  -- All the gradients will come from the extra loss modules, so we just pass
-  -- zeros into the top of the net on the backward pass.
-  local y = net:forward(img)
-  local dy = img.new(#y):zero()
+    -- Run it through the network once to get the proper size for the gradient
+    -- All the gradients will come from the extra loss modules, so we just pass
+    -- zeros into the top of the net on the backward pass.
+    y = net:forward(img)
+    dy = img.new(#y):zero()
+
+    return img
+  end
+
+  local img = initImage(params)
 
   -- Declaring this here lets us access it in maybe_print
   local optim_state = nil
@@ -283,6 +295,16 @@ local function main(params)
     maybe_print(num_calls, loss)
     maybe_save(num_calls)
 
+    if (lastTotalLoss - loss) == 0.0 then
+      print('********** total loss unchanged since last iteration; re-seeding **********')
+      -- copy fresh noise in place so the tensor lbfgs is optimizing picks it up
+      img:copy(initImage(params))
+      lastTotalLoss = 0
+      collectgarbage()
+      return 0, grad:view(grad:nElement())
+    end
+    lastTotalLoss = loss
+
     collectgarbage()
     -- optim.lbfgs expects a vector for gradients
     return loss, grad:view(grad:nElement())
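
Reviewer note (not part of the patch): a standalone sketch of the guard
above, in plain Lua with a hypothetical step() helper, showing when feval
would trigger a re-seed; the patch itself runs this same check against the
real style/content loss inside feval.

    -- Minimal model of the re-seed guard: step() returns true when the
    -- loss has not changed since the previous call.
    local lastTotalLoss = 0.0

    local function step(loss)
      if (lastTotalLoss - loss) == 0.0 then
        lastTotalLoss = 0
        return true    -- unchanged: lbfgs would stop on tolX, so re-seed
      end
      lastTotalLoss = loss
      return false     -- still changing: keep optimizing
    end

    assert(step(5.0) == false)  -- loss moved away from the initial 0.0
    assert(step(5.0) == true)   -- identical loss twice in a row: re-seed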