Browse Source

Saving memory at inference (visualization) by not calculating gradients

Jason Antic 6 years ago
parent
commit
e4f4bb9c34
1 changed file with 6 additions and 4 deletions
  1. 6 4
      fasterai/visualize.py

+ 6 - 4
fasterai/visualize.py

@@ -63,10 +63,12 @@ class ModelImageVisualizer():
     def get_transformed_image_ndarray(self, path:Path, model:nn.Module, sz:int=None, tfms:[Transform]=[]):
         training = model.training 
         model.eval()
-        orig = self._get_model_ready_image_ndarray(path, model, sz, tfms)
-        orig = VV(orig[None])
-        result = model(orig).detach().cpu().numpy()
-        result = self._denorm(result)
+        with torch.no_grad():
+            orig = self._get_model_ready_image_ndarray(path, model, sz, tfms)
+            orig = VV_(orig[None])
+            result = model(orig).detach().cpu().numpy()
+            result = self._denorm(result)
+
         if training:
             model.train()
         return result[0]