# tensorboard.py

import fastai
from fastai import *
from fastai.vision import *
from fastai.callbacks import *
from fastai.vision.gan import *
from fastai.core import *
import statistics
from .images import ModelImageSet
import torchvision.utils as vutils
from tensorboardX import SummaryWriter
class ModelGraphVisualizer():
    def __init__(self):
        return

    def write_model_graph_to_tensorboard(self, md:DataBunch, model:nn.Module, tbwriter:SummaryWriter):
        try:
            x,y = md.one_batch(DatasetType.Valid, detach=False, denorm=False)
            tbwriter.add_graph(model, x)
        except Exception as e:
            print(("Failed to generate graph for model: {0}. Note that there's an outstanding issue with "
                   + "scopes being addressed here: https://github.com/pytorch/pytorch/pull/12400").format(e))
class ModelHistogramVisualizer():
    def __init__(self):
        return

    def write_tensorboard_histograms(self, model:nn.Module, iter_count:int, tbwriter:SummaryWriter, name:str='model'):
        # One histogram per named parameter, grouped under <name>/weights/
        for param_name, param in model.named_parameters():
            tbwriter.add_histogram(name + '/weights/' + param_name, param, iter_count)
class ModelStatsVisualizer():
    def __init__(self):
        return

    def write_tensorboard_stats(self, model:nn.Module, iter_count:int, tbwriter:SummaryWriter, name:str='model'):
        gradients = [x.grad for x in model.parameters() if x.grad is not None]
        if len(gradients) == 0:
            return
        gradient_nps = [to_np(x.data) for x in gradients]

        avg_norm = sum(x.data.norm() for x in gradients) / len(gradients)
        tbwriter.add_scalar(name + '/gradients/avg_norm', avg_norm, iter_count)
        median_norm = statistics.median(x.data.norm() for x in gradients)
        tbwriter.add_scalar(name + '/gradients/median_norm', median_norm, iter_count)
        max_norm = max(x.data.norm() for x in gradients)
        tbwriter.add_scalar(name + '/gradients/max_norm', max_norm, iter_count)
        min_norm = min(x.data.norm() for x in gradients)
        tbwriter.add_scalar(name + '/gradients/min_norm', min_norm, iter_count)
        num_zeros = sum((np.asarray(x) == 0.0).sum() for x in gradient_nps)
        tbwriter.add_scalar(name + '/gradients/num_zeros', num_zeros, iter_count)
        avg_gradient = sum(x.data.mean() for x in gradients) / len(gradients)
        tbwriter.add_scalar(name + '/gradients/avg_gradient', avg_gradient, iter_count)
        median_gradient = statistics.median(x.data.median() for x in gradients)
        tbwriter.add_scalar(name + '/gradients/median_gradient', median_gradient, iter_count)
        max_gradient = max(x.data.max() for x in gradients)
        tbwriter.add_scalar(name + '/gradients/max_gradient', max_gradient, iter_count)
        min_gradient = min(x.data.min() for x in gradients)
        tbwriter.add_scalar(name + '/gradients/min_gradient', min_gradient, iter_count)
class ImageGenVisualizer():
    def output_image_gen_visuals(self, learn:Learner, trn_batch:Tuple, val_batch:Tuple, iter_count:int, tbwriter:SummaryWriter):
        self._output_visuals(learn=learn, batch=val_batch, iter_count=iter_count, tbwriter=tbwriter, ds_type=DatasetType.Valid)
        self._output_visuals(learn=learn, batch=trn_batch, iter_count=iter_count, tbwriter=tbwriter, ds_type=DatasetType.Train)

    def _output_visuals(self, learn:Learner, batch:Tuple, iter_count:int, tbwriter:SummaryWriter, ds_type:DatasetType):
        image_sets = ModelImageSet.get_list_from_model(learn=learn, batch=batch, ds_type=ds_type)
        self._write_tensorboard_images(image_sets=image_sets, iter_count=iter_count, tbwriter=tbwriter, ds_type=ds_type)

    def _write_tensorboard_images(self, image_sets:[ModelImageSet], iter_count:int, tbwriter:SummaryWriter, ds_type:DatasetType):
        orig_images = []
        gen_images = []
        real_images = []

        for image_set in image_sets:
            orig_images.append(image_set.orig.px)
            gen_images.append(image_set.gen.px)
            real_images.append(image_set.real.px)

        prefix = str(ds_type)
        tbwriter.add_image(prefix + ' orig images', vutils.make_grid(orig_images, normalize=True), iter_count)
        tbwriter.add_image(prefix + ' gen images', vutils.make_grid(gen_images, normalize=True), iter_count)
        tbwriter.add_image(prefix + ' real images', vutils.make_grid(real_images, normalize=True), iter_count)
#-------- Below are the callbacks you actually want to use in practice (see the usage sketch at the end of this file) --------#
class LearnerTensorboardWriter(LearnerCallback):
    def __init__(self, learn:Learner, base_dir:Path, name:str, loss_iters:int=25, weight_iters:int=1000, stats_iters:int=1000):
        super().__init__(learn=learn)
        self.base_dir = base_dir
        self.name = name
        log_dir = base_dir/name
        self.tbwriter = SummaryWriter(log_dir=str(log_dir))
        self.loss_iters = loss_iters
        self.weight_iters = weight_iters
        self.stats_iters = stats_iters
        self.iter_count = 0
        self.weight_vis = ModelHistogramVisualizer()
        self.model_vis = ModelStatsVisualizer()
        self.data = None
        # Keeping track of iterations in the callback, because the callback can be used across multiple epochs and multiple fit calls.
        # This ensures that graphs show continuous iterations rather than resetting to 0 (which makes them much harder to read!)
        self.iteration = -1
    def _update_batches_if_needed(self):
        # one_batch is extremely slow, so only refresh the cached batches when the underlying data object changes
        update_batches = self.data is not self.learn.data
        if update_batches:
            self.data = self.learn.data
            self.trn_batch = self.learn.data.one_batch(DatasetType.Train, detach=False, denorm=False)
            self.val_batch = self.learn.data.one_batch(DatasetType.Valid, detach=False, denorm=False)
    def _write_model_stats(self, iteration):
        self.model_vis.write_tensorboard_stats(model=self.learn.model, iter_count=iteration, tbwriter=self.tbwriter)

    def _write_training_loss(self, iteration, last_loss):
        trn_loss = to_np(last_loss)
        self.tbwriter.add_scalar('/loss/trn_loss', trn_loss, iteration)

    def _write_weight_histograms(self, iteration):
        self.weight_vis.write_tensorboard_histograms(model=self.learn.model, iter_count=iteration, tbwriter=self.tbwriter)

    def _write_val_loss(self, iteration, last_metrics):
        # TODO: Not a fan of this indexing but...what to do?
        val_loss = last_metrics[0]
        self.tbwriter.add_scalar('/loss/val_loss', val_loss, iteration)

    def _write_metrics(self, iteration):
        rec = self.learn.recorder
        for i, name in enumerate(rec.names[3:]):
            if len(rec.metrics) == 0: continue
            if len(rec.metrics[-1:]) == 0: continue
            if len(rec.metrics[-1:][0]) == 0: continue
            value = rec.metrics[-1:][0][i]
            if value is None: continue
            self.tbwriter.add_scalar('/metrics/' + name, to_np(value), iteration)
    def on_batch_end(self, last_loss, metrics, **kwargs):
        self.iteration += 1
        iteration = self.iteration
        if iteration == 0:
            return
        self._update_batches_if_needed()
        if iteration % self.loss_iters == 0:
            self._write_training_loss(iteration, last_loss)
        if iteration % self.weight_iters == 0:
            self._write_weight_histograms(iteration)
        if iteration % self.stats_iters == 0:
            self._write_model_stats(iteration)

    def on_epoch_end(self, metrics, last_metrics, **kwargs):
        iteration = self.iteration
        self._write_val_loss(iteration, last_metrics)
        self._write_metrics(iteration)
class GANTensorboardWriter(LearnerTensorboardWriter):
    def __init__(self, learn:Learner, base_dir:Path, name:str, loss_iters:int=25, weight_iters:int=1000,
                 stats_iters:int=1000, visual_iters:int=100):
        super().__init__(learn=learn, base_dir=base_dir, name=name, loss_iters=loss_iters,
                         weight_iters=weight_iters, stats_iters=stats_iters)
        self.visual_iters = visual_iters
        self.img_gen_vis = ImageGenVisualizer()

    # override
    def _write_training_loss(self, iteration, last_loss):
        trainer = self.learn.gan_trainer
        recorder = trainer.recorder
        if len(recorder.losses) > 0:
            trn_loss = to_np((recorder.losses[-1:])[0])
            self.tbwriter.add_scalar('/loss/trn_loss', trn_loss, iteration)

    # override
    def _write_weight_histograms(self, iteration):
        trainer = self.learn.gan_trainer
        generator = trainer.generator
        critic = trainer.critic
        self.weight_vis.write_tensorboard_histograms(model=generator, iter_count=iteration, tbwriter=self.tbwriter, name='generator')
        self.weight_vis.write_tensorboard_histograms(model=critic, iter_count=iteration, tbwriter=self.tbwriter, name='critic')

    # override
    def _write_model_stats(self, iteration):
        trainer = self.learn.gan_trainer
        generator = trainer.generator
        critic = trainer.critic
        self.model_vis.write_tensorboard_stats(model=generator, iter_count=iteration, tbwriter=self.tbwriter, name='generator')
        self.model_vis.write_tensorboard_stats(model=critic, iter_count=iteration, tbwriter=self.tbwriter, name='critic')

    # override
    def _write_val_loss(self, iteration, last_metrics):
        trainer = self.learn.gan_trainer
        recorder = trainer.recorder
        if len(recorder.val_losses) > 0:
            val_loss = (recorder.val_losses[-1:])[0]
            self.tbwriter.add_scalar('/loss/val_loss', val_loss, iteration)
    def _write_images(self, iteration):
        trainer = self.learn.gan_trainer
        # Switch to generator mode to produce the visuals, then restore whatever mode the trainer was in
        gen_mode = trainer.gen_mode
        trainer.switch(gen_mode=True)
        self.img_gen_vis.output_image_gen_visuals(learn=self.learn, trn_batch=self.trn_batch, val_batch=self.val_batch,
                                                  iter_count=iteration, tbwriter=self.tbwriter)
        trainer.switch(gen_mode=gen_mode)

    def on_batch_end(self, metrics, **kwargs):
        super().on_batch_end(metrics=metrics, **kwargs)
        iteration = self.iteration
        if iteration == 0:
            return
        if iteration % self.visual_iters == 0:
            self._write_images(iteration)
class ImageGenTensorboardWriter(LearnerTensorboardWriter):
    def __init__(self, learn:Learner, base_dir:Path, name:str, loss_iters:int=25, weight_iters:int=1000,
                 stats_iters:int=1000, visual_iters:int=100):
        super().__init__(learn=learn, base_dir=base_dir, name=name, loss_iters=loss_iters, weight_iters=weight_iters,
                         stats_iters=stats_iters)
        self.visual_iters = visual_iters
        self.img_gen_vis = ImageGenVisualizer()

    def _write_images(self, iteration):
        self.img_gen_vis.output_image_gen_visuals(learn=self.learn, trn_batch=self.trn_batch, val_batch=self.val_batch,
                                                  iter_count=iteration, tbwriter=self.tbwriter)

    def on_batch_end(self, metrics, **kwargs):
        super().on_batch_end(metrics=metrics, **kwargs)
        iteration = self.iteration
        if iteration == 0:
            return
        if iteration % self.visual_iters == 0:
            self._write_images(iteration)
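
# --- Usage sketch (an assumption for illustration, not part of this module) ---
# The classes above attach to a fastai v1 Learner as callbacks; 'learn', the log directory,
# and the run name below are hypothetical placeholders.
#
#   tb_writer = LearnerTensorboardWriter(learn=learn, base_dir=Path('./tb_logs'), name='run1',
#                                        loss_iters=25, weight_iters=1000, stats_iters=1000)
#   learn.callbacks.append(tb_writer)
#   learn.fit_one_cycle(1, 1e-3)
#
# For GAN or image-generation training, GANTensorboardWriter / ImageGenTensorboardWriter take the
# same arguments plus visual_iters, which controls how often image grids are written.
# View the results with:  tensorboard --logdir ./tb_logs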