From 3f4fc5016334255d6908b20202267ca0b0287335 Mon Sep 17 00:00:00 2001
From: Jerry Ma
Date: Mon, 23 Sep 2019 12:25:44 -0700
Subject: [PATCH] Miscellaneous documentation improvements: (#868)

Summary:
- More clearly document the correspondence between FairseqAdam and torch.optim.AdamW
- Add ResamplingDataset to Sphinx docs

Pull Request resolved: https://github.com/fairinternal/fairseq-py/pull/868

Differential Revision: D17523244

Pulled By: jma127

fbshipit-source-id: 8e7b34b24889b2c8f70b09a52a625d2af135734b
---
 docs/data.rst         | 2 ++
 fairseq/optim/adam.py | 6 ++++++
 2 files changed, 8 insertions(+)

diff --git a/docs/data.rst b/docs/data.rst
index a2a464ec..6a390cb3 100644
--- a/docs/data.rst
+++ b/docs/data.rst
@@ -30,6 +30,8 @@ provide additional functionality:
     :members:
 .. autoclass:: fairseq.data.ConcatDataset
     :members:
+.. autoclass:: fairseq.data.ResamplingDataset
+    :members:
 .. autoclass:: fairseq.data.RoundRobinZipDatasets
     :members:
 .. autoclass:: fairseq.data.TransformEosDataset
diff --git a/fairseq/optim/adam.py b/fairseq/optim/adam.py
index 80de7f00..e60a7db5 100644
--- a/fairseq/optim/adam.py
+++ b/fairseq/optim/adam.py
@@ -15,6 +15,12 @@ from . import FairseqOptimizer, register_optimizer
 
 @register_optimizer('adam')
 class FairseqAdam(FairseqOptimizer):
+    """Adam optimizer for fairseq.
+
+    Important note: this optimizer corresponds to the "AdamW" variant of
+    Adam in its weight decay behavior. As such, it is most closely
+    analogous to torch.optim.AdamW from PyTorch.
+    """
 
     def __init__(self, args, params):
         super().__init__(args)
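
For reference, the correspondence called out in the new FairseqAdam docstring can be sketched with stock PyTorch: because the optimizer applies decoupled weight decay (weights are decayed directly rather than through an L2 term folded into the gradient), the nearest built-in analogue is torch.optim.AdamW. The hyperparameter values below are illustrative placeholders standing in for fairseq's --lr, --adam-betas, --adam-eps, and --weight-decay flags, not values taken from this patch.

    import torch
    import torch.nn as nn

    model = nn.Linear(16, 4)

    # Decoupled weight decay, as in FairseqAdam's behavior described above;
    # plain torch.optim.Adam would instead add weight_decay to the gradient
    # before the adaptive update.
    optimizer = torch.optim.AdamW(
        model.parameters(),
        lr=5e-4,             # stand-in for --lr
        betas=(0.9, 0.98),   # stand-in for --adam-betas '(0.9, 0.98)'
        eps=1e-8,            # stand-in for --adam-eps
        weight_decay=0.01,   # stand-in for --weight-decay
    )

    # One illustrative update step.
    loss = model(torch.randn(8, 16)).pow(2).mean()
    loss.backward()
    optimizer.step()
    optimizer.zero_grad()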