From a0f75996b1e25c97149bcbc6aad7eed5601daab0 Mon Sep 17 00:00:00 2001
From: Myle Ott
Date: Sat, 2 Nov 2019 16:51:32 -0700
Subject: [PATCH] Fix building of docs

Summary: Pull Request resolved: https://github.com/pytorch/fairseq/pull/1340

Differential Revision: D18289455

Pulled By: myleott

fbshipit-source-id: a1c8163a35273b6c646d300142701e8a317d7378
---
 docs/conf.py                       |  2 +-
 examples/backtranslation/README.md |  9 ++++--
 examples/language_model/README.md  |  5 +++
 examples/translation/README.md     |  5 +++
 examples/wmt19/README.md           | 13 ++++++++
 setup.py                           | 49 +++++++++++++++++++++++-------
 6 files changed, 69 insertions(+), 14 deletions(-)

diff --git a/docs/conf.py b/docs/conf.py
index d6ee5c4eb..11358ca2e 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -50,7 +50,7 @@ master_doc = 'index'
 
 # General information about the project.
 project = 'fairseq'
-copyright = '2018, Facebook AI Research (FAIR)'
+copyright = '2019, Facebook AI Research (FAIR)'
 author = 'Facebook AI Research (FAIR)'
 github_doc_root = 'https://github.com/pytorch/fairseq/tree/master/docs/'
 
diff --git a/examples/backtranslation/README.md b/examples/backtranslation/README.md
index a834214ad..bc32675de 100644
--- a/examples/backtranslation/README.md
+++ b/examples/backtranslation/README.md
@@ -8,9 +8,14 @@ Model | Description | Dataset | Download
 ---|---|---|---
 `transformer.wmt18.en-de` | Transformer<br>([Edunov et al., 2018](https://arxiv.org/abs/1808.09381))<br>WMT'18 winner | [WMT'18 English-German](http://www.statmt.org/wmt18/translation-task.html) | [download (.tar.gz)](https://dl.fbaipublicfiles.com/fairseq/models/wmt18.en-de.ensemble.tar.gz)<br>See NOTE in the archive
 
-## Example usage
+## Example usage (torch.hub)
 
-Interactive generation from the full ensemble via PyTorch Hub:
+We require a few additional Python dependencies for preprocessing:
+```bash
+pip install subword_nmt sacremoses
+```
+
+Then to generate translations from the full model ensemble:
 ```python
 import torch
 
diff --git a/examples/language_model/README.md b/examples/language_model/README.md
index 8c7da50f3..f10eb4cb2 100644
--- a/examples/language_model/README.md
+++ b/examples/language_model/README.md
@@ -12,6 +12,11 @@ Model | Description | Dataset | Download
 
 ## Example usage
 
+We require a few additional Python dependencies for preprocessing:
+```bash
+pip install fastBPE sacremoses
+```
+
 To sample from a language model using PyTorch Hub:
 ```python
 import torch
diff --git a/examples/translation/README.md b/examples/translation/README.md
index 9807a13e9..37c44690c 100644
--- a/examples/translation/README.md
+++ b/examples/translation/README.md
@@ -20,6 +20,11 @@ Model | Description | Dataset | Download
 
 ## Example usage (torch.hub)
 
+We require a few additional Python dependencies for preprocessing:
+```bash
+pip install sacremoses subword_nmt
+```
+
 Interactive translation via PyTorch Hub:
 ```python
 import torch
diff --git a/examples/wmt19/README.md b/examples/wmt19/README.md
index 6eb781892..34623575d 100644
--- a/examples/wmt19/README.md
+++ b/examples/wmt19/README.md
@@ -16,6 +16,15 @@ Model | Description | Download
 
 ## Example usage (torch.hub)
 
+#### Requirements
+
+We require a few additional Python dependencies for preprocessing:
+```bash
+pip install fastBPE sacremoses
+```
+
+#### Translation
+
 ```python
 import torch
 
@@ -38,7 +47,11 @@ en2ru.translate("Machine learning is great!") # 'Машинное обучение - это здорово!'
 
 ru2en = torch.hub.load('pytorch/fairseq', 'transformer.wmt19.ru-en', checkpoint_file='model1.pt:model2.pt:model3.pt:model4.pt', tokenizer='moses', bpe='fastbpe')
 ru2en.translate("Машинное обучение - это здорово!") # 'Machine learning is great!'
+```
 
+#### Language Modeling
+
+```python
 # Sample from the English LM
 en_lm = torch.hub.load('pytorch/fairseq', 'transformer_lm.wmt19.en', tokenizer='moses', bpe='fastbpe')
 en_lm.sample("Machine learning is") # 'Machine learning is the future of computing, says Microsoft boss Satya Nadella ...'
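The README hunks above all document the same torch.hub workflow: install the extra tokenizer/BPE packages, then load a released model. A condensed sketch for reference; it reuses only the model names and keyword arguments that appear in the hunks above, while the consolidation into one script and the `print` wrapping are illustrative:

```python
# Condensed sketch of the torch.hub workflow the README changes describe.
# Prerequisite (see the hunks above): pip install fastBPE sacremoses subword_nmt
import torch

# WMT'19 Russian-English ensemble; checkpoint names are joined with ':' and the
# tokenizer/bpe arguments match the snippets added in examples/wmt19/README.md.
ru2en = torch.hub.load(
    'pytorch/fairseq', 'transformer.wmt19.ru-en',
    checkpoint_file='model1.pt:model2.pt:model3.pt:model4.pt',
    tokenizer='moses', bpe='fastbpe',
)
print(ru2en.translate("Машинное обучение - это здорово!"))  # 'Machine learning is great!'

# WMT'19 English language model, sampled as in the same README.
en_lm = torch.hub.load('pytorch/fairseq', 'transformer_lm.wmt19.en',
                       tokenizer='moses', bpe='fastbpe')
print(en_lm.sample("Machine learning is"))
```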
diff --git a/setup.py b/setup.py
index 33849f810..06d21e265 100644
--- a/setup.py
+++ b/setup.py
@@ -4,13 +4,13 @@
 # This source code is licensed under the MIT license found in the
 # LICENSE file in the root directory of this source tree.
 
+import os
 from setuptools import setup, find_packages, Extension
-from torch.utils import cpp_extension
 import sys
 
 
-if sys.version_info < (3,):
-    sys.exit('Sorry, Python3 is required for fairseq.')
+if sys.version_info < (3, 5):
+    sys.exit('Sorry, Python >=3.5 is required for fairseq.')
 
 
 with open('README.md') as f:
@@ -61,15 +61,42 @@ extensions = [
         language='c++',
         extra_compile_args=extra_compile_args,
     ),
-    cpp_extension.CppExtension(
-        'fairseq.libnat',
-        sources=[
-            'fairseq/clib/libnat/edit_dist.cpp',
-        ],
-    )
 ]
 
 
+cmdclass = {}
+
+
+try:
+    # torch is not available when generating docs
+    from torch.utils import cpp_extension
+    extensions.extend([
+        cpp_extension.CppExtension(
+            'fairseq.libnat',
+            sources=[
+                'fairseq/clib/libnat/edit_dist.cpp',
+            ],
+        ),
+    ])
+    cmdclass['build_ext'] = cpp_extension.BuildExtension
+except ImportError:
+    pass
+
+
+if 'READTHEDOCS' in os.environ:
+    # don't build extensions when generating docs
+    extensions = []
+    if 'build_ext' in cmdclass:
+        del cmdclass['build_ext']
+
+    # use CPU build of PyTorch
+    dependency_links = [
+        'https://download.pytorch.org/whl/cpu/torch-1.3.0%2Bcpu-cp36-cp36m-linux_x86_64.whl'
+    ]
+else:
+    dependency_links = []
+
+
 setup(
     name='fairseq',
     version='0.8.0',
@@ -92,13 +119,13 @@ setup(
     install_requires=[
         'cffi',
         'cython',
-        'fastBPE',
         'numpy',
         'regex',
         'sacrebleu',
         'torch',
         'tqdm',
     ],
+    dependency_links=dependency_links,
     packages=find_packages(exclude=['scripts', 'tests']),
     ext_modules=extensions,
     test_suite='tests',
@@ -113,6 +140,6 @@ setup(
             'fairseq-validate = fairseq_cli.validate:cli_main',
         ],
     },
-    cmdclass={'build_ext': cpp_extension.BuildExtension},
+    cmdclass=cmdclass,
     zip_safe=False,
 )
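The setup.py changes exist so the Sphinx docs can build in an environment without torch or a C++ toolchain. A rough sketch of how that path could be exercised locally; the `READTHEDOCS` variable and the `docs/` source directory come from this patch, while running from the repository root, having Sphinx installed, the editable install step, and the output path are assumptions:

```python
# Rough local check of the docs-build path this patch targets: setting
# READTHEDOCS makes setup.py skip the C++ extensions (see the diff above),
# after which the Sphinx docs under docs/ should build. Assumes Sphinx is
# installed and this runs from the fairseq repository root; the
# docs/_build/html output path is arbitrary.
import os
import subprocess
import sys

from sphinx.cmd.build import build_main

os.environ['READTHEDOCS'] = 'True'  # the environment key setup.py checks
subprocess.check_call([sys.executable, '-m', 'pip', 'install', '--editable', '.'])
sys.exit(build_main(['-b', 'html', 'docs', 'docs/_build/html']))
```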