Summary:
We need to keep `--num-workers=0` during tests

Pull Request resolved: https://github.com/fairinternal/fairseq-py/pull/1352

Reviewed By: alexeib

Differential Revision: D24375411

Pulled By: myleott

fbshipit-source-id: 9975ed5405f3b19b4dd0877ca15ee3081b185942
This commit is contained in:
Myle Ott 2020-10-16 17:35:01 -07:00 committed by Facebook GitHub Bot
parent f2fa07106c
commit 2d900bf308
3 changed files with 15 additions and 6 deletions

View File

@@ -139,7 +139,7 @@ def _quantize_language_model(data_dir, arch, extra_flags=None, run_validation=Fa
"--ddp-backend",
"no_c10d",
"--num-workers",
0,
"0",
]
+ (extra_flags or []),
)
@@ -177,7 +177,7 @@ def _quantize_language_model(data_dir, arch, extra_flags=None, run_validation=Fa
"--ddp-backend",
"no_c10d",
"--num-workers",
0,
"0",
"--quant-noise-scalar",
"0.5",
]
@@ -215,7 +215,7 @@ def _quantize_language_model(data_dir, arch, extra_flags=None, run_validation=Fa
"--ddp-backend",
"no_c10d",
"--num-workers",
0,
"0",
"--restore-file",
os.path.join(data_dir, "checkpoint_last.pt"),
"--reset-optimizer",

View File

@@ -888,6 +888,8 @@ def train_legacy_masked_language_model(data_dir, arch, extra_args=()):
"1",
"--dataset-impl",
"raw",
"--num-workers",
"0",
] + list(extra_args),
)
train.main(train_args)
@@ -973,7 +975,7 @@ def train_masked_lm(data_dir, arch, extra_flags=None):
'--no-progress-bar',
'--distributed-world-size', '1',
'--ddp-backend', 'no_c10d',
'--num-workers', 0,
'--num-workers', '0',
] + (extra_flags or []),
)
train.main(train_args)
@@ -1000,7 +1002,7 @@ def train_roberta_head(data_dir, arch, num_classes=2, extra_flags=None):
'--no-progress-bar',
'--distributed-world-size', '1',
'--ddp-backend', 'no_c10d',
'--num-workers', 0,
'--num-workers', '0',
] + (extra_flags or []),
)
train.main(train_args)
@@ -1025,6 +1027,7 @@ def train_language_model(data_dir, arch, extra_flags=None, run_validation=False)
'--no-progress-bar',
'--distributed-world-size', '1',
'--ddp-backend', 'no_c10d',
'--num-workers', '0',
] + (extra_flags or []),
)
train.main(train_args)
@@ -1041,6 +1044,7 @@ def train_language_model(data_dir, arch, extra_flags=None, run_validation=False)
'--valid-subset', 'valid',
'--max-tokens', '500',
'--no-progress-bar',
'--num-workers', '0',
]
)
validate.main(validate_args)
@@ -1054,6 +1058,7 @@ def eval_lm_main(data_dir):
data_dir,
'--path', os.path.join(data_dir, 'checkpoint_last.pt'),
'--no-progress-bar',
'--num-workers', '0',
],
)
eval_lm.main(eval_lm_args)
@@ -1117,6 +1122,8 @@ def train_masked_language_model(data_dir, arch, extra_args=()):
"1",
"--dataset-impl",
"raw",
"--num-workers",
"0",
] + list(extra_args),
)
train.main(train_args)

View File

@@ -235,7 +235,7 @@ def train_translation_model(data_dir, arch, extra_flags=None, task='translation'
'--max-epoch', '1',
'--no-progress-bar',
'--distributed-world-size', '1',
'--num-workers', 0,
'--num-workers', '0',
] + lang_flags + (extra_flags or []),
)
train.main(train_args)
@@ -252,6 +252,7 @@ def train_translation_model(data_dir, arch, extra_flags=None, task='translation'
'--valid-subset', 'valid',
'--max-tokens', '500',
'--no-progress-bar',
'--num-workers', '0',
] + lang_flags + (extra_valid_flags or [])
)
validate.main(validate_args)
@@ -273,6 +274,7 @@ def generate_main(data_dir, extra_flags=None):
'--max-len-b', '5',
'--gen-subset', 'valid',
'--no-progress-bar',
'--num-workers', '0',
] + (extra_flags or []),
)