Update transformer_config.py

Fix the default factory for the 'decoder' field in TransformerConfig: use DecoderConfig instead of EncDecBaseConfig.
lif 2023-11-13 10:43:54 +08:00 committed by GitHub
parent 9b31f305d8
commit d01a9c30d8

@@ -117,7 +117,7 @@ class TransformerConfig(FairseqDataclass):
         default=DEFAULT_MAX_SOURCE_POSITIONS,
         metadata={"help": "Maximum input length supported by the encoder"},
     )
-    decoder: DecoderConfig = field(default_factory=EncDecBaseConfig)
+    decoder: DecoderConfig = field(default_factory=DecoderConfig)
     # TODO should really be in the decoder config
     max_target_positions: int = field(
         default=DEFAULT_MAX_TARGET_POSITIONS,
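
For context: the field's type annotation already names DecoderConfig, but the old default_factory constructed the base EncDecBaseConfig, so the default value was missing the decoder-only fields. Below is a minimal sketch of that behavior with simplified, hypothetical field names, not the actual fairseq definitions:

from dataclasses import dataclass, field

@dataclass
class EncDecBaseConfig:
    embed_dim: int = 512          # field shared by encoder and decoder

@dataclass
class DecoderConfig(EncDecBaseConfig):
    input_dim: int = 512          # decoder-only field (hypothetical default)

@dataclass
class TransformerConfig:
    # before the fix: decoder: DecoderConfig = field(default_factory=EncDecBaseConfig)
    decoder: DecoderConfig = field(default_factory=DecoderConfig)

cfg = TransformerConfig()
print(cfg.decoder.input_dim)      # 512 after the fix; AttributeError with the old factory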