Skip to content

Commit

Permalink
Merge branch 'modular-multilingual-translation' into mtee
Browse files Browse the repository at this point in the history
  • Loading branch information
taidopurason committed Nov 26, 2021
2 parents 871fe19 + 7093ca2 commit ee7db79
Showing 1 changed file with 11 additions and 0 deletions.
11 changes: 11 additions & 0 deletions fairseq/tasks/multilingual_translation_sampled.py
Original file line number Diff line number Diff line change
Expand Up @@ -226,6 +226,9 @@ def get_batch_iterator(
epoch=1,
data_buffer_size=0,
disable_iterator_cache=False,
skip_remainder_batch=False,
grouped_shuffling=False,
update_epoch_batch_itr=False,
):
"""
Get an iterator that yields batches of data from the given dataset.
Expand Down Expand Up @@ -258,6 +261,11 @@ def get_batch_iterator(
disable_iterator_cache (bool, optional): don't cache the
EpochBatchIterator (ignores `FairseqTask::can_reuse_epoch_itr`)
(default: False).
grouped_shuffling (bool, optional): group batches with each group
containing num_shards batches and shuffle groups. Reduces difference
between sequence lengths among workers for batches sorted by length.
update_epoch_batch_itr (bool, optional): if true then do not use the cached
batch iterator for the epoch
Returns:
~fairseq.iterators.EpochBatchIterator: a batched iterator over the
given dataset split
Expand All @@ -281,6 +289,9 @@ def get_batch_iterator(
epoch=epoch,
data_buffer_size=data_buffer_size,
disable_iterator_cache=disable_iterator_cache,
skip_remainder_batch=False,
grouped_shuffling=False,
update_epoch_batch_itr=False,
)
self.dataset_to_epoch_iter[dataset] = batch_iter
return batch_iter
Expand Down

0 comments on commit ee7db79

Please sign in to comment.