Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add test to enforce infinite buffer size for all applicable datapipes #5707

Merged
merged 4 commits into from
Mar 30, 2022
Merged
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
49 changes: 14 additions & 35 deletions test/test_prototype_builtin_datasets.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,17 +8,8 @@
from builtin_dataset_mocks import parametrize_dataset_mocks, DATASET_MOCKS
from torch.testing._comparison import assert_equal, TensorLikePair, ObjectPair
from torch.utils.data.graph import traverse
from torchdata.datapipes.iter import (
IterDataPipe,
Shuffler,
ShardingFilter,
Demultiplexer,
Forker,
Grouper,
MaxTokenBucketizer,
UnZipper,
IterKeyZipper,
)
from torch.utils.data.graph_settings import get_all_graph_pipes
from torchdata.datapipes.iter import IterDataPipe, Shuffler, ShardingFilter
from torchvision._utils import sequence_to_str
from torchvision.prototype import transforms, datasets
from torchvision.prototype.datasets.utils._internal import INFINITE_BUFFER_SIZE
Expand All @@ -29,6 +20,10 @@
)


def extract_datapipes(dp):
    """Return a flat list of every datapipe node in the graph rooted at *dp*.

    The traversal is restricted to datapipes (``only_datapipe=True``) so that
    arbitrary attributes hanging off a pipe are not walked.
    """
    graph = traverse(dp, only_datapipe=True)
    return get_all_graph_pipes(graph)


@pytest.fixture
def test_home(mocker, tmp_path):
mocker.patch("torchvision.prototype.datasets._api.home", return_value=str(tmp_path))
Expand All @@ -45,15 +40,6 @@ def test_coverage():
)


def extract_datapipes(dp):
    """Yield every datapipe node reachable in the traversal graph of *dp*.

    ``traverse`` returns a nested mapping of ``{pipe: sub_graph}``; this walks
    it depth-first, yielding each pipe before descending into its children.
    """

    def walk(subtree):
        for pipe, children in subtree.items():
            yield pipe
            yield from walk(children)

    yield from walk(traverse(dp))


@pytest.mark.filterwarnings("error")
class TestCommon:
@parametrize_dataset_mocks(DATASET_MOCKS)
Expand Down Expand Up @@ -169,21 +155,14 @@ def test_infinite_buffer_size(self, test_home, dataset_mock, config):
dataset = datasets.load(dataset_mock.name, **config)

for dp in extract_datapipes(dataset):
if isinstance(
dp,
(
Shuffler,
Demultiplexer,
Forker,
Grouper,
MaxTokenBucketizer,
UnZipper,
IterKeyZipper,
),
):
# TODO: replace this with the proper sentinel as soon as https://github.com/pytorch/data/issues/335 is
# resolved
assert dp.buffer_size == INFINITE_BUFFER_SIZE
try:
buffer_size = getattr(dp, "buffer_size")
except AttributeError:
continue
pmeier marked this conversation as resolved.
Show resolved Hide resolved

# TODO: replace this with the proper sentinel as soon as https://github.com/pytorch/data/issues/335 is
# resolved
assert buffer_size == INFINITE_BUFFER_SIZE


@parametrize_dataset_mocks(DATASET_MOCKS["qmnist"])
Expand Down