diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index e29e7242f5..8d36a68951 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -157,7 +157,7 @@ jobs:
       # this workflow
       - name: Run tests
         run: |
-          python tests/utils/pytest_wrapper.py tests/ --verbose --ignore tests/benchmarking/ --ignore tests/isolated/ --ignore tests/utils/ --ignore tests/import_export/
+          python tests/utils/pytest_wrapper.py tests/ --verbose --ignore tests/benchmarking/ --ignore tests/isolated/ --ignore tests/utils/ --ignore tests/intensive/
       # Intended to run even if the tests above failed (todo: don't run if the
       # tests were skipped due to an earlier step failing?)
       - name: Run isolated tests
diff --git a/tests/README.md b/tests/README.md
index 13911c7770..a150c4e95b 100644
--- a/tests/README.md
+++ b/tests/README.md
@@ -6,13 +6,13 @@ FiftyOne currently uses both
 
 ## Contents
 
-| File                 | Description                                                                                 |
-| -------------------- | ------------------------------------------------------------------------------------------- |
-| `unittests/*.py`     | Unit tests checking expected behavior of FiftyOne                                           |
-| `benchmarking/*.py`  | Tests related to benchmarking the performance of FiftyOne                                   |
-| `import_export/*.py` | Tests for importing/exporting datasets                                                      |
-| `isolated/*.py`      | Tests that must be run in a separate `pytest` process to avoid interfering with other tests |
-| `misc/*.py`          | Miscellaneous tests that have not been upgraded to official unit tests                      |
+| File                | Description                                                                                 |
+| ------------------- | ------------------------------------------------------------------------------------------- |
+| `unittests/*.py`    | Unit tests checking expected behavior of FiftyOne                                           |
+| `benchmarking/*.py` | Tests related to benchmarking the performance of FiftyOne                                   |
+| `intensive/*.py`    | Computationally intensive tests                                                             |
+| `isolated/*.py`     | Tests that must be run in a separate `pytest` process to avoid interfering with other tests |
+| `misc/*.py`         | Miscellaneous tests that have not been upgraded to official unit tests                      |
 
 ## Running tests
 
diff --git a/tests/import_export/zoo_tests.py b/tests/intensive/dataset_zoo_tests.py
similarity index 100%
rename from tests/import_export/zoo_tests.py
rename to tests/intensive/dataset_zoo_tests.py
diff --git a/tests/import_export/dataset_tests.py b/tests/intensive/import_export_tests.py
similarity index 100%
rename from tests/import_export/dataset_tests.py
rename to tests/intensive/import_export_tests.py
diff --git a/tests/import_export/model_tests.py b/tests/intensive/model_zoo_tests.py
similarity index 100%
rename from tests/import_export/model_tests.py
rename to tests/intensive/model_zoo_tests.py
diff --git a/tests/intensive/utils_tests.py b/tests/intensive/utils_tests.py
new file mode 100644
index 0000000000..949b327b90
--- /dev/null
+++ b/tests/intensive/utils_tests.py
@@ -0,0 +1,169 @@
+"""
+Utils tests.
+
+| Copyright 2017-2020, Voxel51, Inc.
+| `voxel51.com <https://voxel51.com/>`_
+|
+"""
+import os
+
+import numpy as np
+import pytest
+
+import eta.core.image as etai
+import eta.core.utils as etau
+import eta.core.video as etav
+
+import fiftyone as fo
+import fiftyone.utils.image as foui
+import fiftyone.utils.video as fouv
+
+
+@pytest.fixture
+def tmpdir():
+    with etau.TempDir() as _tmpdir:
+        yield _tmpdir
+
+
+def _write_image(image_path, size):
+    img = np.random.randint(255, size=size + (3,), dtype=np.uint8)
+    etai.write(img, image_path)
+
+
+def _write_video(video_path, fps, size, num_frames):
+    frame_size = (size[1], size[0])
+    with etav.FFmpegVideoWriter(video_path, fps, frame_size) as writer:
+        for _ in range(num_frames):
+            img = np.random.randint(255, size=size + (3,), dtype=np.uint8)
+            writer.write(img)
+
+
+def _make_dataset(source_path, dataset_dir, num_samples):
+    ext = os.path.splitext(source_path)[1]
+
+    samples = []
+    for idx in range(num_samples):
+        filepath = os.path.join(dataset_dir, "%06d%s" % (idx, ext))
+        etau.copy_file(source_path, filepath)
+        samples.append(fo.Sample(filepath=filepath))
+
+    dataset = fo.Dataset()
+    dataset.add_samples(samples)
+    return dataset
+
+
+def test_compute_image_metadata(tmpdir):
+    image_path = os.path.join(tmpdir, "image.png")
+    dataset_dir = os.path.join(tmpdir, "images")
+
+    _height = 720
+    _width = 1280
+
+    _write_image(image_path, size=(_height, _width))
+    dataset = _make_dataset(image_path, dataset_dir, num_samples=100)
+
+    sample = dataset.first()
+    assert sample.metadata is None
+
+    dataset.compute_metadata()
+    assert sample.metadata.height == _height
+    assert sample.metadata.width == _width
+
+    dataset.compute_metadata(overwrite=True)
+    assert sample.metadata.height == _height
+    assert sample.metadata.width == _width
+
+    dataset.clear_sample_field("metadata")
+    assert sample.metadata is None
+
+    dataset.compute_metadata(num_workers=1)
+    assert sample.metadata.height == _height
+    assert sample.metadata.width == _width
+
+
+def test_compute_video_metadata(tmpdir):
+    video_path = os.path.join(tmpdir, "video.mp4")
+    dataset_dir = os.path.join(tmpdir, "videos")
+
+    _height = 720
+    _width = 1280
+
+    _write_video(video_path, fps=5, size=(_height, _width), num_frames=30)
+    dataset = _make_dataset(video_path, dataset_dir, num_samples=10)
+
+    sample = dataset.first()
+    assert sample.metadata is None
+
+    dataset.compute_metadata()
+    assert sample.metadata.frame_height == _height
+    assert sample.metadata.frame_width == _width
+
+    dataset.compute_metadata(overwrite=True)
+    assert sample.metadata.frame_height == _height
+    assert sample.metadata.frame_width == _width
+
+    dataset.clear_sample_field("metadata")
+    assert sample.metadata is None
+
+    dataset.compute_metadata(num_workers=1)
+    assert sample.metadata.frame_height == _height
+    assert sample.metadata.frame_width == _width
+
+
+def test_transform_images(tmpdir):
+    image_path = os.path.join(tmpdir, "image.png")
+    dataset_dir = os.path.join(tmpdir, "images")
+
+    _write_image(image_path, size=(720, 1280))
+    dataset = _make_dataset(image_path, dataset_dir, num_samples=100)
+
+    sample = dataset.first()
+    assert sample.filepath.endswith(".png")
+
+    foui.reencode_images(dataset, ext=".jpg")
+    assert sample.filepath.endswith(".jpg")
+
+    foui.transform_images(dataset, ext=".jpg", force_reencode=True)
+    assert sample.filepath.endswith(".jpg")
+
+    foui.transform_images(dataset, max_size=(256, 256), num_workers=1)
+    dataset.compute_metadata(overwrite=True)
+    assert sample.metadata.height <= 256
+    assert sample.metadata.width <= 256
+
+    foui.transform_images(dataset, min_size=(512, 512), delete_originals=True)
+    dataset.compute_metadata(overwrite=True)
+    assert sample.metadata.height >= 512
+    assert sample.metadata.width >= 512
+
+
+def test_transform_videos(tmpdir):
+    video_path = os.path.join(tmpdir, "video.avi")
+    dataset_dir = os.path.join(tmpdir, "videos")
+
+    _write_video(video_path, fps=5, size=(720, 1280), num_frames=30)
+    dataset = _make_dataset(video_path, dataset_dir, num_samples=2)
+
+    sample = dataset.first()
+    assert sample.filepath.endswith(".avi")
+
+    fouv.reencode_videos(dataset)
+    assert sample.filepath.endswith(".mp4")
+
+    fouv.transform_videos(dataset, reencode=True, force_reencode=True)
+    assert sample.filepath.endswith(".mp4")
+
+    fouv.transform_videos(dataset, max_size=(256, 256))
+    dataset.compute_metadata(overwrite=True)
+    assert sample.metadata.frame_height <= 256
+    assert sample.metadata.frame_width <= 256
+
+    fouv.transform_videos(dataset, min_size=(512, 512), delete_originals=True)
+    dataset.compute_metadata(overwrite=True)
+    assert sample.metadata.frame_height >= 512
+    assert sample.metadata.frame_width >= 512
+
+
+if __name__ == "__main__":
+    fo.config.show_progress_bars = False
+    pytest.main([__file__])