Regression test for missing frames after exporting a CVAT dataset #8827

Merged
12 commits merged on Dec 19, 2024
linted changes with Black
Oleg Valiulin committed Dec 13, 2024
commit 7dfe012843d022483c5329b5c544611b1994ac52
44 changes: 21 additions & 23 deletions tests/python/rest_api/test_tasks.py
@@ -1013,16 +1013,16 @@ def test_datumaro_export_without_annotations_includes_image_info(
assert tuple(related_image["size"]) > (0, 0)

@pytest.mark.parametrize(
"format_name, num_frames, frame_step, expected_frames",
[
("Datumaro 1.0", 100, 5, 20),
("COCO 1.0", 100, 5, 20),
("CVAT for video 1.1", 100, 5, 20), # no remainder
("CVAT for video 1.1", 97, 5, 20), # prime
("CVAT for video 1.1", 97, 2, 49),
("CVAT for video 1.1", 100, 3, 34), # three
# we assert that expected frames are ceil(frames / step)
]
"format_name, num_frames, frame_step, expected_frames",
[
("Datumaro 1.0", 100, 5, 20),
("COCO 1.0", 100, 5, 20),
("CVAT for video 1.1", 100, 5, 20), # no remainder
("CVAT for video 1.1", 97, 5, 20), # prime
("CVAT for video 1.1", 97, 2, 49),
("CVAT for video 1.1", 100, 3, 34), # three
zhiltsov-max marked this conversation as resolved.
Show resolved Hide resolved
# we assert that expected frames are ceil(frames / step)
],
zhiltsov-max marked this conversation as resolved.
Show resolved Hide resolved
)
def test_export_with_non_default_frame_step(
self,
@@ -1036,34 +1036,34 @@ def test_export_with_non_default_frame_step(
expected_frames: int,
):
# parameter validation
-assert expected_frames == math.ceil(num_frames / frame_step), 'Test params are wrong'
+assert expected_frames == math.ceil(num_frames / frame_step), "Test params are wrong"

spec = {
'name': f"test_video_frames_in_{format_name}_after_export",
"labels": [{"name": "goofy ahh car"}]
"name": f"test_video_frames_in_{format_name}_after_export",
"labels": [{"name": "goofy ahh car"}],
}

data = {
-'image_quality': 70,
-'client_files': [generate_video_file(num_frames)],
-'frame_filter': f"step={frame_step}"
+"image_quality": 70,
+"client_files": [generate_video_file(num_frames)],
+"frame_filter": f"step={frame_step}",
}

# create a task and get its instance
(task_id, _) = create_task(admin_user, spec, data)
with make_sdk_client(admin_user) as client:
-task_obj: Task = client.tasks.retrieve(task_id)
+task_obj: Task = client.tasks.retrieve(task_id)

# export the video
dataset_file = tmp_path / "dataset.zip"
task_obj.export_dataset(format_name, dataset_file, include_images=True)

def get_png_index(zinfo: zipfile.ZipInfo) -> int:
name = PurePosixPath(zinfo.filename)
-if name.suffix.lower() != '.png':
+if name.suffix.lower() != ".png":
return -1
name = os.path.basename(name).removesuffix(name.suffix)
-idx = name[name.rfind('_') + 1:]
+idx = name[name.rfind("_") + 1 :]
assert idx.isnumeric()
return int(idx)

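Note on the helper above: get_png_index takes the frame number from the trailing "_<number>" in each PNG entry's stem and returns -1 for non-PNG entries. A minimal sketch of that suffix parsing, using a hypothetical archive entry name (the real names depend on the export format and are not shown in this diff):

import os
from pathlib import PurePosixPath

name = PurePosixPath("images/frame_000015.png")  # hypothetical entry name
stem = os.path.basename(name).removesuffix(name.suffix)  # "frame_000015"
idx = stem[stem.rfind("_") + 1 :]  # "000015"
assert idx.isnumeric()
print(int(idx))  # 15
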
@@ -1072,10 +1072,8 @@ def get_png_index(zinfo: zipfile.ZipInfo) -> int:
frames = [png_idx for png_idx in map(get_png_index, dataset.filelist) if png_idx != -1]
frames.sort()


-assert len(frames) == expected_frames, 'Some frames were lost'
-assert frames == list(range(0, num_frames, frame_step)), 'Some frames are wrong'
-
+assert len(frames) == expected_frames, "Some frames were lost"
+assert frames == list(range(0, num_frames, frame_step)), "Some frames are wrong"


@pytest.mark.usefixtures("restore_db_per_function")
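
As the comment in the parametrization notes, exporting with frame_filter "step=<frame_step>" should keep frames 0, frame_step, 2 * frame_step, ..., i.e. ceil(num_frames / frame_step) frames in total. A quick standalone check of the video cases parametrized above:

import math

cases = [(100, 5, 20), (97, 5, 20), (97, 2, 49), (100, 3, 34)]
for num_frames, frame_step, expected_frames in cases:
    kept = list(range(0, num_frames, frame_step))  # frame indices kept by the step filter
    assert len(kept) == math.ceil(num_frames / frame_step) == expected_frames

This is the same relation the final two assertions in the test verify against the exported archive.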