diff --git a/pyproject.toml b/pyproject.toml
index 54fecf8ce4..1fea5a0729 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -107,6 +107,7 @@ dev = [
     "pydoc-markdown<5.0.0",
     "pytest-asyncio<2.0.0",
     "pytest-cov<8.0.0",
+    "pytest-rerunfailures<17.0.0",
     "pytest-timeout<3.0.0",
     "pytest-xdist<4.0.0",
     "pytest<9.0.0",
diff --git a/tests/unit/_autoscaling/test_autoscaled_pool.py b/tests/unit/_autoscaling/test_autoscaled_pool.py
index 645cd12cdd..5995d86556 100644
--- a/tests/unit/_autoscaling/test_autoscaled_pool.py
+++ b/tests/unit/_autoscaling/test_autoscaled_pool.py
@@ -3,7 +3,6 @@
 from __future__ import annotations
 
 import asyncio
-import sys
 from contextlib import suppress
 from datetime import datetime, timedelta, timezone
 from itertools import chain, repeat
@@ -145,8 +144,8 @@ async def run() -> None:
         await pool.run()
 
 
-@pytest.mark.skipif(
-    sys.platform != 'linux',
+@pytest.mark.flaky(
+    reruns=3,
     reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1655.',
 )
 async def test_autoscales(
diff --git a/tests/unit/browsers/test_browser_pool.py b/tests/unit/browsers/test_browser_pool.py
index 2c38d3b601..51b05339d2 100644
--- a/tests/unit/browsers/test_browser_pool.py
+++ b/tests/unit/browsers/test_browser_pool.py
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-import sys
 from typing import TYPE_CHECKING
 
 import pytest
@@ -63,8 +62,8 @@ async def test_multiple_plugins_new_page_creation(server_url: URL) -> None:
         assert browser_pool.total_pages_count == 3
 
 
-@pytest.mark.skipif(
-    sys.platform != 'linux',
+@pytest.mark.flaky(
+    reruns=3,
     reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1660.',
 )
 async def test_new_page_with_each_plugin(server_url: URL) -> None:
diff --git a/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py b/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py
index 4aedeff2eb..b883324c32 100644
--- a/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py
+++ b/tests/unit/crawlers/_adaptive_playwright/test_adaptive_playwright_crawler.py
@@ -2,7 +2,6 @@
 
 import asyncio
 import logging
-import sys
 from dataclasses import dataclass
 from datetime import timedelta
 from itertools import cycle
@@ -607,8 +606,8 @@ async def request_handler(context: AdaptivePlaywrightCrawlingContext) -> None:
     mocked_h2_handler.assert_has_calls([call(expected_h2_tag)])
 
 
-@pytest.mark.skipif(
-    sys.platform != 'linux',
+@pytest.mark.flaky(
+    reruns=3,
     reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1650.',
 )
 async def test_adaptive_context_query_selector_parsel(test_urls: list[str]) -> None:
diff --git a/tests/unit/crawlers/_basic/test_basic_crawler.py b/tests/unit/crawlers/_basic/test_basic_crawler.py
index 5800e59d4e..1ff6d8a201 100644
--- a/tests/unit/crawlers/_basic/test_basic_crawler.py
+++ b/tests/unit/crawlers/_basic/test_basic_crawler.py
@@ -1302,9 +1302,8 @@ async def handler(context: BasicCrawlingContext) -> None:
 
 
 @pytest.mark.run_alone
-@pytest.mark.skipif(
-    sys.platform != 'linux',
-    reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1652.',
+@pytest.mark.flaky(
+    reruns=3, reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1652.'
 )
 @pytest.mark.skipif(sys.version_info[:3] < (3, 11), reason='asyncio.timeout was introduced in Python 3.11.')
 @pytest.mark.parametrize(
@@ -1328,7 +1327,11 @@ async def test_timeout_in_handler(sleep_type: str) -> None:
     double_handler_timeout_s = handler_timeout.total_seconds() * 2
     handler_sleep = iter([double_handler_timeout_s, double_handler_timeout_s, 0])
 
-    crawler = BasicCrawler(request_handler_timeout=handler_timeout, max_request_retries=max_request_retries)
+    crawler = BasicCrawler(
+        request_handler_timeout=handler_timeout,
+        max_request_retries=max_request_retries,
+        storage_client=MemoryStorageClient(),
+    )
 
     mocked_handler_before_sleep = Mock()
     mocked_handler_after_sleep = Mock()
@@ -1355,8 +1358,8 @@ async def handler(context: BasicCrawlingContext) -> None:
     assert mocked_handler_after_sleep.call_count == 1
 
 
-@pytest.mark.skipif(
-    sys.platform != 'linux',
+@pytest.mark.flaky(
+    reruns=3,
     reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1649.',
 )
 @pytest.mark.parametrize(
@@ -1381,6 +1384,7 @@ async def test_keep_alive(
         max_requests_per_crawl=max_requests_per_crawl,
         # If more request can run in parallel, then max_requests_per_crawl is not deterministic.
         concurrency_settings=ConcurrencySettings(desired_concurrency=1, max_concurrency=1),
+        storage_client=MemoryStorageClient(),
     )
 
     mocked_handler = Mock()
diff --git a/uv.lock b/uv.lock
index 01edf5feda..0da466547f 100644
--- a/uv.lock
+++ b/uv.lock
@@ -817,6 +817,7 @@ dev = [
     { name = "pytest" },
     { name = "pytest-asyncio" },
     { name = "pytest-cov" },
+    { name = "pytest-rerunfailures" },
     { name = "pytest-timeout" },
     { name = "pytest-xdist" },
     { name = "ruff" },
@@ -892,6 +893,7 @@ dev = [
     { name = "pytest", specifier = "<9.0.0" },
     { name = "pytest-asyncio", specifier = "<2.0.0" },
     { name = "pytest-cov", specifier = "<8.0.0" },
+    { name = "pytest-rerunfailures", specifier = "<17.0.0" },
     { name = "pytest-timeout", specifier = "<3.0.0" },
     { name = "pytest-xdist", specifier = "<4.0.0" },
     { name = "ruff", specifier = "~=0.14.0" },
@@ -2989,6 +2991,19 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
 ]
 
+[[package]]
+name = "pytest-rerunfailures"
+version = "16.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "packaging" },
+    { name = "pytest" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/de/04/71e9520551fc8fe2cf5c1a1842e4e600265b0815f2016b7c27ec85688682/pytest_rerunfailures-16.1.tar.gz", hash = "sha256:c38b266db8a808953ebd71ac25c381cb1981a78ff9340a14bcb9f1b9bff1899e", size = 30889, upload-time = "2025-10-10T07:06:01.238Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/77/54/60eabb34445e3db3d3d874dc1dfa72751bfec3265bd611cb13c8b290adea/pytest_rerunfailures-16.1-py3-none-any.whl", hash = "sha256:5d11b12c0ca9a1665b5054052fcc1084f8deadd9328962745ef6b04e26382e86", size = 14093, upload-time = "2025-10-10T07:06:00.019Z" },
+]
+
 [[package]]
 name = "pytest-timeout"
 version = "2.4.0"