Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,7 @@ dev = [
"pydoc-markdown<5.0.0",
"pytest-asyncio<2.0.0",
"pytest-cov<8.0.0",
"pytest-rerunfailures<17.0.0",
"pytest-timeout<3.0.0",
"pytest-xdist<4.0.0",
"pytest<9.0.0",
Expand Down
5 changes: 2 additions & 3 deletions tests/unit/_autoscaling/test_autoscaled_pool.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
from __future__ import annotations

import asyncio
import sys
from contextlib import suppress
from datetime import datetime, timedelta, timezone
from itertools import chain, repeat
Expand Down Expand Up @@ -145,8 +144,8 @@ async def run() -> None:
await pool.run()


@pytest.mark.skipif(
sys.platform != 'linux',
@pytest.mark.flaky(
    reruns=3,
reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1655.',
)
async def test_autoscales(
Expand Down
5 changes: 2 additions & 3 deletions tests/unit/browsers/test_browser_pool.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
from __future__ import annotations

import sys
from typing import TYPE_CHECKING

import pytest
Expand Down Expand Up @@ -63,8 +62,8 @@ async def test_multiple_plugins_new_page_creation(server_url: URL) -> None:
assert browser_pool.total_pages_count == 3


@pytest.mark.skipif(
sys.platform != 'linux',
@pytest.mark.flaky(
    reruns=3,
reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1660.',
)
async def test_new_page_with_each_plugin(server_url: URL) -> None:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@

import asyncio
import logging
import sys
from dataclasses import dataclass
from datetime import timedelta
from itertools import cycle
Expand Down Expand Up @@ -607,8 +606,8 @@ async def request_handler(context: AdaptivePlaywrightCrawlingContext) -> None:
mocked_h2_handler.assert_has_calls([call(expected_h2_tag)])


@pytest.mark.skipif(
sys.platform != 'linux',
@pytest.mark.flaky(
    reruns=3,
reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1650.',
)
async def test_adaptive_context_query_selector_parsel(test_urls: list[str]) -> None:
Expand Down
16 changes: 10 additions & 6 deletions tests/unit/crawlers/_basic/test_basic_crawler.py
Original file line number Diff line number Diff line change
Expand Up @@ -1302,9 +1302,8 @@ async def handler(context: BasicCrawlingContext) -> None:


@pytest.mark.run_alone
@pytest.mark.skipif(
sys.platform != 'linux',
reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1652.',
@pytest.mark.flaky(
reruns=3, reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1652.'
)
@pytest.mark.skipif(sys.version_info[:3] < (3, 11), reason='asyncio.timeout was introduced in Python 3.11.')
@pytest.mark.parametrize(
Expand All @@ -1328,7 +1327,11 @@ async def test_timeout_in_handler(sleep_type: str) -> None:
double_handler_timeout_s = handler_timeout.total_seconds() * 2
handler_sleep = iter([double_handler_timeout_s, double_handler_timeout_s, 0])

crawler = BasicCrawler(request_handler_timeout=handler_timeout, max_request_retries=max_request_retries)
crawler = BasicCrawler(
request_handler_timeout=handler_timeout,
max_request_retries=max_request_retries,
storage_client=MemoryStorageClient(),
)

mocked_handler_before_sleep = Mock()
mocked_handler_after_sleep = Mock()
Expand All @@ -1355,8 +1358,8 @@ async def handler(context: BasicCrawlingContext) -> None:
assert mocked_handler_after_sleep.call_count == 1


@pytest.mark.skipif(
sys.platform != 'linux',
@pytest.mark.flaky(
reruns=3,
reason='Test is flaky on Windows and MacOS, see https://github.com/apify/crawlee-python/issues/1649.',
)
@pytest.mark.parametrize(
Expand All @@ -1381,6 +1384,7 @@ async def test_keep_alive(
max_requests_per_crawl=max_requests_per_crawl,
# If more request can run in parallel, then max_requests_per_crawl is not deterministic.
concurrency_settings=ConcurrencySettings(desired_concurrency=1, max_concurrency=1),
storage_client=MemoryStorageClient(),
)
mocked_handler = Mock()

Expand Down
15 changes: 15 additions & 0 deletions uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading