Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Appearance settings

feat: update to Crawlee v0.6 #420

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 3 commits into from
Mar 4, 2025
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion 2 pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ keywords = [
dependencies = [
"apify-client>=1.9.2",
"apify-shared>=1.2.1",
"crawlee~=0.5.0",
"crawlee~=0.6.0",
"cryptography>=42.0.0",
"httpx>=0.27.0",
"lazy-object-proxy>=1.10.0",
Expand Down
4 changes: 2 additions & 2 deletions 4 src/apify/_actor.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
from typing_extensions import Self

from crawlee.proxy_configuration import _NewUrlFunction
from crawlee.storage_clients import BaseStorageClient
from crawlee.storage_clients import StorageClient

from apify._models import Webhook

Expand Down Expand Up @@ -171,7 +171,7 @@ def log(self) -> logging.Logger:
return logger

@property
def _local_storage_client(self) -> BaseStorageClient:
def _local_storage_client(self) -> StorageClient:
"""The local storage client the Actor instance uses."""
return service_locator.get_storage_client()

Expand Down
6 changes: 3 additions & 3 deletions 6 src/apify/apify_storage_client/_apify_storage_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@

from apify_client import ApifyClientAsync
from crawlee._utils.crypto import crypto_random_object_id
from crawlee.storage_clients import BaseStorageClient
from crawlee.storage_clients import StorageClient

from apify._utils import docs_group
from apify.apify_storage_client._dataset_client import DatasetClient
Expand All @@ -21,7 +21,7 @@


@docs_group('Classes')
class ApifyStorageClient(BaseStorageClient):
class ApifyStorageClient(StorageClient):
"""A storage client implementation based on the Apify platform storage."""

def __init__(self, *, configuration: Configuration) -> None:
Expand Down Expand Up @@ -68,5 +68,5 @@ async def purge_on_start(self) -> None:
pass

@override
def get_rate_limit_errors(self) -> dict[int, int]: # type: ignore[misc]
def get_rate_limit_errors(self) -> dict[int, int]:
return self._apify_client.stats.rate_limit_errors
2 changes: 1 addition & 1 deletion 2 src/apify/apify_storage_client/_dataset_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

from typing_extensions import override

from crawlee.storage_clients._base import BaseDatasetClient
from crawlee.storage_clients._base import DatasetClient as BaseDatasetClient
from crawlee.storage_clients.models import DatasetItemsListPage, DatasetMetadata

if TYPE_CHECKING:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

from typing_extensions import override

from crawlee.storage_clients._base import BaseDatasetCollectionClient
from crawlee.storage_clients._base import DatasetCollectionClient as BaseDatasetCollectionClient
from crawlee.storage_clients.models import DatasetListPage, DatasetMetadata

if TYPE_CHECKING:
Expand Down
2 changes: 1 addition & 1 deletion 2 src/apify/apify_storage_client/_key_value_store_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@

from typing_extensions import override

from crawlee.storage_clients._base import BaseKeyValueStoreClient
from crawlee.storage_clients._base import KeyValueStoreClient as BaseKeyValueStoreClient
from crawlee.storage_clients.models import KeyValueStoreListKeysPage, KeyValueStoreMetadata, KeyValueStoreRecord

if TYPE_CHECKING:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

from typing_extensions import override

from crawlee.storage_clients._base import BaseKeyValueStoreCollectionClient
from crawlee.storage_clients._base import KeyValueStoreCollectionClient as BaseKeyValueStoreCollectionClient
from crawlee.storage_clients.models import KeyValueStoreListPage, KeyValueStoreMetadata

if TYPE_CHECKING:
Expand Down
2 changes: 1 addition & 1 deletion 2 src/apify/apify_storage_client/_request_queue_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
from typing_extensions import override

from crawlee import Request
from crawlee.storage_clients._base import BaseRequestQueueClient
from crawlee.storage_clients._base import RequestQueueClient as BaseRequestQueueClient
from crawlee.storage_clients.models import (
BatchRequestsOperationResponse,
ProcessedRequest,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

from typing_extensions import override

from crawlee.storage_clients._base import BaseRequestQueueCollectionClient
from crawlee.storage_clients._base import RequestQueueCollectionClient as BaseRequestQueueCollectionClient
from crawlee.storage_clients.models import RequestQueueListPage, RequestQueueMetadata

if TYPE_CHECKING:
Expand Down
8 changes: 4 additions & 4 deletions 8 src/apify/storages/_request_list.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@

from crawlee import Request
from crawlee._types import HttpMethod
from crawlee.http_clients import BaseHttpClient, HttpxHttpClient
from crawlee.http_clients import HttpClient, HttpxHttpClient
from crawlee.request_loaders import RequestList as CrawleeRequestList

from apify._utils import docs_group
Expand Down Expand Up @@ -49,7 +49,7 @@ class RequestList(CrawleeRequestList):
async def open(
name: str | None = None,
request_list_sources_input: list[dict[str, Any]] | None = None,
http_client: BaseHttpClient | None = None,
http_client: HttpClient | None = None,
) -> RequestList:
"""Creates RequestList from Actor input requestListSources.

Expand Down Expand Up @@ -78,7 +78,7 @@ async def open(

@staticmethod
async def _create_request_list(
name: str | None, request_list_sources_input: list[dict[str, Any]], http_client: BaseHttpClient | None
name: str | None, request_list_sources_input: list[dict[str, Any]], http_client: HttpClient | None
) -> RequestList:
if not http_client:
http_client = HttpxHttpClient()
Expand Down Expand Up @@ -108,7 +108,7 @@ def _create_requests_from_input(simple_url_inputs: list[_SimpleUrlInput]) -> lis

@staticmethod
async def _fetch_requests_from_url(
remote_url_requests_inputs: list[_RequestsFromUrlInput], http_client: BaseHttpClient
remote_url_requests_inputs: list[_RequestsFromUrlInput], http_client: HttpClient
) -> list[Request]:
    """Create list of requests from URL.

Expand Down
Loading
Loading
Morty Proxy This is a proxified and sanitized view of the page, visit original site.