From 53a3aaa9390cf98179380c8c2202f16426ee7831 Mon Sep 17 00:00:00 2001 From: mhh Date: Fri, 23 Feb 2024 10:57:10 +0100 Subject: [PATCH 001/122] Fix falsely named INSTANCE docs --- src/aleph/sdk/client/abstract.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/aleph/sdk/client/abstract.py b/src/aleph/sdk/client/abstract.py index 3335ad86..b8ce2038 100644 --- a/src/aleph/sdk/client/abstract.py +++ b/src/aleph/sdk/client/abstract.py @@ -379,7 +379,7 @@ async def create_instance( metadata: Optional[Mapping[str, Any]] = None, ) -> Tuple[AlephMessage, MessageStatus]: """ - Post a (create) PROGRAM message. + Post a (create) INSTANCE message. :param rootfs: Root filesystem to use :param rootfs_size: Size of root filesystem From 16de0ba87de8deaeaf5fbd1d01175f2dae316a13 Mon Sep 17 00:00:00 2001 From: nesitor Date: Wed, 6 Mar 2024 16:17:38 +0100 Subject: [PATCH 002/122] Implement Hypervisor field on instances (#107) * Problem: Instances with Qemu hypervisor cannot be created. Solution: Implement hypervisor field into create_instance method * Fix: Add test case for hypervisor field * Fix: Solved code quality issues * Fix: Solve code quality issues on tests also * Fix: Fixed incorrect import on abstract interface * Fix: Fixed incorrect import on abstract interface * Fix: Upgraded black version and refformatted issues * Fix: Solved issue in wrong field to test. 
--- src/aleph/sdk/client/abstract.py | 2 ++ src/aleph/sdk/client/authenticated_http.py | 4 ++++ tests/unit/test_asynchronous.py | 24 +++++++++++++++++++++- 3 files changed, 29 insertions(+), 1 deletion(-) diff --git a/src/aleph/sdk/client/abstract.py b/src/aleph/sdk/client/abstract.py index b8ce2038..db98d3ac 100644 --- a/src/aleph/sdk/client/abstract.py +++ b/src/aleph/sdk/client/abstract.py @@ -23,6 +23,7 @@ Payment, PostMessage, ) +from aleph_message.models.execution.environment import HypervisorType from aleph_message.models.execution.program import Encoding from aleph_message.status import MessageStatus @@ -373,6 +374,7 @@ async def create_instance( allow_amend: bool = False, internet: bool = True, aleph_api: bool = True, + hypervisor: Optional[HypervisorType] = None, volumes: Optional[List[Mapping]] = None, volume_persistence: str = "host", ssh_keys: Optional[List[str]] = None, diff --git a/src/aleph/sdk/client/authenticated_http.py b/src/aleph/sdk/client/authenticated_http.py index cf75d986..f4f89ba1 100644 --- a/src/aleph/sdk/client/authenticated_http.py +++ b/src/aleph/sdk/client/authenticated_http.py @@ -29,6 +29,7 @@ from aleph_message.models.execution.base import Encoding, Payment, PaymentType from aleph_message.models.execution.environment import ( FunctionEnvironment, + HypervisorType, MachineResources, ) from aleph_message.models.execution.instance import RootfsVolume @@ -520,6 +521,7 @@ async def create_instance( allow_amend: bool = False, internet: bool = True, aleph_api: bool = True, + hypervisor: Optional[HypervisorType] = None, volumes: Optional[List[Mapping]] = None, volume_persistence: str = "host", ssh_keys: Optional[List[str]] = None, @@ -533,6 +535,7 @@ async def create_instance( timeout_seconds = timeout_seconds or settings.DEFAULT_VM_TIMEOUT payment = payment or Payment(chain=Chain.ETH, type=PaymentType.hold) + hypervisor = hypervisor or HypervisorType.firecracker content = InstanceContent( address=address, @@ -541,6 +544,7 @@ async def 
create_instance( reproducible=False, internet=internet, aleph_api=aleph_api, + hypervisor=hypervisor, ), variables=environment_variables, resources=MachineResources( diff --git a/tests/unit/test_asynchronous.py b/tests/unit/test_asynchronous.py index ef8b67ca..0981ad19 100644 --- a/tests/unit/test_asynchronous.py +++ b/tests/unit/test_asynchronous.py @@ -14,7 +14,7 @@ ProgramMessage, StoreMessage, ) -from aleph_message.models.execution.environment import MachineResources +from aleph_message.models.execution.environment import HypervisorType, MachineResources from aleph_message.status import MessageStatus from aleph.sdk.exceptions import InsufficientFundsError @@ -116,6 +116,7 @@ async def test_create_instance(mock_session_with_post_success): receiver="0x4145f182EF2F06b45E50468519C1B92C60FBd4A0", type=PaymentType.superfluid, ), + hypervisor=HypervisorType.qemu, ) assert mock_session_with_post_success.http_session.post.called_once @@ -144,6 +145,27 @@ async def test_create_instance_no_payment(mock_session_with_post_success): assert isinstance(instance_message, InstanceMessage) +@pytest.mark.asyncio +async def test_create_instance_no_hypervisor(mock_session_with_post_success): + """Test that an instance can be created with no hypervisor specified. + It should in this case default to "firecracker". 
+ """ + async with mock_session_with_post_success as session: + instance_message, message_status = await session.create_instance( + rootfs="cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe", + rootfs_size=1, + rootfs_name="rootfs", + channel="TEST", + metadata={"tags": ["test"]}, + hypervisor=None, + ) + + assert instance_message.content.environment.hypervisor == HypervisorType.firecracker + + assert mock_session_with_post_success.http_session.post.called_once + assert isinstance(instance_message, InstanceMessage) + + @pytest.mark.asyncio async def test_forget(mock_session_with_post_success): async with mock_session_with_post_success as session: From 04a053ac23956e4f4dc791a91df91be936082bc7 Mon Sep 17 00:00:00 2001 From: Mike Hukiewitz <70762838+MHHukiewitz@users.noreply.github.com> Date: Wed, 13 Mar 2024 12:42:38 +0100 Subject: [PATCH 003/122] Loosen eth-abi dependency (#108) * Loosen eth-abi dependency * Fix ethereum install --- setup.cfg | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index de505203..a41e52f4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -41,7 +41,7 @@ install_requires = aleph-message~=0.4.3 eth_account>=0.4.0 # Required to fix a dependency issue with parsimonious and Python3.11 - eth_abi==4.0.0b2; python_version>="3.11" + eth_abi>=4.0.0; python_version>="3.11" python-magic # The usage of test_requires is discouraged, see `Dependency Management` docs # tests_require = pytest; pytest-cov @@ -94,7 +94,7 @@ nuls2 = ethereum = eth_account>=0.4.0 # Required to fix a dependency issue with parsimonious and Python3.11 - eth_abi==4.0.0b2; python_version>="3.11" + eth_abi>=4.0.0; python_version>="3.11" polkadot = substrate-interface py-sr25519-bindings From 7e452faebffbcc9b3eae5a52f81b577714a68e61 Mon Sep 17 00:00:00 2001 From: nesitor Date: Wed, 13 Mar 2024 19:34:38 +0100 Subject: [PATCH 004/122] Removed `rootfs_name` instance creation field (#112) * Fix: Removed useless instance creation fields. 
* Fix: Removed field from abstract class * Fix: Solve test issues --- src/aleph/sdk/client/abstract.py | 2 -- src/aleph/sdk/client/authenticated_http.py | 7 ------- tests/unit/test_asynchronous.py | 4 ---- 3 files changed, 13 deletions(-) diff --git a/src/aleph/sdk/client/abstract.py b/src/aleph/sdk/client/abstract.py index db98d3ac..a4047dfa 100644 --- a/src/aleph/sdk/client/abstract.py +++ b/src/aleph/sdk/client/abstract.py @@ -361,7 +361,6 @@ async def create_instance( self, rootfs: str, rootfs_size: int, - rootfs_name: str, payment: Optional[Payment] = None, environment_variables: Optional[Mapping[str, str]] = None, storage_engine: StorageEnum = StorageEnum.storage, @@ -385,7 +384,6 @@ async def create_instance( :param rootfs: Root filesystem to use :param rootfs_size: Size of root filesystem - :param rootfs_name: Name of root filesystem :param payment: Payment method used to pay for the instance :param environment_variables: Environment variables to pass to the program :param storage_engine: Storage engine to use (Default: "storage") diff --git a/src/aleph/sdk/client/authenticated_http.py b/src/aleph/sdk/client/authenticated_http.py index f4f89ba1..315d2ea6 100644 --- a/src/aleph/sdk/client/authenticated_http.py +++ b/src/aleph/sdk/client/authenticated_http.py @@ -508,7 +508,6 @@ async def create_instance( self, rootfs: str, rootfs_size: int, - rootfs_name: str, payment: Optional[Payment] = None, environment_variables: Optional[Mapping[str, str]] = None, storage_engine: StorageEnum = StorageEnum.storage, @@ -557,15 +556,9 @@ async def create_instance( ref=rootfs, use_latest=True, ), - name=rootfs_name, size_mib=rootfs_size, persistence="host", use_latest=True, - comment=( - "Official Aleph Debian root filesystem" - if rootfs == settings.DEFAULT_RUNTIME_ID - else "" - ), ), volumes=[parse_volume(volume) for volume in volumes], time=time.time(), diff --git a/tests/unit/test_asynchronous.py b/tests/unit/test_asynchronous.py index 0981ad19..f31274fd 100644 --- 
a/tests/unit/test_asynchronous.py +++ b/tests/unit/test_asynchronous.py @@ -108,7 +108,6 @@ async def test_create_instance(mock_session_with_post_success): instance_message, message_status = await session.create_instance( rootfs="cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe", rootfs_size=1, - rootfs_name="rootfs", channel="TEST", metadata={"tags": ["test"]}, payment=Payment( @@ -132,7 +131,6 @@ async def test_create_instance_no_payment(mock_session_with_post_success): instance_message, message_status = await session.create_instance( rootfs="cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe", rootfs_size=1, - rootfs_name="rootfs", channel="TEST", metadata={"tags": ["test"]}, payment=None, @@ -154,7 +152,6 @@ async def test_create_instance_no_hypervisor(mock_session_with_post_success): instance_message, message_status = await session.create_instance( rootfs="cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe", rootfs_size=1, - rootfs_name="rootfs", channel="TEST", metadata={"tags": ["test"]}, hypervisor=None, @@ -248,7 +245,6 @@ async def test_create_instance_insufficient_funds_error( await session.create_instance( rootfs="cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe", rootfs_size=1, - rootfs_name="rootfs", channel="TEST", metadata={"tags": ["test"]}, payment=Payment( From 8503c3b6b73902eca1f61698e7d447a1c4ae0b5d Mon Sep 17 00:00:00 2001 From: Mike Hukiewitz <70762838+MHHukiewitz@users.noreply.github.com> Date: Thu, 14 Mar 2024 11:15:39 +0100 Subject: [PATCH 005/122] Deprecate encryption methods and make eciespy dependency optional (#110) * Deprecate encryption methods and make eciespy dependency optional eciespy is not actively maintained anymore and may cause problems when installed alongside newer versions of web3py and ethereum-related packages * Fix formatting and add encryption dependencies to testing dependencies --- setup.cfg | 7 +++++-- src/aleph/sdk/chains/common.py | 16 +++++++++++++++- 2 
files changed, 20 insertions(+), 3 deletions(-) diff --git a/setup.cfg b/setup.cfg index a41e52f4..1d71a697 100644 --- a/setup.cfg +++ b/setup.cfg @@ -34,8 +34,6 @@ install_requires = coincurve; python_version<"3.11" coincurve>=17.0.0; python_version>="3.11" # Technically, this should be >=18.0.0 but there is a conflict with eciespy aiohttp>=3.8.3 - eciespy; python_version<"3.11" - eciespy>=0.3.13; python_version>="3.11" typing_extensions typer aleph-message~=0.4.3 @@ -83,6 +81,8 @@ testing = py-sr25519-bindings ledgereth==0.9.0 aiodns + eciespy; python_version<"3.11" + eciespy>=0.3.13; python_version>="3.11" dns = aiodns mqtt = @@ -110,6 +110,9 @@ ledger = ledgereth==0.9.0 docs = sphinxcontrib-plantuml +encryption = + eciespy; python_version<"3.11" + eciespy>=0.3.13; python_version>="3.11" [options.entry_points] # Add here console scripts like: diff --git a/src/aleph/sdk/chains/common.py b/src/aleph/sdk/chains/common.py index 3c7e634e..b73d6e41 100644 --- a/src/aleph/sdk/chains/common.py +++ b/src/aleph/sdk/chains/common.py @@ -4,7 +4,7 @@ from typing import Dict, Optional from coincurve.keys import PrivateKey -from ecies import decrypt, encrypt +from typing_extensions import deprecated from aleph.sdk.conf import settings from aleph.sdk.utils import enum_as_str @@ -100,6 +100,7 @@ def get_public_key(self) -> str: """ raise NotImplementedError + @deprecated("This method will be moved to its own module `aleph.sdk.encryption`") async def encrypt(self, content: bytes) -> bytes: """ Encrypts a message using the account's public key. 
@@ -108,12 +109,19 @@ async def encrypt(self, content: bytes) -> bytes: Returns: bytes: Encrypted content as bytes """ + try: + from ecies import encrypt + except ImportError: + raise ImportError( + "Install `eciespy` or `aleph-sdk-python[encryption]` to use this method" + ) if self.CURVE == "secp256k1": value: bytes = encrypt(self.get_public_key(), content) return value else: raise NotImplementedError + @deprecated("This method will be moved to its own module `aleph.sdk.encryption`") async def decrypt(self, content: bytes) -> bytes: """ Decrypts a message using the account's private key. @@ -122,6 +130,12 @@ async def decrypt(self, content: bytes) -> bytes: Returns: bytes: Decrypted content as bytes """ + try: + from ecies import decrypt + except ImportError: + raise ImportError( + "Install `eciespy` or `aleph-sdk-python[encryption]` to use this method" + ) if self.CURVE == "secp256k1": value: bytes = decrypt(self.private_key, content) return value From 3d1452f7eeb996429ba6238a8675c5eb22c9890b Mon Sep 17 00:00:00 2001 From: Mike Hukiewitz <70762838+MHHukiewitz@users.noreply.github.com> Date: Fri, 15 Mar 2024 18:12:45 +0100 Subject: [PATCH 006/122] Use Hatch for Project Management (#111) * Setup hatch for project & dependencies management * Fix build targets * Add Mike as author * Remove unneeded files * Add missing configs from setup.py * Update README.md to reflect latest changes and use hatch in setup * Add Flake8-pyproject plugin to enable flake8 configuration through pyproject.toml; Add ethereum deps to "testing" deps; Update workflows to only use `pip install -e .[testing]` * Revert faulty formatting * Move mypy.ini to pyproject.toml and make it lean * Remove need for mypy.ini in workflow * Fix mypy config * Adding py.typed marker and automatically install type stubs when running mypy * Use LICENCE.txt file in pyproject.toml * Add classifiers * Use VCS version (git tag) * Add envs for testing and linting * Merge optional encryption dependencies from #110 * 
Fix test and coverage; remove need for docker files in pytest workflow * Fix workflows * Further improve hatch scripts and workflows * Fix python-version matrix * Fix missing test dependencies * Fix missing test dependencies for Python 3.12 and further improve workflow * Remove Python 3.12 from supported versions * Update pyproject.toml Co-authored-by: Hugo Herter * Update pyproject.toml Co-authored-by: Hugo Herter * Unify script/env names to equal aleph-vm conventions * Fix codecov action param * Add coverage.py for the codecov action --------- Co-authored-by: Hugo Herter --- .coveragerc | 31 ---- .github/workflows/build-wheels.yml | 4 +- .github/workflows/code-quality.yml | 37 ++--- .github/workflows/pytest-docker.yml | 38 ----- .github/workflows/pytest.yml | 33 ++++ AUTHORS.rst | 1 + CHANGELOG.rst | 8 - README.md | 62 ++++++-- docker/python-3.10.dockerfile | 39 ----- docker/python-3.11.dockerfile | 39 ----- docker/python-3.9.dockerfile | 39 ----- docker/ubuntu-20.04.dockerfile | 44 ----- docker/ubuntu-22.04.dockerfile | 44 ----- docker/with-ipfs.dockerfile | 2 +- mypy.ini | 67 -------- pyproject.toml | 239 ++++++++++++++++++++++++++++ setup.cfg | 181 --------------------- setup.py | 23 --- src/aleph/py.typed | 0 19 files changed, 340 insertions(+), 591 deletions(-) delete mode 100644 .coveragerc delete mode 100644 .github/workflows/pytest-docker.yml create mode 100644 .github/workflows/pytest.yml delete mode 100644 CHANGELOG.rst delete mode 100644 docker/python-3.10.dockerfile delete mode 100644 docker/python-3.11.dockerfile delete mode 100644 docker/python-3.9.dockerfile delete mode 100644 docker/ubuntu-20.04.dockerfile delete mode 100644 docker/ubuntu-22.04.dockerfile delete mode 100644 mypy.ini create mode 100644 pyproject.toml delete mode 100644 setup.cfg delete mode 100644 setup.py create mode 100644 src/aleph/py.typed diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index bf5bfda2..00000000 --- a/.coveragerc +++ /dev/null @@ -1,31 +0,0 @@ -# 
.coveragerc to control coverage.py -[run] -branch = True -source = aleph.sdk -# omit = bad_file.py - -[paths] -source = - src/ - */site-packages/ - -[report] -# Regexes for lines to exclude from consideration -exclude_lines = - # Have to re-enable the standard pragma - pragma: no cover - - # Don't complain about missing debug-only code: - def __repr__ - if self\.debug - - # Don't complain if tests don't hit defensive assertion code: - raise AssertionError - raise NotImplementedError - - # Don't complain if non-runnable code isn't run: - if 0: - if __name__ == .__main__.: - - # Don't complain about ineffective code: - pass diff --git a/.github/workflows/build-wheels.yml b/.github/workflows/build-wheels.yml index 241d738c..ced2a661 100644 --- a/.github/workflows/build-wheels.yml +++ b/.github/workflows/build-wheels.yml @@ -16,7 +16,7 @@ jobs: runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Workaround github issue https://github.com/actions/runner-images/issues/7192 if: startsWith(matrix.os, 'ubuntu-') @@ -29,7 +29,7 @@ jobs: python-version: 3.11 - name: Cache dependencies - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/.cache/pip key: ${{ runner.os }}-build-wheels-${{ hashFiles('setup.cfg', 'setup.py') }} diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml index 6576a34e..79a3ac5b 100644 --- a/.github/workflows/code-quality.yml +++ b/.github/workflows/code-quality.yml @@ -11,41 +11,26 @@ jobs: runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Workaround github issue https://github.com/actions/runner-images/issues/7192 run: sudo echo RESET grub-efi/install_devices | sudo debconf-communicate grub-pc + - name: Install pip and hatch + run: | + sudo apt-get install -y python3-pip + pip3 install hatch + - name: Cache dependencies - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: ~/.cache/pip - key: ${{ runner.os 
}}-code-quality-${{ hashFiles('setup.cfg', 'setup.py') }} + key: ${{ runner.os }}-code-quality-${{ hashFiles('pyproject.toml') }} restore-keys: | ${{ runner.os }}-code-quality- - name: Install required system packages only for Ubuntu Linux - run: | - sudo apt-get update - sudo apt-get -y upgrade - sudo apt-get install -y libsecp256k1-dev - - - name: Install required Python packages - run: | - python3 -m pip install -e .[testing,ethereum] - - - name: Test with Black - run: | - black --check ./src/ ./tests/ ./examples/ - - - name: Test with isort - run: | - isort --check-only ./src/ ./tests/ ./examples/ + run: sudo apt-get install -y libsecp256k1-dev - - name: Test with MyPy - run: | - mypy --config-file ./mypy.ini ./src/ ./tests/ ./examples/ - - - name: Test with flake8 - run: | - flake8 ./src/ ./tests/ ./examples/ + - name: Run Hatch lint + run: hatch run linting:all diff --git a/.github/workflows/pytest-docker.yml b/.github/workflows/pytest-docker.yml deleted file mode 100644 index d6e0759d..00000000 --- a/.github/workflows/pytest-docker.yml +++ /dev/null @@ -1,38 +0,0 @@ -name: Test using Pytest in Docker - -on: - push: - branches: - - main - pull_request: - branches: - - main - -jobs: - build: - strategy: - matrix: - image: [ "python-3.9", "python-3.10", "python-3.11", "ubuntu-20.04", "ubuntu-22.04" ] - runs-on: ubuntu-22.04 - - steps: - - uses: actions/checkout@v3 - - # Use GitHub's Docker registry to cache intermediate layers - - run: echo ${{ secrets.GITHUB_TOKEN }} | docker login docker.pkg.github.com -u $GITHUB_ACTOR --password-stdin - - run: docker pull docker.pkg.github.com/$GITHUB_REPOSITORY/aleph-sdk-python-build-cache || true - - - name: Build the Docker image - run: | - git fetch --prune --unshallow --tags - docker build . 
-t aleph-sdk-python:${GITHUB_REF##*/} -f docker/${{matrix.image}}.dockerfile --cache-from=docker.pkg.github.com/$GITHUB_REPOSITORY/aleph-sdk-python-build-cache - - - name: Push the image on GitHub's repository - run: docker tag aleph-sdk-python:${GITHUB_REF##*/} docker.pkg.github.com/$GITHUB_REPOSITORY/aleph-sdk-python:${GITHUB_REF##*/} && docker push docker.pkg.github.com/$GITHUB_REPOSITORY/aleph-sdk-python:${GITHUB_REF##*/} || true - - - name: Cache the image on GitHub's repository - run: docker tag aleph-sdk-python:${GITHUB_REF##*/} docker.pkg.github.com/$GITHUB_REPOSITORY/aleph-sdk-python-build-cache && docker push docker.pkg.github.com/$GITHUB_REPOSITORY/aleph-sdk-python-build-cache || true - - - name: Pytest in the Docker image - run: | - docker run --entrypoint /opt/venv/bin/pytest aleph-sdk-python:${GITHUB_REF##*/} /opt/aleph-sdk-python/ diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml new file mode 100644 index 00000000..69382ccd --- /dev/null +++ b/.github/workflows/pytest.yml @@ -0,0 +1,33 @@ +name: Test/Coverage with Python + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + build: + strategy: + matrix: + python-version: [ "3.8", "3.9", "3.10", "3.11" ] + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + - run: sudo apt-get install -y python3-pip libsecp256k1-dev + - run: python -m pip install --upgrade pip hatch coverage + - run: hatch run testing:test + if: matrix.python-version != '3.11' + - run: hatch run testing:cov + if: matrix.python-version == '3.11' + - uses: codecov/codecov-action@v4.0.1 + if: matrix.python-version == '3.11' + with: + token: ${{ secrets.CODECOV_TOKEN }} + slug: aleph-im/aleph-sdk-python diff --git a/AUTHORS.rst b/AUTHORS.rst index a7c7459a..65119268 100644 --- a/AUTHORS.rst +++ b/AUTHORS.rst @@ -5,3 +5,4 @@ Contributors * Henry Taieb * Hugo Herter * Moshe Malawach +* 
Mike Hukiewitz \ No newline at end of file diff --git a/CHANGELOG.rst b/CHANGELOG.rst deleted file mode 100644 index 30607221..00000000 --- a/CHANGELOG.rst +++ /dev/null @@ -1,8 +0,0 @@ -========= -Changelog -========= - -Version 0.1 -=========== - -- Converted from minialeph \ No newline at end of file diff --git a/README.md b/README.md index e2e74cf9..88117865 100644 --- a/README.md +++ b/README.md @@ -5,11 +5,9 @@ Python SDK for the Aleph.im network, next generation network of decentralized bi Development follows the [Aleph Whitepaper](https://github.com/aleph-im/aleph-whitepaper). ## Documentation -Documentation (albeit still vastly incomplete as it is a work in progress) can be found at [http://aleph-sdk-python.readthedocs.io/](http://aleph-sdk-python.readthedocs.io/) or built from this repo with: +The latest documentation, albeit incomplete, is available at [https://docs.aleph.im/libraries/python-sdk/](https://docs.aleph.im/libraries/python-sdk/). -```shell -$ python setup.py docs -``` +For the full documentation, please refer to the docstrings in the source code. ## Requirements ### Linux @@ -31,18 +29,64 @@ $ brew install libsecp256k1 Using pip and [PyPI](https://pypi.org/project/aleph-sdk-python/): ```shell -$ pip install aleph-sdk-python[ethereum,solana,tezos] +$ pip install aleph-sdk-python +``` + +### Additional dependencies +Some functionalities require additional dependencies. 
They can be installed like this: + +```shell +$ pip install aleph-sdk-python[ethereum, dns] ``` +The following extra dependencies are available: +- `ethereum` for Ethereum and Ethereum-compatible chains +- `solana` for Solana accounts and signatures +- `cosmos` for Substrate/Cosmos accounts and signatures +- `nuls2` for NULS2 accounts and signatures +- `polkadot` for Polkadot accounts and signatures +- `ledger` for Ledger hardware wallet support, see [Usage with LedgerHQ hardware](#usage-with-ledgerhq-hardware) +- `mqtt` for MQTT-related functionalities, see [examples/mqtt.py](examples/mqtt.py) +- `docs` for building the documentation, see [Documentation](#documentation) +- `dns` for DNS-related functionalities +- `all` installs all extra dependencies + + ## Installation for development -To install from source and still be able to modify the source code: +Setup a virtual environment using [hatch](https://hatch.pypa.io/): +```shell +$ hatch shell +``` + +Then install the SDK from source with all extra dependencies: ```shell -$ pip install -e .[testing] +$ pip install -e .[all] ``` -or + +### Running tests & Hatch scripts +You can use the test env defined for hatch to run the tests: + +```shell +$ hatch run test:run +``` + +See `hatch env show` for more information about all the environments and their scripts. + +### Generating the documentation [DEPRECATED] +The documentation is built using [Sphinx](https://www.sphinx-doc.org/). 
+ +To build the documentation, install the SDK with the `docs` extra dependencies: + +```shell +$ pip install -e .[docs] +``` + +Then build the documentation: + ```shell -$ python setup.py develop +$ cd docs +$ make html ``` ## Usage with LedgerHQ hardware diff --git a/docker/python-3.10.dockerfile b/docker/python-3.10.dockerfile deleted file mode 100644 index 3af183ca..00000000 --- a/docker/python-3.10.dockerfile +++ /dev/null @@ -1,39 +0,0 @@ -FROM python:3.10-bullseye -MAINTAINER The aleph.im project - -RUN apt-get update && apt-get -y upgrade && apt-get install -y \ - libsecp256k1-dev \ - && rm -rf /var/lib/apt/lists/* - -RUN useradd -s /bin/bash --create-home user -RUN mkdir /opt/venv -RUN mkdir /opt/aleph-sdk-python/ -RUN chown user:user /opt/venv -RUN chown user:user /opt/aleph-sdk-python - -USER user -RUN python3 -m venv /opt/venv -ENV PATH="/opt/venv/bin:$PATH" -ENV PATH="/opt/venv/bin:$PATH" - -RUN pip install --upgrade pip wheel twine - -# Preinstall dependencies for faster steps -RUN pip install --upgrade secp256k1 coincurve aiohttp eciespy python-magic typer -RUN pip install --upgrade 'aleph-message~=0.4.0' eth_account pynacl base58 -RUN pip install --upgrade pytest pytest-cov pytest-asyncio mypy types-setuptools pytest-asyncio fastapi httpx requests - -WORKDIR /opt/aleph-sdk-python/ -COPY . . 
-USER root -RUN chown -R user:user /opt/aleph-sdk-python - -RUN git config --global --add safe.directory /opt/aleph-sdk-python -RUN pip install -e .[testing,ethereum,solana,tezos,ledger] - -RUN mkdir /data -RUN chown user:user /data -ENV ALEPH_PRIVATE_KEY_FILE=/data/secret.key - -WORKDIR /home/user -USER user diff --git a/docker/python-3.11.dockerfile b/docker/python-3.11.dockerfile deleted file mode 100644 index 644195a7..00000000 --- a/docker/python-3.11.dockerfile +++ /dev/null @@ -1,39 +0,0 @@ -FROM python:3.11-bullseye -MAINTAINER The aleph.im project - -RUN apt-get update && apt-get -y upgrade && apt-get install -y \ - libsecp256k1-dev \ - && rm -rf /var/lib/apt/lists/* - -RUN useradd -s /bin/bash --create-home user -RUN mkdir /opt/venv -RUN mkdir /opt/aleph-sdk-python/ -RUN chown user:user /opt/venv -RUN chown user:user /opt/aleph-sdk-python - -USER user -RUN python3 -m venv /opt/venv -ENV PATH="/opt/venv/bin:$PATH" -ENV PATH="/opt/venv/bin:$PATH" - -RUN pip install --upgrade pip wheel twine - -# Preinstall dependencies for faster steps -RUN pip install --upgrade secp256k1 coincurve aiohttp eciespy python-magic typer -RUN pip install --upgrade 'aleph-message~=0.4.0' pynacl base58 -RUN pip install --upgrade pytest pytest-cov pytest-asyncio mypy types-setuptools pytest-asyncio fastapi httpx requests - -WORKDIR /opt/aleph-sdk-python/ -COPY . . 
-USER root -RUN chown -R user:user /opt/aleph-sdk-python - -RUN git config --global --add safe.directory /opt/aleph-sdk-python -RUN pip install -e .[testing,ethereum,solana,tezos,ledger] - -RUN mkdir /data -RUN chown user:user /data -ENV ALEPH_PRIVATE_KEY_FILE=/data/secret.key - -WORKDIR /home/user -USER user diff --git a/docker/python-3.9.dockerfile b/docker/python-3.9.dockerfile deleted file mode 100644 index ff6d3c41..00000000 --- a/docker/python-3.9.dockerfile +++ /dev/null @@ -1,39 +0,0 @@ -FROM python:3.9-bullseye -MAINTAINER The aleph.im project - -RUN apt-get update && apt-get -y upgrade && apt-get install -y \ - libsecp256k1-dev \ - && rm -rf /var/lib/apt/lists/* - -RUN useradd -s /bin/bash --create-home user -RUN mkdir /opt/venv -RUN mkdir /opt/aleph-sdk-python/ -RUN chown user:user /opt/venv -RUN chown user:user /opt/aleph-sdk-python - -USER user -RUN python3 -m venv /opt/venv -ENV PATH="/opt/venv/bin:$PATH" -ENV PATH="/opt/venv/bin:$PATH" - -RUN pip install --upgrade pip wheel twine - -# Preinstall dependencies for faster steps -RUN pip install --upgrade secp256k1 coincurve aiohttp eciespy python-magic typer -RUN pip install --upgrade 'aleph-message~=0.4.0' eth_account pynacl base58 -RUN pip install --upgrade pytest pytest-cov pytest-asyncio mypy types-setuptools pytest-asyncio fastapi httpx requests - -WORKDIR /opt/aleph-sdk-python/ -COPY . . 
-USER root -RUN chown -R user:user /opt/aleph-sdk-python - -RUN git config --global --add safe.directory /opt/aleph-sdk-python -RUN pip install -e .[testing,ethereum,solana,tezos,ledger] - -RUN mkdir /data -RUN chown user:user /data -ENV ALEPH_PRIVATE_KEY_FILE=/data/secret.key - -WORKDIR /home/user -USER user diff --git a/docker/ubuntu-20.04.dockerfile b/docker/ubuntu-20.04.dockerfile deleted file mode 100644 index cb0d7c7e..00000000 --- a/docker/ubuntu-20.04.dockerfile +++ /dev/null @@ -1,44 +0,0 @@ -FROM ubuntu:20.04 -MAINTAINER The aleph.im project - -RUN apt-get update && apt-get -y upgrade && apt-get install -y \ - libsecp256k1-dev \ - python3-dev \ - python3-venv \ - git \ - build-essential \ - libgmp3-dev \ - && rm -rf /var/lib/apt/lists/* - -RUN useradd -s /bin/bash --create-home user -RUN mkdir /opt/venv -RUN mkdir /opt/aleph-sdk-python/ -RUN chown user:user /opt/venv -RUN chown user:user /opt/aleph-sdk-python - -USER user -RUN python3 -m venv /opt/venv -ENV PATH="/opt/venv/bin:$PATH" -ENV PATH="/opt/venv/bin:$PATH" - -RUN pip install --upgrade pip wheel twine - -# Preinstall dependencies for faster steps -RUN pip install --upgrade secp256k1 coincurve aiohttp eciespy python-magic typer -RUN pip install --upgrade 'aleph-message~=0.4.0' eth_account pynacl base58 -RUN pip install --upgrade pytest pytest-cov pytest-asyncio mypy types-setuptools pytest-asyncio fastapi httpx requests - -WORKDIR /opt/aleph-sdk-python/ -COPY . . 
-USER root -RUN chown -R user:user /opt/aleph-sdk-python - -RUN git config --global --add safe.directory /opt/aleph-sdk-python -RUN pip install -e .[testing,ethereum,solana,tezos,ledger] - -RUN mkdir /data -RUN chown user:user /data -ENV ALEPH_PRIVATE_KEY_FILE=/data/secret.key - -WORKDIR /home/user -USER user diff --git a/docker/ubuntu-22.04.dockerfile b/docker/ubuntu-22.04.dockerfile deleted file mode 100644 index 8e44e482..00000000 --- a/docker/ubuntu-22.04.dockerfile +++ /dev/null @@ -1,44 +0,0 @@ -FROM ubuntu:22.04 -MAINTAINER The aleph.im project - -RUN apt-get update && apt-get -y upgrade && apt-get install -y \ - libsecp256k1-dev \ - python3-dev \ - python3-venv \ - git \ - build-essential \ - libgmp3-dev \ - && rm -rf /var/lib/apt/lists/* - -RUN useradd -s /bin/bash --create-home user -RUN mkdir /opt/venv -RUN mkdir /opt/aleph-sdk-python/ -RUN chown user:user /opt/venv -RUN chown user:user /opt/aleph-sdk-python - -USER user -RUN python3 -m venv /opt/venv -ENV PATH="/opt/venv/bin:$PATH" -ENV PATH="/opt/venv/bin:$PATH" - -RUN pip install --upgrade pip wheel twine - -# Preinstall dependencies for faster steps -RUN pip install --upgrade secp256k1 coincurve aiohttp eciespy python-magic typer -RUN pip install --upgrade 'aleph-message~=0.4.0' eth_account pynacl base58 -RUN pip install --upgrade pytest pytest-cov pytest-asyncio mypy types-setuptools pytest-asyncio fastapi httpx requests - -WORKDIR /opt/aleph-sdk-python/ -COPY . . 
-USER root -RUN chown -R user:user /opt/aleph-sdk-python - -RUN git config --global --add safe.directory /opt/aleph-sdk-python -RUN pip install -e .[testing,ethereum,solana,tezos,ledger] - -RUN mkdir /data -RUN chown user:user /data -ENV ALEPH_PRIVATE_KEY_FILE=/data/secret.key - -WORKDIR /home/user -USER user diff --git a/docker/with-ipfs.dockerfile b/docker/with-ipfs.dockerfile index e9625f18..507ee0ea 100644 --- a/docker/with-ipfs.dockerfile +++ b/docker/with-ipfs.dockerfile @@ -29,7 +29,7 @@ RUN mkdir /opt/aleph-sdk-python/ WORKDIR /opt/aleph-sdk-python/ COPY . . -RUN pip install -e .[testing,ethereum] +RUN pip install -e .[testing] # - User 'aleph' to run the code itself diff --git a/mypy.ini b/mypy.ini deleted file mode 100644 index 87a87653..00000000 --- a/mypy.ini +++ /dev/null @@ -1,67 +0,0 @@ -# Global options: - -[mypy] -python_version = 3.8 - -mypy_path = src - -exclude = conftest.py - - -show_column_numbers = True - -# Suppressing errors -# Shows errors related to strict None checking, if the global strict_optional flag is enabled -strict_optional = True -no_implicit_optional = True - -# Import discovery -# Suppresses error messages about imports that cannot be resolved -ignore_missing_imports = True -# Forces import to reference the original source file -no_implicit_reexport = True -# show error messages from unrelated files -follow_imports = silent -follow_imports_for_stubs = False - - -# Disallow dynamic typing -# Disallows usage of types that come from unfollowed imports -disallow_any_unimported = False -# Disallows all expressions in the module that have type Any -disallow_any_expr = False -# Disallows functions that have Any in their signature after decorator transformation. -disallow_any_decorated = False -# Disallows explicit Any in type positions such as type annotations and generic type parameters. -disallow_any_explicit = False -# Disallows usage of generic types that do not specify explicit type parameters. 
-disallow_any_generics = False -# Disallows subclassing a value of type Any. -disallow_subclassing_any = False - -# Untyped definitions and calls -# Disallows calling functions without type annotations from functions with type annotations. -disallow_untyped_calls = False -# Disallows defining functions without type annotations or with incomplete type annotations -disallow_untyped_defs = False -# Disallows defining functions with incomplete type annotations. -check_untyped_defs = True -# Type-checks the interior of functions without type annotations. -disallow_incomplete_defs = False -# Reports an error whenever a function with type annotations is decorated with a decorator without annotations. -disallow_untyped_decorators = False - -# Prohibit comparisons of non-overlapping types (ex: 42 == "no") -strict_equality = True - -# Configuring warnings -# Warns about unneeded # type: ignore comments. -warn_unused_ignores = True -# Shows errors for missing return statements on some execution paths. -warn_no_return = True -# Shows a warning when returning a value with type Any from a function declared with a non- Any return type. -warn_return_any = False - -# Miscellaneous strictness flags -# Allows variables to be redefined with an arbitrary type, as long as the redefinition is in the same block and nesting level as the original definition. 
-allow_redefinition = True diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..832084d6 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,239 @@ +[build-system] +requires = ["hatchling", "hatch-vcs"] +build-backend = "hatchling.build" + +[project] +name = "aleph-sdk-python" +dynamic = ["version"] +description = "Lightweight Python Client library for the Aleph.im network" +readme = "README.md" +license = { file = "LICENSE.txt" } +authors = [ + { name = "Aleph.im Team", email = "hello@aleph.im" }, +] +classifiers = [ + "Programming Language :: Python :: 3", + "Development Status :: 4 - Beta", + "Framework :: aiohttp", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: POSIX :: Linux", + "Operating System :: MacOS :: MacOS X", + "Topic :: Software Development :: Libraries", +] +dependencies = [ + "aiohttp>=3.8.3", + "aleph-message~=0.4.4", + "coincurve; python_version<\"3.11\"", + "coincurve>=17.0.0; python_version>=\"3.11\"", + "eth_abi>=4.0.0; python_version>=\"3.11\"", + "eth_account>=0.4.0", + "python-magic", + "typer", + "typing_extensions", +] + +[project.optional-dependencies] +cosmos = [ + "cosmospy", +] +dns = [ + "aiodns", +] +docs = [ + "sphinxcontrib-plantuml", +] +ethereum = [ + "eth_abi>=4.0.0; python_version>=\"3.11\"", + "eth_account>=0.4.0", +] +ledger = [ + "ledgereth==0.9.0", +] +mqtt = [ + "aiomqtt<=0.1.3", + "certifi", + "Click", +] +nuls2 = [ + "aleph-nuls2", +] +polkadot = [ + "py-sr25519-bindings", + "substrate-interface", +] +solana = [ + "base58", + "pynacl", +] +tezos = [ + "aleph-pytezos==0.1.1", + "pynacl", +] +encryption = [ + "eciespy; python_version<\"3.11\"", + "eciespy>=0.3.13; python_version>=\"3.11\"", +] +all = [ + "aleph-sdk-python[cosmos,dns,docs,ethereum,ledger,mqtt,nuls2,polkadot,solana,tezos,encryption]", +] + +[project.urls] +Documentation = "https://aleph.im/" +Homepage = "https://github.com/aleph-im/aleph-sdk-python" + +[tool.hatch.version] 
+source = "vcs" + +[tool.hatch.build.targets.wheel] +packages = [ + "src/aleph", + "pyproject.toml", + "README.md", + "LICENSE.txt", +] + +[tool.hatch.build.targets.sdist] +include = [ + "src/aleph", + "pyproject.toml", + "README.md", + "LICENSE.txt", +] + +[tool.isort] +profile = "black" + +[[tool.hatch.envs.all.matrix]] +python = ["3.8", "3.9", "3.10", "3.11"] + +[tool.hatch.envs.testing] +features = [ + "cosmos", + "dns", + "ethereum", + "ledger", + "nuls2", + "polkadot", + "solana", + "tezos", + "encryption", +] +dependencies = [ + "pytest==8.0.1", + "pytest-cov==4.1.0", + "pytest-mock==3.12.0", + "pytest-asyncio==0.23.5", + "fastapi", + "httpx", + "secp256k1", +] +[tool.hatch.envs.testing.scripts] +test = "pytest {args:tests}" +test-cov = "pytest --cov {args:tests}" +cov-report = [ + "- coverage combine", + "coverage report", +] +cov = [ + "test-cov", + "cov-report", +] + +[tool.hatch.envs.linting] +detached = true +dependencies = [ + "black==24.2.0", + "mypy==1.9.0", + "mypy-extensions==1.0.0", + # "ruff==0.1.15", + "isort==5.13.2", +] +[tool.hatch.envs.linting.scripts] +typing = "mypy --config-file=pyproject.toml {args:} ./src/ ./tests/ ./examples/" +style = [ + # "ruff {args:.}", + "black --check --diff {args:} ./src/ ./tests/ ./examples/", + "isort --check-only --profile black {args:} ./src/ ./tests/ ./examples/", +] +fmt = [ + "black {args:} ./src/ ./tests/ ./examples/", + # "ruff --fix {args:.}", + "isort --profile black {args:} ./src/ ./tests/ ./examples/", + "style", +] +all = [ + "style", + "typing", +] + +[tool.mypy] +python_version = 3.8 +mypy_path = "src" +exclude = [ + "conftest.py" +] +show_column_numbers = true + +# Import discovery +# Install types for third-party library stubs (e.g. from typeshed repository) +install_types = true +non_interactive = true +# Suppresses error messages about imports that cannot be resolved (no py.typed file, no stub file, etc). 
+ignore_missing_imports = true +# Don't follow imports +follow_imports = "silent" + + +# Miscellaneous strictness flags +# Allows variables to be redefined with an arbitrary type, as long as the redefinition is in the same block and nesting level as the original definition. +allow_redefinition = true + +[tool.pytest.ini_options] +minversion = "6.0" +pythonpath = ["src"] +addopts = "-vv -m \"not ledger_hardware\"" +norecursedirs = ["*.egg", "dist", "build", ".tox", ".venv", "*/site-packages/*"] +testpaths = ["tests/unit"] +markers = {ledger_hardware = "marks tests as requiring ledger hardware"} + +[tool.coverage.run] +branch = true +parallel = true +source = [ + "src/", +] +omit = [ + "*/site-packages/*", +] + +[tool.coverage.paths] +source = [ + "src/", +] +omit = [ + "*/site-packages/*", +] + +[tool.coverage.report] +show_missing = true +skip_empty = true +exclude_lines = [ + # Have to re-enable the standard pragma + "pragma: no cover", + + # Don't complain about missing debug-only code: + "def __repr__", + "if self\\.debug", + + # Don't complain if tests don't hit defensive assertion code: + "raise AssertionError", + "raise NotImplementedError", + + # Don't complain if non-runnable code isn't run: + "if 0:", + "if __name__ == .__main__.:", + + # Don't complain about ineffective code: + "pass", +] \ No newline at end of file diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 1d71a697..00000000 --- a/setup.cfg +++ /dev/null @@ -1,181 +0,0 @@ -# This file is used to configure your project. 
-# Read more about the various options under: -# http://setuptools.readthedocs.io/en/latest/setuptools.html#configuring-setup-using-setup-cfg-files - -[metadata] -name = aleph-sdk-python -description = Lightweight Python Client library for the Aleph.im network -author = Aleph.im Team -author_email = hello@aleph.im -license = mit -long_description = file: README.md -long_description_content_type = text/markdown; charset=UTF-8 -url = https://github.com/aleph-im/aleph-sdk-python -project_urls = - Documentation = https://aleph.im/ -# Change if running only on Windows, Mac or Linux (comma-separated) -platforms = any -# Add here all kinds of additional classifiers as defined under -# https://pypi.python.org/pypi?%3Aaction=list_classifiers -classifiers = - Development Status :: 4 - Beta - Programming Language :: Python :: 3 - -[options] -zip_safe = False -packages = find: -include_package_data = True -package_dir = - =src -# DON'T CHANGE THE FOLLOWING LINE! IT WILL BE UPDATED BY PYSCAFFOLD! -setup_requires = pyscaffold>=3.2a0,<3.3a0 -# Add here dependencies of your project (semicolon/line-separated), e.g. -install_requires = - coincurve; python_version<"3.11" - coincurve>=17.0.0; python_version>="3.11" # Technically, this should be >=18.0.0 but there is a conflict with eciespy - aiohttp>=3.8.3 - typing_extensions - typer - aleph-message~=0.4.3 - eth_account>=0.4.0 - # Required to fix a dependency issue with parsimonious and Python3.11 - eth_abi>=4.0.0; python_version>="3.11" - python-magic -# The usage of test_requires is discouraged, see `Dependency Management` docs -# tests_require = pytest; pytest-cov -# Require a specific Python version, e.g. 
Python 2.7 or >= 3.4 -# python_requires = >=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.* - -[options.packages.find] -where = src -exclude = - tests - -[options.extras_require] -# Add here additional requirements for extra features, to install with: -# `pip install aleph-sdk-python[PDF]` like: -# PDF = ReportLab; RXP -# Add here test requirements (semicolon/line-separated) -testing = - aiomqtt<=0.1.3 - psutil - pytest - pytest-cov - pytest-asyncio - pytest-mock - mypy - secp256k1 - pynacl - base58 - fastapi - # httpx is required in tests by fastapi.testclient - httpx - requests - aleph-pytezos==0.1.1 - types-certifi - types-setuptools - black - isort - flake8 - substrate-interface - py-sr25519-bindings - ledgereth==0.9.0 - aiodns - eciespy; python_version<"3.11" - eciespy>=0.3.13; python_version>="3.11" -dns = - aiodns -mqtt = - aiomqtt<=0.1.3 - certifi - Click -nuls2 = - aleph-nuls2 -ethereum = - eth_account>=0.4.0 - # Required to fix a dependency issue with parsimonious and Python3.11 - eth_abi>=4.0.0; python_version>="3.11" -polkadot = - substrate-interface - py-sr25519-bindings -cosmos = - cosmospy -solana = - pynacl - base58 -tezos = - pynacl - aleph-pytezos==0.1.1 -ledger = - ledgereth==0.9.0 -docs = - sphinxcontrib-plantuml -encryption = - eciespy; python_version<"3.11" - eciespy>=0.3.13; python_version>="3.11" - -[options.entry_points] -# Add here console scripts like: -# For example: -# console_scripts = -# fibonacci = aleph.sdk.skeleton:run -# And any other entry points, for example: -# pyscaffold.cli = -# awesome = pyscaffoldext.awesome.extension:AwesomeExtension - -[test] -# py.test options when running `python setup.py test` -# addopts = --verbose -extras = True - -[tool:pytest] -# Options for py.test: -# Specify command line options as you would do when invoking py.test directly. -# e.g. --cov-report html (or xml) for html/xml output or --junitxml junit.xml -# in order to write a coverage file that can be read by Jenkins. 
-addopts = - --cov aleph.sdk --cov-report term-missing - --verbose - -m "not ledger_hardware" -norecursedirs = - dist - build - .tox -testpaths = tests -markers = - "ledger_hardware: marks tests as requiring ledger hardware" -[aliases] -dists = bdist_wheel - -[bdist_wheel] -# Use this option if your package is pure-python -universal = 0 - -[build_sphinx] -source_dir = docs -build_dir = build/sphinx - -[devpi:upload] -# Options for the devpi: PyPI server and packaging tool -# VCS export must be deactivated since we are using setuptools-scm -no-vcs = 1 -formats = bdist_wheel - -[flake8] -# Some sane defaults for the code style checker flake8 -exclude = - .tox - build - dist - .eggs - docs/conf.py -ignore = E501 W291 W503 E203 E704 - -[isort] -profile = black - -[pyscaffold] -# PyScaffold's parameters when the project was created. -# This will be used when updating. Do not change! -version = 3.2.1 -package = aleph.sdk diff --git a/setup.py b/setup.py deleted file mode 100644 index 9b29e6b3..00000000 --- a/setup.py +++ /dev/null @@ -1,23 +0,0 @@ -# -*- coding: utf-8 -*- -""" - Setup file for aleph.sdk - Use setup.cfg to configure your project. - - This file was generated with PyScaffold 3.2.1. - PyScaffold helps you to put up the scaffold of your new Python project. 
- Learn more under: https://pyscaffold.org/ -""" -import sys - -from pkg_resources import VersionConflict, require -from setuptools import setup - -try: - require("setuptools>=38.3") -except VersionConflict: - print("Error: version of setuptools is too old (<38.3)!") - sys.exit(1) - - -if __name__ == "__main__": - setup(use_pyscaffold=True) diff --git a/src/aleph/py.typed b/src/aleph/py.typed new file mode 100644 index 00000000..e69de29b From 2a0b78953de513847dbf18d0ba01f9948851eaa2 Mon Sep 17 00:00:00 2001 From: mhh Date: Fri, 23 Feb 2024 10:56:38 +0100 Subject: [PATCH 007/122] Add itest_store.py to test file storage --- tests/integration/fixtures/testStore.txt | 1 + tests/integration/itest_store.py | 65 ++++++++++++++++++++++++ 2 files changed, 66 insertions(+) create mode 100644 tests/integration/fixtures/testStore.txt create mode 100644 tests/integration/itest_store.py diff --git a/tests/integration/fixtures/testStore.txt b/tests/integration/fixtures/testStore.txt new file mode 100644 index 00000000..865be812 --- /dev/null +++ b/tests/integration/fixtures/testStore.txt @@ -0,0 +1 @@ +Never gonna give you up. \ No newline at end of file diff --git a/tests/integration/itest_store.py b/tests/integration/itest_store.py new file mode 100644 index 00000000..0831bee3 --- /dev/null +++ b/tests/integration/itest_store.py @@ -0,0 +1,65 @@ +import pytest + +from aleph.sdk.client import AuthenticatedAlephHttpClient +from aleph.sdk.query.filters import MessageFilter +from tests.integration.toolkit import has_messages, try_until + +from .config import REFERENCE_NODE, TARGET_NODE + + +async def create_store_on_target(account, emitter_node: str, receiver_node: str): + """ + Create a POST message on the target node, then fetch it from the reference node and download the file. 
+ """ + with open("tests/integration/fixtures/testStore.txt", "rb") as f: + file_content = f.read() + async with AuthenticatedAlephHttpClient( + account=account, api_server=emitter_node + ) as tx_session: + store_message, message_status = await tx_session.create_store( + file_content=file_content, + extra_fields={"test": "test"}, + ) + + async with AuthenticatedAlephHttpClient( + account=account, api_server=receiver_node + ) as rx_session: + responses = await try_until( + rx_session.get_messages, + has_messages, + timeout=5, + message_filter=MessageFilter( + hashes=[store_message.item_hash], + ), + ) + + message_from_target = responses.messages[0] + assert store_message.item_hash == message_from_target.item_hash + + async with AuthenticatedAlephHttpClient( + account=account, api_server=receiver_node + ) as rx_session: + store_content = await rx_session.download_file(store_message.content.item_hash) + assert store_content == file_content + + +@pytest.mark.asyncio +async def test_create_message_on_target(fixture_account): + """ + Attempts to create a new message on the target node and verifies if the message can be fetched from + the reference node. + """ + await create_store_on_target( + fixture_account, emitter_node=TARGET_NODE, receiver_node=REFERENCE_NODE + ) + + +@pytest.mark.asyncio +async def test_create_message_on_reference(fixture_account): + """ + Attempts to create a new message on the reference node and verifies if the message can be fetched from + the target node. 
+ """ + await create_store_on_target( + fixture_account, emitter_node=REFERENCE_NODE, receiver_node=TARGET_NODE + ) From b155233fa5964593b47c5646dbdf0d2f2539aeea Mon Sep 17 00:00:00 2001 From: mhh Date: Sun, 7 Apr 2024 08:22:18 +0200 Subject: [PATCH 008/122] Fix hashing for caching dependencies --- .github/workflows/build-wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-wheels.yml b/.github/workflows/build-wheels.yml index ced2a661..28761610 100644 --- a/.github/workflows/build-wheels.yml +++ b/.github/workflows/build-wheels.yml @@ -32,7 +32,7 @@ jobs: uses: actions/cache@v4 with: path: ~/.cache/pip - key: ${{ runner.os }}-build-wheels-${{ hashFiles('setup.cfg', 'setup.py') }} + key: ${{ runner.os }}-build-wheels-${{ hashFiles('pyproject.toml') }} restore-keys: | ${{ runner.os }}-build-wheels- From 878d2b41cae76ff1fbaf368f38ef8f4d7d3d0968 Mon Sep 17 00:00:00 2001 From: mhh Date: Sun, 7 Apr 2024 08:39:23 +0200 Subject: [PATCH 009/122] Install dependencies before build --- .github/workflows/build-wheels.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/build-wheels.yml b/.github/workflows/build-wheels.yml index 28761610..ca5e8f0c 100644 --- a/.github/workflows/build-wheels.yml +++ b/.github/workflows/build-wheels.yml @@ -57,6 +57,7 @@ jobs: - name: Build source and wheel packages run: | + python3 -m pip install . 
python3 -m build - name: Install the Python wheel From 1431129848f948f830ce861c77d7ffa2d2cf91d7 Mon Sep 17 00:00:00 2001 From: mhh Date: Mon, 8 Apr 2024 12:19:15 +0200 Subject: [PATCH 010/122] Fix build with using Hatch --- .github/workflows/build-wheels.yml | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/.github/workflows/build-wheels.yml b/.github/workflows/build-wheels.yml index ca5e8f0c..5155bc7e 100644 --- a/.github/workflows/build-wheels.yml +++ b/.github/workflows/build-wheels.yml @@ -50,23 +50,19 @@ jobs: sudo apt-get -y upgrade sudo apt-get install -y libsecp256k1-dev - - name: Install required Python packages + - name: Install Hatch run: | - python3 -m pip install --upgrade build - python3 -m pip install --user --upgrade twine + python3 -m pip install --upgrade hatch - name: Build source and wheel packages run: | - python3 -m pip install . - python3 -m build + python3 -m hatch build - name: Install the Python wheel run: | python3 -m pip install dist/aleph_sdk_python-*.whl - name: Import and use the package - # macos tests fail this step because they use Python 3.11, which is not yet supported by our dependencies - if: startsWith(matrix.os, 'ubuntu-') run: | python3 -c "import aleph.sdk" python3 -c "from aleph.sdk.chains.ethereum import get_fallback_account; get_fallback_account()" From 141f0ace1bbd475794f6ffeff02d1a81b360efa8 Mon Sep 17 00:00:00 2001 From: mhh Date: Mon, 8 Apr 2024 12:48:07 +0200 Subject: [PATCH 011/122] Restrict eth-account version < 0.12.0 due to incompatible pydantic requirement; Remove ethereum extra package, as it was already contained in main package --- README.md | 3 +-- pyproject.toml | 9 ++------- 2 files changed, 3 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 88117865..cfc7e1a4 100644 --- a/README.md +++ b/README.md @@ -36,11 +36,10 @@ $ pip install aleph-sdk-python Some functionalities require additional dependencies. 
They can be installed like this: ```shell -$ pip install aleph-sdk-python[ethereum, dns] +$ pip install aleph-sdk-python[solana, dns] ``` The following extra dependencies are available: -- `ethereum` for Ethereum and Ethereum-compatible chains - `solana` for Solana accounts and signatures - `cosmos` for Substrate/Cosmos accounts and signatures - `nuls2` for NULS2 accounts and signatures diff --git a/pyproject.toml b/pyproject.toml index 832084d6..f0bd0838 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ dependencies = [ "coincurve; python_version<\"3.11\"", "coincurve>=17.0.0; python_version>=\"3.11\"", "eth_abi>=4.0.0; python_version>=\"3.11\"", - "eth_account>=0.4.0", + "eth_account>=0.4.0,<0.12.0", "python-magic", "typer", "typing_extensions", @@ -43,10 +43,6 @@ dns = [ docs = [ "sphinxcontrib-plantuml", ] -ethereum = [ - "eth_abi>=4.0.0; python_version>=\"3.11\"", - "eth_account>=0.4.0", -] ledger = [ "ledgereth==0.9.0", ] @@ -75,7 +71,7 @@ encryption = [ "eciespy>=0.3.13; python_version>=\"3.11\"", ] all = [ - "aleph-sdk-python[cosmos,dns,docs,ethereum,ledger,mqtt,nuls2,polkadot,solana,tezos,encryption]", + "aleph-sdk-python[cosmos,dns,docs,ledger,mqtt,nuls2,polkadot,solana,tezos,encryption]", ] [project.urls] @@ -111,7 +107,6 @@ python = ["3.8", "3.9", "3.10", "3.11"] features = [ "cosmos", "dns", - "ethereum", "ledger", "nuls2", "polkadot", From 1c2d57d847a5865bc92c93a4cfaf74d122e2b20d Mon Sep 17 00:00:00 2001 From: mhh Date: Mon, 8 Apr 2024 12:53:50 +0200 Subject: [PATCH 012/122] Restrict eth-account version < 0.11.0 due to maintainers introducing breaking pydantic requirements in 0.11.1 and 0.12.1 that did not exist in 0.11.0 and 0.12.0 respectively --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index f0bd0838..f21f7ece 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -27,7 +27,7 @@ dependencies = [ "coincurve; python_version<\"3.11\"", "coincurve>=17.0.0; 
python_version>=\"3.11\"", "eth_abi>=4.0.0; python_version>=\"3.11\"", - "eth_account>=0.4.0,<0.12.0", + "eth_account>=0.4.0,<0.11.0", "python-magic", "typer", "typing_extensions", From ea4acdc3e5d0fc0631d2d3d0755d43ceb609eae3 Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Tue, 16 Apr 2024 14:06:45 +0200 Subject: [PATCH 013/122] Fix: No notification when dependency update breaks Problem: Since the SDK is a library, it should not enforce the version of the libraries it requires too strictly. This causes issues when an incompatible dependency upgrade breaks compatibility without anyone noticing except for the next CI build. Solution: Run tests daily with the latest dependencies. --- .github/workflows/pytest.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 69382ccd..89c3dabd 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -7,6 +7,11 @@ on: pull_request: branches: - main + schedule: + # Run every night at 04:00 (GitHub Actions timezone) + # in order to catch when unfrozen dependency updates + # break the use of the library. + - cron: '4 0 * * *' jobs: build: From 390627cf69b0f3f3945291ef40eacf3fec7b814b Mon Sep 17 00:00:00 2001 From: mhh Date: Tue, 16 Apr 2024 16:06:10 +0200 Subject: [PATCH 014/122] Updated PR Difficulty Rating action to version 2 with improved features. 
--- .github/workflows/pr-rating.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pr-rating.yml b/.github/workflows/pr-rating.yml index 8f42647d..1378b687 100644 --- a/.github/workflows/pr-rating.yml +++ b/.github/workflows/pr-rating.yml @@ -13,7 +13,7 @@ jobs: if: github.event.pull_request.draft == false steps: - name: PR Difficulty Rating - uses: rate-my-pr/rate@v1 + uses: rate-my-pr/rate@v2 with: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} LLAMA_URL: ${{ secrets.LLAMA_URL }} From 1409a70a825a78a729ec55cf62aba6e6a0407b49 Mon Sep 17 00:00:00 2001 From: Mike Hukiewitz <70762838+MHHukiewitz@users.noreply.github.com> Date: Wed, 17 Apr 2024 17:27:16 +0200 Subject: [PATCH 015/122] Add file_path param to `download_file()` to directly download files to disk (#115) * Add file_path param to `download_file()` to directly download files to disk instead of keeping them in memory. * Fix mypy errors * Split download_file() to two distinct methods; Use tempfile for testing file download; Return path of the saved_to * Fix test * Mock test_download HTTP requests --- src/aleph/sdk/client/abstract.py | 21 ++++++++++--- src/aleph/sdk/client/http.py | 33 +++++++++++++++++--- tests/unit/conftest.py | 51 ++++++++++++++++++++++++++++--- tests/unit/test_download.py | 52 ++++++++++++++++++++++++++------ 4 files changed, 133 insertions(+), 24 deletions(-) diff --git a/src/aleph/sdk/client/abstract.py b/src/aleph/sdk/client/abstract.py index a4047dfa..984293ce 100644 --- a/src/aleph/sdk/client/abstract.py +++ b/src/aleph/sdk/client/abstract.py @@ -100,19 +100,30 @@ async def get_posts_iterator( yield post @abstractmethod - async def download_file( - self, - file_hash: str, - ) -> bytes: + async def download_file(self, file_hash: str) -> bytes: """ Get a file from the storage engine as raw bytes. - Warning: Downloading large files can be slow and memory intensive. + Warning: Downloading large files can be slow and memory intensive. 
Use `download_file_to()` to download them directly to disk instead. :param file_hash: The hash of the file to retrieve. """ raise NotImplementedError("Did you mean to import `AlephHttpClient`?") + @abstractmethod + async def download_file_to_path( + self, + file_hash: str, + path: Union[Path, str], + ) -> Path: + """ + Download a file from the storage engine to given path. + + :param file_hash: The hash of the file to retrieve. + :param path: The path to which the file should be saved. + """ + raise NotImplementedError() + async def download_file_ipfs( self, file_hash: str, diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index c79a07a5..ae98b0d1 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -1,7 +1,9 @@ import json import logging +import os.path import ssl from io import BytesIO +from pathlib import Path from typing import Any, AsyncIterable, Dict, Iterable, List, Optional, Type, Union import aiohttp @@ -206,14 +208,11 @@ async def download_file_ipfs_to_buffer( else: response.raise_for_status() - async def download_file( - self, - file_hash: str, - ) -> bytes: + async def download_file(self, file_hash: str) -> bytes: """ Get a file from the storage engine as raw bytes. - Warning: Downloading large files can be slow and memory intensive. + Warning: Downloading large files can be slow and memory intensive. Use `download_file_to()` to download them directly to disk instead. :param file_hash: The hash of the file to retrieve. """ @@ -221,6 +220,30 @@ async def download_file( await self.download_file_to_buffer(file_hash, output_buffer=buffer) return buffer.getvalue() + async def download_file_to_path( + self, + file_hash: str, + path: Union[Path, str], + ) -> Path: + """ + Download a file from the storage engine to given path. + + :param file_hash: The hash of the file to retrieve. + :param path: The path to which the file should be saved. 
+ """ + if not isinstance(path, Path): + path = Path(path) + + if not os.path.exists(path): + dir_path = os.path.dirname(path) + if dir_path: + os.makedirs(dir_path, exist_ok=True) + + with open(path, "wb") as file_buffer: + await self.download_file_to_buffer(file_hash, output_buffer=file_buffer) + + return path + async def download_file_ipfs( self, file_hash: str, diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 7d388e36..a5d2d481 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -1,7 +1,8 @@ import json +from io import BytesIO from pathlib import Path from tempfile import NamedTemporaryFile -from typing import Any, Callable, Dict, List +from typing import Any, Callable, Dict, List, Optional, Union from unittest.mock import AsyncMock, MagicMock import pytest as pytest @@ -195,19 +196,59 @@ def mock_session_with_post_success( return client -def make_custom_mock_response(resp_json, status=200) -> MockResponse: +import asyncio +from functools import wraps + + +def async_wrap(cls): + class AsyncWrapper: + def __init__(self, *args, **kwargs): + self._instance = cls(*args, **kwargs) + + def __getattr__(self, item): + attr = getattr(self._instance, item) + if callable(attr): + + @wraps(attr) + async def method(*args, **kwargs): + loop = asyncio.get_running_loop() + return await loop.run_in_executor(None, attr, *args, **kwargs) + + return method + return attr + + return AsyncWrapper + + +AsyncBytesIO = async_wrap(BytesIO) + + +def make_custom_mock_response( + resp: Union[Dict[str, Any], bytes], status=200 +) -> MockResponse: class CustomMockResponse(MockResponse): + content: Optional[AsyncBytesIO] + async def json(self): - return resp_json + return resp @property def status(self): return status - return CustomMockResponse(sync=True) + mock = CustomMockResponse(sync=True) + + try: + mock.content = AsyncBytesIO(resp) + except Exception as e: + print(e) + + return mock -def make_mock_get_session(get_return_value: Dict[str, Any]) -> 
AlephHttpClient: +def make_mock_get_session( + get_return_value: Union[Dict[str, Any], bytes] +) -> AlephHttpClient: class MockHttpSession(AsyncMock): def get(self, *_args, **_kwargs): return make_custom_mock_response(get_return_value) diff --git a/tests/unit/test_download.py b/tests/unit/test_download.py index 377e6d41..ffffae74 100644 --- a/tests/unit/test_download.py +++ b/tests/unit/test_download.py @@ -1,7 +1,20 @@ +import tempfile +from pathlib import Path +from unittest.mock import AsyncMock, patch + import pytest from aleph.sdk import AlephHttpClient -from aleph.sdk.conf import settings as sdk_settings + +from .conftest import make_mock_get_session + + +def make_mock_download_client(item_hash: str) -> AlephHttpClient: + if item_hash == "QmeomffUNfmQy76CQGy9NdmqEnnHU9soCexBnGU3ezPHVH": + return make_mock_get_session(b"test\n") + if item_hash == "Qmdy5LaAL4eghxE7JD6Ah5o4PJGarjAV9st8az2k52i1vq": + return make_mock_get_session(bytes(5817703)) + raise NotImplementedError @pytest.mark.parametrize( @@ -13,10 +26,30 @@ ) @pytest.mark.asyncio async def test_download(file_hash: str, expected_size: int): - async with AlephHttpClient(api_server=sdk_settings.API_HOST) as client: - file_content = await client.download_file(file_hash) # File is 5B - file_size = len(file_content) - assert file_size == expected_size + mock_download_client = make_mock_download_client(file_hash) + async with mock_download_client: + file_content = await mock_download_client.download_file(file_hash) + file_size = len(file_content) + assert file_size == expected_size + + +@pytest.mark.asyncio +async def test_download_to_file(): + file_hash = "QmeomffUNfmQy76CQGy9NdmqEnnHU9soCexBnGU3ezPHVH" + mock_download_client = make_mock_download_client(file_hash) + with tempfile.TemporaryDirectory() as temp_dir: + temp_dir_path = Path(temp_dir) + download_path = temp_dir_path / "test.txt" + + async with mock_download_client: + returned_path = await mock_download_client.download_file_to_path( + file_hash, 
str(download_path) + ) + + assert returned_path == download_path + assert download_path.is_file() + with open(download_path, "r") as file: + assert file.read().strip() == "test" @pytest.mark.parametrize( @@ -28,7 +61,8 @@ async def test_download(file_hash: str, expected_size: int): ) @pytest.mark.asyncio async def test_download_ipfs(file_hash: str, expected_size: int): - async with AlephHttpClient(api_server=sdk_settings.API_HOST) as client: - file_content = await client.download_file_ipfs(file_hash) # 5817703 B FILE - file_size = len(file_content) - assert file_size == expected_size + mock_download_client = make_mock_download_client(file_hash) + async with mock_download_client: + file_content = await mock_download_client.download_file_ipfs(file_hash) + file_size = len(file_content) + assert file_size == expected_size From 28ada83a424bbacb99e2a1ae20ee74bd7c062e6b Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Mon, 29 Apr 2024 08:50:57 +0200 Subject: [PATCH 016/122] Fix: No notification when dependency update breaks (#119) Problem: Since the SDK is a library, it should not enforce the version of the libraries it requires too strictly. This causes issues when an incompatible dependency upgrade breaks compatibility without anyone noticing except for the next CI build. Solution: Run tests daily with the latest dependencies. From 0c0cfeae431926299995c9524bd51063132594ee Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Mon, 29 Apr 2024 09:03:11 +0200 Subject: [PATCH 017/122] Fix: Could not generate signed messages outside HTTP client (#120) Problem: Developers could not generate signed messages without using the AlephHttpClient. Solution: Move and rename the method on the abstract class; Move the utility that generates sha256 hashes to utils.py. 
--- src/aleph/sdk/client/abstract.py | 69 +++++++++++++++++++++- src/aleph/sdk/client/authenticated_http.py | 54 +---------------- src/aleph/sdk/utils.py | 6 ++ 3 files changed, 75 insertions(+), 54 deletions(-) diff --git a/src/aleph/sdk/client/abstract.py b/src/aleph/sdk/client/abstract.py index 984293ce..0d0d1e4e 100644 --- a/src/aleph/sdk/client/abstract.py +++ b/src/aleph/sdk/client/abstract.py @@ -1,6 +1,7 @@ # An interface for all clients to implement. - +import json import logging +import time from abc import ABC, abstractmethod from pathlib import Path from typing import ( @@ -18,19 +19,25 @@ from aleph_message.models import ( AlephMessage, + ItemType, MessagesResponse, MessageType, Payment, PostMessage, + parse_message, ) from aleph_message.models.execution.environment import HypervisorType from aleph_message.models.execution.program import Encoding from aleph_message.status import MessageStatus +from aleph.sdk.conf import settings +from aleph.sdk.types import Account +from aleph.sdk.utils import extended_json_encoder + from ..query.filters import MessageFilter, PostFilter from ..query.responses import PostsResponse from ..types import GenericMessage, StorageEnum -from ..utils import Writable +from ..utils import Writable, compute_sha256 DEFAULT_PAGE_SIZE = 200 @@ -231,6 +238,8 @@ def watch_messages( class AuthenticatedAlephClient(AlephClient): + account: Account + @abstractmethod async def create_post( self, @@ -444,6 +453,62 @@ async def forget( "Did you mean to import `AuthenticatedAlephHttpClient`?" ) + async def generate_signed_message( + self, + message_type: MessageType, + content: Dict[str, Any], + channel: Optional[str], + allow_inlining: bool = True, + storage_engine: StorageEnum = StorageEnum.storage, + ) -> AlephMessage: + """Generate a signed aleph.im message ready to be sent to the network. + + If the content is not inlined, it will be pushed to the storage engine via the API of a Core Channel Node. 
+ + :param message_type: Type of the message (PostMessage, ...) + :param content: User-defined content of the message + :param channel: Channel to use (Default: "TEST") + :param allow_inlining: Whether to allow inlining the content of the message (Default: True) + :param storage_engine: Storage engine to use (Default: "storage") + """ + + message_dict: Dict[str, Any] = { + "sender": self.account.get_address(), + "chain": self.account.CHAIN, + "type": message_type, + "content": content, + "time": time.time(), + "channel": channel, + } + + # Use the Pydantic encoder to serialize types like UUID, datetimes, etc. + item_content: str = json.dumps( + content, separators=(",", ":"), default=extended_json_encoder + ) + + if allow_inlining and (len(item_content) < settings.MAX_INLINE_SIZE): + message_dict["item_content"] = item_content + message_dict["item_hash"] = compute_sha256(item_content) + message_dict["item_type"] = ItemType.inline + else: + if storage_engine == StorageEnum.ipfs: + message_dict["item_hash"] = await self.ipfs_push( + content=content, + ) + message_dict["item_type"] = ItemType.ipfs + else: # storage + assert storage_engine == StorageEnum.storage + message_dict["item_hash"] = await self.storage_push( + content=content, + ) + message_dict["item_type"] = ItemType.storage + + message_dict = await self.account.sign_message(message_dict) + return parse_message(message_dict) + + # Alias for backwards compatibility + _prepare_aleph_message = generate_signed_message + @abstractmethod async def submit( self, diff --git a/src/aleph/sdk/client/authenticated_http.py b/src/aleph/sdk/client/authenticated_http.py index 315d2ea6..0d708af2 100644 --- a/src/aleph/sdk/client/authenticated_http.py +++ b/src/aleph/sdk/client/authenticated_http.py @@ -7,7 +7,6 @@ from typing import Any, Dict, List, Mapping, NoReturn, Optional, Tuple, Union import aiohttp -from aleph_message import parse_message from aleph_message.models import ( AggregateContent, AggregateMessage, @@ -17,7 
+16,6 @@ ForgetMessage, InstanceContent, InstanceMessage, - ItemType, MessageType, PostContent, PostMessage, @@ -622,54 +620,6 @@ async def forget( ) return message, status - @staticmethod - def compute_sha256(s: str) -> str: - h = hashlib.sha256() - h.update(s.encode("utf-8")) - return h.hexdigest() - - async def _prepare_aleph_message( - self, - message_type: MessageType, - content: Dict[str, Any], - channel: Optional[str], - allow_inlining: bool = True, - storage_engine: StorageEnum = StorageEnum.storage, - ) -> AlephMessage: - message_dict: Dict[str, Any] = { - "sender": self.account.get_address(), - "chain": self.account.CHAIN, - "type": message_type, - "content": content, - "time": time.time(), - "channel": channel, - } - - # Use the Pydantic encoder to serialize types like UUID, datetimes, etc. - item_content: str = json.dumps( - content, separators=(",", ":"), default=extended_json_encoder - ) - - if allow_inlining and (len(item_content) < settings.MAX_INLINE_SIZE): - message_dict["item_content"] = item_content - message_dict["item_hash"] = self.compute_sha256(item_content) - message_dict["item_type"] = ItemType.inline - else: - if storage_engine == StorageEnum.ipfs: - message_dict["item_hash"] = await self.ipfs_push( - content=content, - ) - message_dict["item_type"] = ItemType.ipfs - else: # storage - assert storage_engine == StorageEnum.storage - message_dict["item_hash"] = await self.storage_push( - content=content, - ) - message_dict["item_type"] = ItemType.storage - - message_dict = await self.account.sign_message(message_dict) - return parse_message(message_dict) - async def submit( self, content: Dict[str, Any], @@ -680,7 +630,7 @@ async def submit( sync: bool = False, raise_on_rejected: bool = True, ) -> Tuple[AlephMessage, MessageStatus, Optional[Dict[str, Any]]]: - message = await self._prepare_aleph_message( + message = await self.generate_signed_message( message_type=message_type, content=content, channel=channel, @@ -703,7 +653,7 @@ async def 
_storage_push_file_with_message( data = aiohttp.FormData() # Prepare the STORE message - message = await self._prepare_aleph_message( + message = await self.generate_signed_message( message_type=MessageType.store, content=store_content.dict(exclude_none=True), channel=channel, diff --git a/src/aleph/sdk/utils.py b/src/aleph/sdk/utils.py index ab17f44a..b1c04cdf 100644 --- a/src/aleph/sdk/utils.py +++ b/src/aleph/sdk/utils.py @@ -1,4 +1,5 @@ import errno +import hashlib import logging import os from datetime import date, datetime, time @@ -178,3 +179,8 @@ def parse_volume(volume_dict: Union[Mapping, MachineVolume]) -> MachineVolume: continue else: raise ValueError(f"Could not parse volume: {volume_dict}") + + +def compute_sha256(s: str) -> str: + """Compute the SHA256 hash of a string.""" + return hashlib.sha256(s.encode()).hexdigest() From 02490d18c4295adb2fe582647bc1e3fc0113f0da Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Mon, 10 Jun 2024 18:03:57 +0200 Subject: [PATCH 018/122] Fix: 'called_once' is not a valid assertion. AttributeError: 'called_once' is not a valid assertion. Use a spec for the mock if 'called_once' is meant to be an attribute. 
--- tests/unit/test_asynchronous.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/tests/unit/test_asynchronous.py b/tests/unit/test_asynchronous.py index f31274fd..0fa0df38 100644 --- a/tests/unit/test_asynchronous.py +++ b/tests/unit/test_asynchronous.py @@ -33,7 +33,7 @@ async def test_create_post(mock_session_with_post_success): sync=False, ) - assert mock_session_with_post_success.http_session.post.called_once + assert mock_session_with_post_success.http_session.post.assert_called_once assert isinstance(post_message, PostMessage) assert message_status == MessageStatus.PENDING @@ -47,7 +47,7 @@ async def test_create_aggregate(mock_session_with_post_success): channel="TEST", ) - assert mock_session_with_post_success.http_session.post.called_once + assert mock_session_with_post_success.http_session.post.assert_called_once assert isinstance(aggregate_message, AggregateMessage) @@ -83,7 +83,7 @@ async def test_create_store(mock_session_with_post_success): storage_engine=StorageEnum.storage, ) - assert mock_session_with_post_success.http_session.post.called + assert mock_session_with_post_success.http_session.post.assert_called assert isinstance(store_message, StoreMessage) @@ -98,7 +98,7 @@ async def test_create_program(mock_session_with_post_success): metadata={"tags": ["test"]}, ) - assert mock_session_with_post_success.http_session.post.called_once + assert mock_session_with_post_success.http_session.post.assert_called_once assert isinstance(program_message, ProgramMessage) @@ -118,7 +118,7 @@ async def test_create_instance(mock_session_with_post_success): hypervisor=HypervisorType.qemu, ) - assert mock_session_with_post_success.http_session.post.called_once + assert mock_session_with_post_success.http_session.post.assert_called_once assert isinstance(instance_message, InstanceMessage) @@ -139,7 +139,7 @@ async def test_create_instance_no_payment(mock_session_with_post_success): assert instance_message.content.payment.type == 
PaymentType.hold assert instance_message.content.payment.chain == Chain.ETH - assert mock_session_with_post_success.http_session.post.called_once + assert mock_session_with_post_success.http_session.post.assert_called_once assert isinstance(instance_message, InstanceMessage) @@ -159,7 +159,7 @@ async def test_create_instance_no_hypervisor(mock_session_with_post_success): assert instance_message.content.environment.hypervisor == HypervisorType.firecracker - assert mock_session_with_post_success.http_session.post.called_once + assert mock_session_with_post_success.http_session.post.assert_called_once assert isinstance(instance_message, InstanceMessage) @@ -172,7 +172,7 @@ async def test_forget(mock_session_with_post_success): channel="TEST", ) - assert mock_session_with_post_success.http_session.post.called_once + assert mock_session_with_post_success.http_session.post.assert_called_once assert isinstance(forget_message, ForgetMessage) From f6d0b7bc5ef65d793f3102cd37177acf3657e66d Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Thu, 13 Jun 2024 16:07:45 +0200 Subject: [PATCH 019/122] Fix: pkg_resources is obsolete to get package version Solution: Use `importlib.metadata.version` instead. 
--- src/aleph/sdk/__init__.py | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/src/aleph/sdk/__init__.py b/src/aleph/sdk/__init__.py index a3ecc693..358ddd96 100644 --- a/src/aleph/sdk/__init__.py +++ b/src/aleph/sdk/__init__.py @@ -1,17 +1,14 @@ -from pkg_resources import DistributionNotFound, get_distribution +from importlib.metadata import PackageNotFoundError, version from aleph.sdk.client import AlephHttpClient, AuthenticatedAlephHttpClient try: # Change here if project is renamed and does not equal the package name - dist_name = "aleph-sdk-python" - __version__ = get_distribution(dist_name).version -except DistributionNotFound: + __version__ = version("aleph-sdk-python") +except PackageNotFoundError: __version__ = "unknown" -finally: - del get_distribution, DistributionNotFound -__all__ = ["AlephHttpClient", "AuthenticatedAlephHttpClient"] +__all__ = ["__version__", "AlephHttpClient", "AuthenticatedAlephHttpClient"] def __getattr__(name): From fd1b0e1702029db0b958e5ea12d46f90e4872ae1 Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Thu, 13 Jun 2024 16:13:55 +0200 Subject: [PATCH 020/122] Fix: Ruff did not check code quality Solution: Add ruff to check the code quality and fix the reported errors. 
--- pyproject.toml | 9 +++++---- tests/unit/conftest.py | 6 ++---- tests/unit/test_domains.py | 4 ++-- tests/unit/test_download.py | 1 - tests/unit/test_wallet_ethereum.py | 2 +- 5 files changed, 10 insertions(+), 12 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f21f7ece..63fa6d35 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -141,19 +141,19 @@ dependencies = [ "black==24.2.0", "mypy==1.9.0", "mypy-extensions==1.0.0", - # "ruff==0.1.15", + "ruff==0.4.8", "isort==5.13.2", ] [tool.hatch.envs.linting.scripts] typing = "mypy --config-file=pyproject.toml {args:} ./src/ ./tests/ ./examples/" style = [ - # "ruff {args:.}", + "ruff {args:.} ./src/ ./tests/ ./examples/", "black --check --diff {args:} ./src/ ./tests/ ./examples/", "isort --check-only --profile black {args:} ./src/ ./tests/ ./examples/", ] fmt = [ "black {args:} ./src/ ./tests/ ./examples/", - # "ruff --fix {args:.}", + "ruff --fix {args:.} ./src/ ./tests/ ./examples/", "isort --profile black {args:} ./src/ ./tests/ ./examples/", "style", ] @@ -169,6 +169,7 @@ exclude = [ "conftest.py" ] show_column_numbers = true +check_untyped_defs = true # Import discovery # Install types for third-party library stubs (e.g. 
from typeshed repository) @@ -231,4 +232,4 @@ exclude_lines = [ # Don't complain about ineffective code: "pass", -] \ No newline at end of file +] diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index a5d2d481..4b06c243 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -1,4 +1,6 @@ +import asyncio import json +from functools import wraps from io import BytesIO from pathlib import Path from tempfile import NamedTemporaryFile @@ -196,10 +198,6 @@ def mock_session_with_post_success( return client -import asyncio -from functools import wraps - - def async_wrap(cls): class AsyncWrapper: def __init__(self, *args, **kwargs): diff --git a/tests/unit/test_domains.py b/tests/unit/test_domains.py index 380e4bb5..eadfcec1 100644 --- a/tests/unit/test_domains.py +++ b/tests/unit/test_domains.py @@ -47,7 +47,7 @@ async def test_configured_domain(): url = "https://custom-domain-unit-test.aleph.sh" hostname = hostname_from_url(url) status = await alephdns.check_domain(hostname, TargetType.IPFS, "0xfakeaddress") - assert type(status) is dict + assert isinstance(status, dict) @pytest.mark.asyncio @@ -57,4 +57,4 @@ async def test_not_configured_domain(): hostname = hostname_from_url(url) with pytest.raises(DomainConfigurationError): status = await alephdns.check_domain(hostname, TargetType.IPFS, "0xfakeaddress") - assert type(status) is None + assert status is None diff --git a/tests/unit/test_download.py b/tests/unit/test_download.py index ffffae74..a889949d 100644 --- a/tests/unit/test_download.py +++ b/tests/unit/test_download.py @@ -1,6 +1,5 @@ import tempfile from pathlib import Path -from unittest.mock import AsyncMock, patch import pytest diff --git a/tests/unit/test_wallet_ethereum.py b/tests/unit/test_wallet_ethereum.py index 0f798c9d..f7ca2157 100644 --- a/tests/unit/test_wallet_ethereum.py +++ b/tests/unit/test_wallet_ethereum.py @@ -23,7 +23,7 @@ async def test_ledger_eth_account(): address = account.get_address() assert address - assert 
type(address) is str + assert isinstance(address, str) assert len(address) == 42 message = Message("ETH", account.get_address(), "SomeType", "ItemHash") From 1197d2490810fc43e75331f3642f4e2634b9a7aa Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Thu, 13 Jun 2024 17:41:04 +0200 Subject: [PATCH 021/122] Update dependencies and supported systems in CI (#122) Fix: Latest OS and Python were not tested Add comment regarding eciespy --- .github/workflows/build-wheels.yml | 18 ++++++++++++------ .github/workflows/code-quality.yml | 15 ++++++++------- .github/workflows/pytest.yml | 22 +++++++++++++++------- pyproject.toml | 11 ++++++++--- 4 files changed, 43 insertions(+), 23 deletions(-) diff --git a/.github/workflows/build-wheels.yml b/.github/workflows/build-wheels.yml index 5155bc7e..c07479e8 100644 --- a/.github/workflows/build-wheels.yml +++ b/.github/workflows/build-wheels.yml @@ -11,8 +11,9 @@ on: jobs: build: strategy: + fail-fast: false matrix: - os: [macos-11, macos-12, ubuntu-20.04, ubuntu-22.04] + os: [macos-11, macos-12, macos-13, macos-14, ubuntu-20.04, ubuntu-22.04, ubuntu-24.04] runs-on: ${{ matrix.os }} steps: @@ -52,17 +53,22 @@ jobs: - name: Install Hatch run: | - python3 -m pip install --upgrade hatch + python3 -m venv /tmp/venv + /tmp/venv/bin/python3 -m pip install --upgrade hatch - name: Build source and wheel packages run: | - python3 -m hatch build + /tmp/venv/bin/python3 -m hatch build - name: Install the Python wheel run: | - python3 -m pip install dist/aleph_sdk_python-*.whl + /tmp/venv/bin/python3 -m pip install dist/aleph_sdk_python-*.whl + + - name: Install `setuptools` on systems where it is missing by default + run: /tmp/venv/bin/python3 -m pip install --upgrade setuptools + if: matrix.os == 'ubuntu-24.04' - name: Import and use the package run: | - python3 -c "import aleph.sdk" - python3 -c "from aleph.sdk.chains.ethereum import get_fallback_account; get_fallback_account()" + /tmp/venv/bin/python3 -c "import aleph.sdk" + 
/tmp/venv/bin/python3 -c "from aleph.sdk.chains.ethereum import get_fallback_account; get_fallback_account()" diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml index 79a3ac5b..16ec4e91 100644 --- a/.github/workflows/code-quality.yml +++ b/.github/workflows/code-quality.yml @@ -8,7 +8,7 @@ on: jobs: code-quality: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 @@ -16,10 +16,9 @@ jobs: - name: Workaround github issue https://github.com/actions/runner-images/issues/7192 run: sudo echo RESET grub-efi/install_devices | sudo debconf-communicate grub-pc - - name: Install pip and hatch + - name: Install system dependencies run: | - sudo apt-get install -y python3-pip - pip3 install hatch + sudo apt-get install -y python3-pip libsecp256k1-dev - name: Cache dependencies uses: actions/cache@v4 @@ -29,8 +28,10 @@ jobs: restore-keys: | ${{ runner.os }}-code-quality- - - name: Install required system packages only for Ubuntu Linux - run: sudo apt-get install -y libsecp256k1-dev + - name: Install python dependencies + run: | + python3 -m venv /tmp/venv + /tmp/venv/bin/pip install hatch - name: Run Hatch lint - run: hatch run linting:all + run: /tmp/venv/bin/hatch run linting:all diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 89c3dabd..efe667ea 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -2,13 +2,11 @@ name: Test/Coverage with Python on: push: - branches: - - main pull_request: branches: - main schedule: - # Run every night at 04:00 (GitHub Actions timezone) + # Run every night at 04:00 (GitHub Actions timezone) # in order to catch when unfrozen dependency updates # break the use of the library. 
- cron: '4 0 * * *' @@ -17,7 +15,7 @@ jobs: build: strategy: matrix: - python-version: [ "3.8", "3.9", "3.10", "3.11" ] + python-version: [ "3.9", "3.10", "3.11", "3.12" ] runs-on: ubuntu-latest steps: @@ -25,12 +23,22 @@ jobs: - uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} + - run: sudo apt-get install -y python3-pip libsecp256k1-dev - - run: python -m pip install --upgrade pip hatch coverage - - run: hatch run testing:test + + - run: | + python3 -m venv /tmp/venv + /tmp/venv/bin/python -m pip install --upgrade pip hatch coverage + + - run: | + /tmp/venv/bin/pip freeze + /tmp/venv/bin/hatch run testing:pip freeze + /tmp/venv/bin/hatch run testing:test if: matrix.python-version != '3.11' - - run: hatch run testing:cov + + - run: /tmp/venv/bin/hatch run testing:cov if: matrix.python-version == '3.11' + - uses: codecov/codecov-action@v4.0.1 if: matrix.python-version == '3.11' with: diff --git a/pyproject.toml b/pyproject.toml index 63fa6d35..d255c22a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,7 @@ dependencies = [ "aiohttp>=3.8.3", "aleph-message~=0.4.4", "coincurve; python_version<\"3.11\"", - "coincurve>=17.0.0; python_version>=\"3.11\"", + "coincurve>=19.0.0; python_version>=\"3.11\"", "eth_abi>=4.0.0; python_version>=\"3.11\"", "eth_account>=0.4.0,<0.11.0", "python-magic", @@ -67,13 +67,18 @@ tezos = [ "pynacl", ] encryption = [ - "eciespy; python_version<\"3.11\"", - "eciespy>=0.3.13; python_version>=\"3.11\"", + # Blocked by https://github.com/ecies/py/issues/356 + # "eciespy; python_version<'3.11'", + # "eciespy @ git+https://github.com/ecies/py.git@4b4256cde1d8acd773dff76fd8ab855a8e9faa4f#egg=eciespy; python_version>='3.11'" + "eciespy@git+https://github.com/ecies/py.git@4b4256cde1d8acd773dff76fd8ab855a8e9faa4f#egg=eciespy" ] all = [ "aleph-sdk-python[cosmos,dns,docs,ledger,mqtt,nuls2,polkadot,solana,tezos,encryption]", ] +[tool.hatch.metadata] +allow-direct-references = true + [project.urls] 
Documentation = "https://aleph.im/" Homepage = "https://github.com/aleph-im/aleph-sdk-python" From 0d8cd485ca64742e094aab1f16a25696b67ec5e0 Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Fri, 7 Jun 2024 23:54:46 +0200 Subject: [PATCH 022/122] Fix: Typing of item hashes were `str` not ItemHash --- src/aleph/sdk/client/abstract.py | 3 ++- src/aleph/sdk/client/authenticated_http.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/aleph/sdk/client/abstract.py b/src/aleph/sdk/client/abstract.py index 0d0d1e4e..9fce5469 100644 --- a/src/aleph/sdk/client/abstract.py +++ b/src/aleph/sdk/client/abstract.py @@ -19,6 +19,7 @@ from aleph_message.models import ( AlephMessage, + ItemHash, ItemType, MessagesResponse, MessageType, @@ -429,7 +430,7 @@ async def create_instance( @abstractmethod async def forget( self, - hashes: List[str], + hashes: List[ItemHash], reason: Optional[str], storage_engine: StorageEnum = StorageEnum.storage, channel: Optional[str] = None, diff --git a/src/aleph/sdk/client/authenticated_http.py b/src/aleph/sdk/client/authenticated_http.py index 0d708af2..755a4557 100644 --- a/src/aleph/sdk/client/authenticated_http.py +++ b/src/aleph/sdk/client/authenticated_http.py @@ -16,6 +16,7 @@ ForgetMessage, InstanceContent, InstanceMessage, + ItemHash, MessageType, PostContent, PostMessage, @@ -594,7 +595,7 @@ async def create_instance( async def forget( self, - hashes: List[str], + hashes: List[ItemHash], reason: Optional[str], storage_engine: StorageEnum = StorageEnum.storage, channel: Optional[str] = None, From 214db7cef44233c912ab5aedc5a22234ee626af9 Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Fri, 7 Jun 2024 23:55:07 +0200 Subject: [PATCH 023/122] Fix: Typing: Protocol AccountFromPrivateKey lacked method `sign_raw` --- src/aleph/sdk/types.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/aleph/sdk/types.py b/src/aleph/sdk/types.py index 2f57b280..8d17f4d4 100644 --- a/src/aleph/sdk/types.py +++ b/src/aleph/sdk/types.py 
@@ -32,5 +32,7 @@ class AccountFromPrivateKey(Account, Protocol): def __init__(self, private_key: bytes): ... + async def sign_raw(self, buffer: bytes) -> bytes: ... + GenericMessage = TypeVar("GenericMessage", bound=AlephMessage) From 3e82f1dc1b8e14813ae1201587012176869ce34c Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Fri, 14 Jun 2024 17:57:02 +0200 Subject: [PATCH 024/122] Fix: Release `eciespy` 0.4.2 solves the conflicts with coincurve --- pyproject.toml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index d255c22a..f2046681 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,10 +67,8 @@ tezos = [ "pynacl", ] encryption = [ - # Blocked by https://github.com/ecies/py/issues/356 - # "eciespy; python_version<'3.11'", - # "eciespy @ git+https://github.com/ecies/py.git@4b4256cde1d8acd773dff76fd8ab855a8e9faa4f#egg=eciespy; python_version>='3.11'" - "eciespy@git+https://github.com/ecies/py.git@4b4256cde1d8acd773dff76fd8ab855a8e9faa4f#egg=eciespy" + "eciespy; python_version<\"3.11\"", + "eciespy>=0.3.13; python_version>=\"3.11\"", ] all = [ "aleph-sdk-python[cosmos,dns,docs,ledger,mqtt,nuls2,polkadot,solana,tezos,encryption]", From ac6a42715d920a451247d6e83a45317ace367b60 Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Tue, 18 Jun 2024 16:40:00 +0200 Subject: [PATCH 025/122] Fix: Coverage only looked at test files (#128) --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f2046681..8a70e9c8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -127,8 +127,8 @@ dependencies = [ "secp256k1", ] [tool.hatch.envs.testing.scripts] -test = "pytest {args:tests}" -test-cov = "pytest --cov {args:tests}" +test = "pytest {args:} ./src/ ./tests/ ./examples/" +test-cov = "pytest --cov {args:} ./src/ ./tests/ ./examples/" cov-report = [ "- coverage combine", "coverage report", From 1d3d5e516e5ba547956dd3a8022abf93ccc75f2d Mon Sep 17 00:00:00 2001 
From: Hugo Herter Date: Wed, 19 Jun 2024 12:12:50 +0200 Subject: [PATCH 026/122] Fix: aiohttp issues a DeprecationWarning (#129) For passing bytes to a file upload instead of a BytesIO fix(authenticated_http/storage_push_file): fix and add type annotation --------- Co-authored-by: Laurent Peuch --- src/aleph/sdk/client/authenticated_http.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/aleph/sdk/client/authenticated_http.py b/src/aleph/sdk/client/authenticated_http.py index 755a4557..60d42b2b 100644 --- a/src/aleph/sdk/client/authenticated_http.py +++ b/src/aleph/sdk/client/authenticated_http.py @@ -3,6 +3,7 @@ import logging import ssl import time +from io import BytesIO from pathlib import Path from typing import Any, Dict, List, Mapping, NoReturn, Optional, Tuple, Union @@ -114,14 +115,14 @@ async def storage_push(self, content: Mapping) -> str: resp.raise_for_status() return (await resp.json()).get("hash") - async def ipfs_push_file(self, file_content: Union[str, bytes]) -> str: + async def ipfs_push_file(self, file_content: bytes) -> str: """ Push a file to the IPFS service. :param file_content: The file content to upload """ data = aiohttp.FormData() - data.add_field("file", file_content) + data.add_field("file", BytesIO(file_content)) url = "/api/v0/ipfs/add_file" logger.debug(f"Pushing file to IPFS on {url}") @@ -130,12 +131,12 @@ async def ipfs_push_file(self, file_content: Union[str, bytes]) -> str: resp.raise_for_status() return (await resp.json()).get("hash") - async def storage_push_file(self, file_content) -> str: + async def storage_push_file(self, file_content: bytes) -> Optional[str]: """ Push a file to the storage service. 
""" data = aiohttp.FormData() - data.add_field("file", file_content) + data.add_field("file", BytesIO(file_content)) url = "/api/v0/storage/add_file" logger.debug(f"Posting file on {url}") @@ -669,7 +670,7 @@ async def _storage_push_file_with_message( content_type="application/json", ) # Add the file - data.add_field("file", file_content) + data.add_field("file", BytesIO(file_content)) url = "/api/v0/storage/add_file" logger.debug(f"Posting file on {url}") From f41d9b6ad7b020729243f2c9a14536d584863006 Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Fri, 21 Jun 2024 12:40:19 +0200 Subject: [PATCH 027/122] Fix: Required an old version of `aleph-message` --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 8a70e9c8..bd4ce56c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ ] dependencies = [ "aiohttp>=3.8.3", - "aleph-message~=0.4.4", + "aleph-message>=0.4.7", "coincurve; python_version<\"3.11\"", "coincurve>=19.0.0; python_version>=\"3.11\"", "eth_abi>=4.0.0; python_version>=\"3.11\"", From 07c20e8d928663954fb8e6554b14ad9cc8ebbdae Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Fri, 21 Jun 2024 13:13:03 +0200 Subject: [PATCH 028/122] Fix: Newer aleph-message requires InstanceEnvironment Else tests were breaking. 
--- src/aleph/sdk/client/authenticated_http.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/aleph/sdk/client/authenticated_http.py b/src/aleph/sdk/client/authenticated_http.py index 60d42b2b..bf34e225 100644 --- a/src/aleph/sdk/client/authenticated_http.py +++ b/src/aleph/sdk/client/authenticated_http.py @@ -30,6 +30,7 @@ from aleph_message.models.execution.environment import ( FunctionEnvironment, HypervisorType, + InstanceEnvironment, MachineResources, ) from aleph_message.models.execution.instance import RootfsVolume @@ -539,8 +540,7 @@ async def create_instance( content = InstanceContent( address=address, allow_amend=allow_amend, - environment=FunctionEnvironment( - reproducible=False, + environment=InstanceEnvironment( internet=internet, aleph_api=aleph_api, hypervisor=hypervisor, From a7a923eab06196518d42a5604248cad3d134e326 Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Fri, 21 Jun 2024 13:13:20 +0200 Subject: [PATCH 029/122] Fix: Qemu was not the default hypervisor for instances. --- src/aleph/sdk/client/authenticated_http.py | 6 ++++-- tests/unit/test_asynchronous.py | 2 +- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/aleph/sdk/client/authenticated_http.py b/src/aleph/sdk/client/authenticated_http.py index bf34e225..6d44b526 100644 --- a/src/aleph/sdk/client/authenticated_http.py +++ b/src/aleph/sdk/client/authenticated_http.py @@ -535,7 +535,9 @@ async def create_instance( timeout_seconds = timeout_seconds or settings.DEFAULT_VM_TIMEOUT payment = payment or Payment(chain=Chain.ETH, type=PaymentType.hold) - hypervisor = hypervisor or HypervisorType.firecracker + + # Default to the QEMU hypervisor for instances. 
+ selected_hypervisor: HypervisorType = hypervisor or HypervisorType.qemu content = InstanceContent( address=address, @@ -543,7 +545,7 @@ async def create_instance( environment=InstanceEnvironment( internet=internet, aleph_api=aleph_api, - hypervisor=hypervisor, + hypervisor=selected_hypervisor, ), variables=environment_variables, resources=MachineResources( diff --git a/tests/unit/test_asynchronous.py b/tests/unit/test_asynchronous.py index 0fa0df38..0f909408 100644 --- a/tests/unit/test_asynchronous.py +++ b/tests/unit/test_asynchronous.py @@ -157,7 +157,7 @@ async def test_create_instance_no_hypervisor(mock_session_with_post_success): hypervisor=None, ) - assert instance_message.content.environment.hypervisor == HypervisorType.firecracker + assert instance_message.content.environment.hypervisor == HypervisorType.qemu assert mock_session_with_post_success.http_session.post.assert_called_once assert isinstance(instance_message, InstanceMessage) From 06e03353f75a6c5ee7850d6a47552c39cd5331bd Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Fri, 21 Jun 2024 14:42:38 +0200 Subject: [PATCH 030/122] Fix: Python 3.12 fails setup libsecp256k1 When "using bundled libsecp256k1", the setup using `/tmp/venv/bin/hatch run testing:test` fails to proceed on Python 3.12. That library `secp256k1` has been unmaintained for more than 2 years now (0.14.0, Nov 6, 2021), and seems to not support Python 3.12. 
The error in the logs: ``` File "/tmp/pip-build-env-ye8d6ort/overlay/lib/python3.12/site-packages/setuptools/_distutils/dist.py", line 862, in get_command_obj cmd_obj = self.command_obj[command] = klass(self) ^^^^^^^^^^^ TypeError: 'NoneType' object is not callable [end of output] ``` See failing CI run: https://github.com/aleph-im/aleph-sdk-python/actions/runs/9613634583/job/26516767722 --- .github/workflows/pytest.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index efe667ea..8d5456c6 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -14,8 +14,11 @@ on: jobs: build: strategy: + fail-fast: false matrix: - python-version: [ "3.9", "3.10", "3.11", "3.12" ] + python-version: [ "3.9", "3.10", "3.11" ] + # An issue with secp256k1 prevents Python 3.12 from working + # See https://github.com/baking-bad/pytezos/issues/370 runs-on: ubuntu-latest steps: From fb67b7a3cade59bd289cd27346e320f4f3b77b02 Mon Sep 17 00:00:00 2001 From: Laurent Peuch Date: Tue, 25 Jun 2024 17:17:34 +0200 Subject: [PATCH 031/122] doc(README): command to launch tests was incorrect --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index cfc7e1a4..3d2aea9c 100644 --- a/README.md +++ b/README.md @@ -67,7 +67,7 @@ $ pip install -e .[all] You can use the test env defined for hatch to run the tests: ```shell -$ hatch run test:run +$ hatch run testing:run ``` See `hatch env show` for more information about all the environments and their scripts. 
From cba4a34fca5403cafa37960b43036b5ae2beadc8 Mon Sep 17 00:00:00 2001 From: mhh Date: Thu, 27 Jun 2024 17:48:51 +0200 Subject: [PATCH 032/122] feat: rename aleph.sdk.chains.sol to aleph.sdk.chains.solana Co-authored-by: Laurent Peuch --- src/aleph/sdk/chains/sol.py | 98 +++------------------------------ src/aleph/sdk/chains/solana.py | 93 +++++++++++++++++++++++++++++++ tests/unit/conftest.py | 2 +- tests/unit/test_chain_solana.py | 2 +- 4 files changed, 102 insertions(+), 93 deletions(-) create mode 100644 src/aleph/sdk/chains/solana.py diff --git a/src/aleph/sdk/chains/sol.py b/src/aleph/sdk/chains/sol.py index ff870a4d..b8e85962 100644 --- a/src/aleph/sdk/chains/sol.py +++ b/src/aleph/sdk/chains/sol.py @@ -1,93 +1,9 @@ -import json -from pathlib import Path -from typing import Dict, Optional, Union +import warnings -import base58 -from nacl.exceptions import BadSignatureError as NaclBadSignatureError -from nacl.public import PrivateKey, SealedBox -from nacl.signing import SigningKey, VerifyKey +from aleph.sdk.chains.solana import * # noqa -from ..exceptions import BadSignatureError -from .common import BaseAccount, get_fallback_private_key, get_verification_buffer - - -def encode(item): - return base58.b58encode(bytes(item)).decode("ascii") - - -class SOLAccount(BaseAccount): - CHAIN = "SOL" - CURVE = "curve25519" - _signing_key: SigningKey - _private_key: PrivateKey - - def __init__(self, private_key: bytes): - self.private_key = private_key - self._signing_key = SigningKey(self.private_key) - self._private_key = self._signing_key.to_curve25519_private_key() - - async def sign_message(self, message: Dict) -> Dict: - """Sign a message inplace.""" - message = self._setup_sender(message) - verif = get_verification_buffer(message) - signature = await self.sign_raw(verif) - sig = { - "publicKey": self.get_address(), - "signature": encode(signature), - } - message["signature"] = json.dumps(sig) - return message - - async def sign_raw(self, buffer: bytes) -> bytes: - 
"""Sign a raw buffer.""" - sig = self._signing_key.sign(buffer) - return sig.signature - - def get_address(self) -> str: - return encode(self._signing_key.verify_key) - - def get_public_key(self) -> str: - return bytes(self._signing_key.verify_key.to_curve25519_public_key()).hex() - - async def encrypt(self, content) -> bytes: - value: bytes = bytes(SealedBox(self._private_key.public_key).encrypt(content)) - return value - - async def decrypt(self, content) -> bytes: - value: bytes = SealedBox(self._private_key).decrypt(content) - return value - - -def get_fallback_account(path: Optional[Path] = None) -> SOLAccount: - return SOLAccount(private_key=get_fallback_private_key(path=path)) - - -def generate_key() -> bytes: - privkey = bytes(SigningKey.generate()) - return privkey - - -def verify_signature( - signature: Union[bytes, str], - public_key: Union[bytes, str], - message: Union[bytes, str], -): - """ - Verifies a signature. - Args: - signature: The signature to verify. Can be a base58 encoded string or bytes. - public_key: The public key to use for verification. Can be a base58 encoded string or bytes. - message: The message to verify. Can be an utf-8 string or bytes. - Raises: - BadSignatureError: If the signature is invalid. 
- """ - if isinstance(signature, str): - signature = base58.b58decode(signature) - if isinstance(message, str): - message = message.encode("utf-8") - if isinstance(public_key, str): - public_key = base58.b58decode(public_key) - try: - VerifyKey(public_key).verify(message, signature) - except NaclBadSignatureError as e: - raise BadSignatureError from e +warnings.warn( + "aleph.sdk.chains.sol is deprecated, use aleph.sdk.chains.solana instead", + DeprecationWarning, + stacklevel=1, +) diff --git a/src/aleph/sdk/chains/solana.py b/src/aleph/sdk/chains/solana.py new file mode 100644 index 00000000..ff870a4d --- /dev/null +++ b/src/aleph/sdk/chains/solana.py @@ -0,0 +1,93 @@ +import json +from pathlib import Path +from typing import Dict, Optional, Union + +import base58 +from nacl.exceptions import BadSignatureError as NaclBadSignatureError +from nacl.public import PrivateKey, SealedBox +from nacl.signing import SigningKey, VerifyKey + +from ..exceptions import BadSignatureError +from .common import BaseAccount, get_fallback_private_key, get_verification_buffer + + +def encode(item): + return base58.b58encode(bytes(item)).decode("ascii") + + +class SOLAccount(BaseAccount): + CHAIN = "SOL" + CURVE = "curve25519" + _signing_key: SigningKey + _private_key: PrivateKey + + def __init__(self, private_key: bytes): + self.private_key = private_key + self._signing_key = SigningKey(self.private_key) + self._private_key = self._signing_key.to_curve25519_private_key() + + async def sign_message(self, message: Dict) -> Dict: + """Sign a message inplace.""" + message = self._setup_sender(message) + verif = get_verification_buffer(message) + signature = await self.sign_raw(verif) + sig = { + "publicKey": self.get_address(), + "signature": encode(signature), + } + message["signature"] = json.dumps(sig) + return message + + async def sign_raw(self, buffer: bytes) -> bytes: + """Sign a raw buffer.""" + sig = self._signing_key.sign(buffer) + return sig.signature + + def get_address(self) 
-> str: + return encode(self._signing_key.verify_key) + + def get_public_key(self) -> str: + return bytes(self._signing_key.verify_key.to_curve25519_public_key()).hex() + + async def encrypt(self, content) -> bytes: + value: bytes = bytes(SealedBox(self._private_key.public_key).encrypt(content)) + return value + + async def decrypt(self, content) -> bytes: + value: bytes = SealedBox(self._private_key).decrypt(content) + return value + + +def get_fallback_account(path: Optional[Path] = None) -> SOLAccount: + return SOLAccount(private_key=get_fallback_private_key(path=path)) + + +def generate_key() -> bytes: + privkey = bytes(SigningKey.generate()) + return privkey + + +def verify_signature( + signature: Union[bytes, str], + public_key: Union[bytes, str], + message: Union[bytes, str], +): + """ + Verifies a signature. + Args: + signature: The signature to verify. Can be a base58 encoded string or bytes. + public_key: The public key to use for verification. Can be a base58 encoded string or bytes. + message: The message to verify. Can be an utf-8 string or bytes. + Raises: + BadSignatureError: If the signature is invalid. 
+ """ + if isinstance(signature, str): + signature = base58.b58decode(signature) + if isinstance(message, str): + message = message.encode("utf-8") + if isinstance(public_key, str): + public_key = base58.b58decode(public_key) + try: + VerifyKey(public_key).verify(message, signature) + except NaclBadSignatureError as e: + raise BadSignatureError from e diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 4b06c243..3c5c1fe8 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -11,7 +11,7 @@ from aleph_message.models import AggregateMessage, AlephMessage, PostMessage import aleph.sdk.chains.ethereum as ethereum -import aleph.sdk.chains.sol as solana +import aleph.sdk.chains.solana as solana import aleph.sdk.chains.substrate as substrate import aleph.sdk.chains.tezos as tezos from aleph.sdk import AlephHttpClient, AuthenticatedAlephHttpClient diff --git a/tests/unit/test_chain_solana.py b/tests/unit/test_chain_solana.py index 07b67602..ed2fff78 100644 --- a/tests/unit/test_chain_solana.py +++ b/tests/unit/test_chain_solana.py @@ -8,7 +8,7 @@ from nacl.signing import VerifyKey from aleph.sdk.chains.common import get_verification_buffer -from aleph.sdk.chains.sol import SOLAccount, get_fallback_account, verify_signature +from aleph.sdk.chains.solana import SOLAccount, get_fallback_account, verify_signature from aleph.sdk.exceptions import BadSignatureError From f6a12671c8c2062654bb8450dbcd0bb24588779b Mon Sep 17 00:00:00 2001 From: mhh Date: Thu, 27 Jun 2024 17:49:17 +0200 Subject: [PATCH 033/122] feat: rename polkadot extra to substrate Co-authored-by: Laurent Peuch --- pyproject.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index bd4ce56c..c073e689 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -54,7 +54,7 @@ mqtt = [ nuls2 = [ "aleph-nuls2", ] -polkadot = [ +substrate = [ "py-sr25519-bindings", "substrate-interface", ] @@ -71,7 +71,7 @@ encryption = [ "eciespy>=0.3.13; 
python_version>=\"3.11\"", ] all = [ - "aleph-sdk-python[cosmos,dns,docs,ledger,mqtt,nuls2,polkadot,solana,tezos,encryption]", + "aleph-sdk-python[cosmos,dns,docs,ledger,mqtt,nuls2,substrate,solana,tezos,encryption]", ] [tool.hatch.metadata] @@ -112,7 +112,7 @@ features = [ "dns", "ledger", "nuls2", - "polkadot", + "substrate", "solana", "tezos", "encryption", From 94fb0897e431d1029efa54cc155369a222e8d469 Mon Sep 17 00:00:00 2001 From: mhh Date: Thu, 27 Jun 2024 17:49:46 +0200 Subject: [PATCH 034/122] refactor: remove unused dependency typer Co-authored-by: Laurent Peuch --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index c073e689..22423e85 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,6 @@ dependencies = [ "eth_abi>=4.0.0; python_version>=\"3.11\"", "eth_account>=0.4.0,<0.11.0", "python-magic", - "typer", "typing_extensions", ] From 898193543205ad138a0b64c89946be73ff6827f5 Mon Sep 17 00:00:00 2001 From: Bram Date: Fri, 5 Jul 2024 11:55:37 +0200 Subject: [PATCH 035/122] ci: remove macos-11 build since it's not supported anymore (#139) --- .github/workflows/build-wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-wheels.yml b/.github/workflows/build-wheels.yml index c07479e8..55e39e75 100644 --- a/.github/workflows/build-wheels.yml +++ b/.github/workflows/build-wheels.yml @@ -13,7 +13,7 @@ jobs: strategy: fail-fast: false matrix: - os: [macos-11, macos-12, macos-13, macos-14, ubuntu-20.04, ubuntu-22.04, ubuntu-24.04] + os: [macos-12, macos-13, macos-14, ubuntu-20.04, ubuntu-22.04, ubuntu-24.04] runs-on: ${{ matrix.os }} steps: From e1704ef3ae05bb886531bb6fc432c6148763411d Mon Sep 17 00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Fri, 5 Jul 2024 11:56:15 +0200 Subject: [PATCH 036/122] Feature: Allow User to control their VM (#124) * Feature: VmClient * Fix: Protocol (http/https) should not be hardcoded. 
One place hardcoded `http://`, the other one `https://`. * Fix: There was no test for `notify_allocation()`. * WIP: Copy authentication functions from aleph-vm * Fix: vm client sessions wasn't close + authentifications for test will use localhost as domain * Add: Unit test for {perform_operation, stop, reboot, erase, expire} * Refactor: logs didn't need to generate full header Fix: extracts domain from node url instead of sending url Fix: using vmclient sessions in get_logs instead of creating new one * Add: get_logs test * Fix: black in aleph_vm_authentification.py fix: isort issue Fix: mypy issue Fix: black Fix: isort * Fix: fully remove _generate_header call in get_logs Fix Fix: using real path server instead of fake server for test Fix: create playload * Fix: black issue * Fix: test fix workflow * feat(vm_client): add missing types annotations * refactor(vm_client): remove duplicated types annotations * refactor(vm_client): avoid using single letter variable names * feat(vm_client): increase test_notify_allocation precision * refactor(vm_client): add empty lines for code readability * style: run linting:fmt * Fix: Required an old version of `aleph-message` * Fix: Newer aleph-message requires InstanceEnvironment Else tests were breaking. * Fix: Qemu was not the default hypervisor for instances. * Fix: Pythom 3.12 fails setup libsecp256k1 When "using bundled libsecp256k1", the setup using `/tmp/venv/bin/hatch run testing:test` fails to proceed on Python 3.12. That library `secp256k1` has been unmaintained for more than 2 years now (0.14.0, Nov 6, 2021), and seems to not support Python 3.12. 
The error in the logs: ``` File "/tmp/pip-build-env-ye8d6ort/overlay/lib/python3.12/site-packages/setuptools/_distutils/dist.py", line 862, in get_command_obj cmd_obj = self.command_obj[command] = klass(self) ^^^^^^^^^^^ TypeError: 'NoneType' object is not callable [end of output] ``` See failing CI run: https://github.com/aleph-im/aleph-sdk-python/actions/runs/9613634583/job/26516767722 * doc(README): command to launch tests was incorrect * Refactor: create and sign playload goes to utils and some fix * Fix: linting issue * Fix: mypy issue * fix: black * feat: use bytes_from_hex where it makes sens * chore: use ruff new CLI api * feat: add unit tests for authentication mechanisms of VmClient * fix: debug code remove * Update vmclient.py Co-authored-by: Olivier Le Thanh Duong * Fix: update unit test to use stream_logs endpoint instead of logs * Implement `VmConfidentialClient` class (#138) * Problem: A user cannot initialize an already created confidential VM. Solution: Implement `VmConfidentialClient` class to be able to initialize and interact with confidential VMs. 
* Problem: Auth was not working Corrections: * Measurement type returned was missing field needed for validation of measurements * Port number was not handled correctly in authentifaction * Adapt to new auth protocol where domain is moved to the operation field (While keeping compat with the old format) * Get measurement was not working since signed with the wrong method * inject_secret was not sending a json * Websocked auth was sending a twice serialized json * update 'vendorized' aleph-vm auth file from source Co-authored-by: Hugo Herter Co-authored-by: Laurent Peuch Co-authored-by: Olivier Le Thanh Duong Co-authored-by: nesitor --- pyproject.toml | 8 +- src/aleph/sdk/chains/common.py | 7 - src/aleph/sdk/chains/ethereum.py | 8 +- src/aleph/sdk/chains/substrate.py | 3 +- src/aleph/sdk/client/vm_client.py | 192 +++++++++++ .../sdk/client/vm_confidential_client.py | 216 +++++++++++++ src/aleph/sdk/types.py | 28 ++ src/aleph/sdk/utils.py | 198 +++++++++++- src/aleph/sdk/wallets/ledger/ethereum.py | 3 +- tests/unit/aleph_vm_authentication.py | 290 +++++++++++++++++ tests/unit/test_vm_client.py | 297 ++++++++++++++++++ tests/unit/test_vm_confidential_client.py | 216 +++++++++++++ 12 files changed, 1447 insertions(+), 19 deletions(-) create mode 100644 src/aleph/sdk/client/vm_client.py create mode 100644 src/aleph/sdk/client/vm_confidential_client.py create mode 100644 tests/unit/aleph_vm_authentication.py create mode 100644 tests/unit/test_vm_client.py create mode 100644 tests/unit/test_vm_confidential_client.py diff --git a/pyproject.toml b/pyproject.toml index 22423e85..918462a9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,8 +28,10 @@ dependencies = [ "coincurve>=19.0.0; python_version>=\"3.11\"", "eth_abi>=4.0.0; python_version>=\"3.11\"", "eth_account>=0.4.0,<0.11.0", + "jwcrypto==1.5.6", "python-magic", "typing_extensions", + "aioresponses>=0.7.6" ] [project.optional-dependencies] @@ -121,6 +123,8 @@ dependencies = [ "pytest-cov==4.1.0", 
"pytest-mock==3.12.0", "pytest-asyncio==0.23.5", + "pytest-aiohttp==1.0.5", + "aioresponses==0.7.6", "fastapi", "httpx", "secp256k1", @@ -149,13 +153,13 @@ dependencies = [ [tool.hatch.envs.linting.scripts] typing = "mypy --config-file=pyproject.toml {args:} ./src/ ./tests/ ./examples/" style = [ - "ruff {args:.} ./src/ ./tests/ ./examples/", + "ruff check {args:.} ./src/ ./tests/ ./examples/", "black --check --diff {args:} ./src/ ./tests/ ./examples/", "isort --check-only --profile black {args:} ./src/ ./tests/ ./examples/", ] fmt = [ "black {args:} ./src/ ./tests/ ./examples/", - "ruff --fix {args:.} ./src/ ./tests/ ./examples/", + "ruff check --fix {args:.} ./src/ ./tests/ ./examples/", "isort --profile black {args:} ./src/ ./tests/ ./examples/", "style", ] diff --git a/src/aleph/sdk/chains/common.py b/src/aleph/sdk/chains/common.py index b73d6e41..0a90183c 100644 --- a/src/aleph/sdk/chains/common.py +++ b/src/aleph/sdk/chains/common.py @@ -170,10 +170,3 @@ def get_fallback_private_key(path: Optional[Path] = None) -> bytes: if not default_key_path.exists(): default_key_path.symlink_to(path) return private_key - - -def bytes_from_hex(hex_string: str) -> bytes: - if hex_string.startswith("0x"): - hex_string = hex_string[2:] - hex_string = bytes.fromhex(hex_string) - return hex_string diff --git a/src/aleph/sdk/chains/ethereum.py b/src/aleph/sdk/chains/ethereum.py index 124fbee7..b0fa5fbe 100644 --- a/src/aleph/sdk/chains/ethereum.py +++ b/src/aleph/sdk/chains/ethereum.py @@ -7,12 +7,8 @@ from eth_keys.exceptions import BadSignature as EthBadSignatureError from ..exceptions import BadSignatureError -from .common import ( - BaseAccount, - bytes_from_hex, - get_fallback_private_key, - get_public_key, -) +from ..utils import bytes_from_hex +from .common import BaseAccount, get_fallback_private_key, get_public_key class ETHAccount(BaseAccount): diff --git a/src/aleph/sdk/chains/substrate.py b/src/aleph/sdk/chains/substrate.py index 13795568..f4d18a0d 100644 --- 
a/src/aleph/sdk/chains/substrate.py +++ b/src/aleph/sdk/chains/substrate.py @@ -9,7 +9,8 @@ from ..conf import settings from ..exceptions import BadSignatureError -from .common import BaseAccount, bytes_from_hex, get_verification_buffer +from ..utils import bytes_from_hex +from .common import BaseAccount, get_verification_buffer logger = logging.getLogger(__name__) diff --git a/src/aleph/sdk/client/vm_client.py b/src/aleph/sdk/client/vm_client.py new file mode 100644 index 00000000..4092851d --- /dev/null +++ b/src/aleph/sdk/client/vm_client.py @@ -0,0 +1,192 @@ +import datetime +import json +import logging +from typing import Any, AsyncGenerator, Dict, List, Optional, Tuple +from urllib.parse import urlparse + +import aiohttp +from aleph_message.models import ItemHash +from eth_account.messages import encode_defunct +from jwcrypto import jwk + +from aleph.sdk.types import Account +from aleph.sdk.utils import ( + create_vm_control_payload, + sign_vm_control_payload, + to_0x_hex, +) + +logger = logging.getLogger(__name__) + + +class VmClient: + account: Account + ephemeral_key: jwk.JWK + node_url: str + pubkey_payload: Dict[str, Any] + pubkey_signature_header: str + session: aiohttp.ClientSession + + def __init__( + self, + account: Account, + node_url: str = "", + session: Optional[aiohttp.ClientSession] = None, + ): + self.account = account + self.ephemeral_key = jwk.JWK.generate(kty="EC", crv="P-256") + self.node_url = node_url + self.pubkey_payload = self._generate_pubkey_payload() + self.pubkey_signature_header = "" + self.session = session or aiohttp.ClientSession() + + def _generate_pubkey_payload(self) -> Dict[str, Any]: + return { + "pubkey": json.loads(self.ephemeral_key.export_public()), + "alg": "ECDSA", + "domain": self.node_domain, + "address": self.account.get_address(), + "expires": ( + datetime.datetime.utcnow() + datetime.timedelta(days=1) + ).isoformat() + + "Z", + } + + async def _generate_pubkey_signature_header(self) -> str: + pubkey_payload = 
json.dumps(self.pubkey_payload).encode("utf-8").hex() + signable_message = encode_defunct(hexstr=pubkey_payload) + buffer_to_sign = signable_message.body + + signed_message = await self.account.sign_raw(buffer_to_sign) + pubkey_signature = to_0x_hex(signed_message) + + return json.dumps( + { + "sender": self.account.get_address(), + "payload": pubkey_payload, + "signature": pubkey_signature, + "content": {"domain": self.node_domain}, + } + ) + + async def _generate_header( + self, vm_id: ItemHash, operation: str, method: str + ) -> Tuple[str, Dict[str, str]]: + payload = create_vm_control_payload( + vm_id, operation, domain=self.node_domain, method=method + ) + signed_operation = sign_vm_control_payload(payload, self.ephemeral_key) + + if not self.pubkey_signature_header: + self.pubkey_signature_header = ( + await self._generate_pubkey_signature_header() + ) + + headers = { + "X-SignedPubKey": self.pubkey_signature_header, + "X-SignedOperation": signed_operation, + } + + path = payload["path"] + return f"{self.node_url}{path}", headers + + @property + def node_domain(self) -> str: + domain = urlparse(self.node_url).hostname + if not domain: + raise Exception("Could not parse node domain") + return domain + + async def perform_operation( + self, vm_id: ItemHash, operation: str, method: str = "POST" + ) -> Tuple[Optional[int], str]: + if not self.pubkey_signature_header: + self.pubkey_signature_header = ( + await self._generate_pubkey_signature_header() + ) + + url, header = await self._generate_header( + vm_id=vm_id, operation=operation, method=method + ) + + try: + async with self.session.request( + method=method, url=url, headers=header + ) as response: + response_text = await response.text() + return response.status, response_text + + except aiohttp.ClientError as e: + logger.error(f"HTTP error during operation {operation}: {str(e)}") + return None, str(e) + + async def get_logs(self, vm_id: ItemHash) -> AsyncGenerator[str, None]: + if not 
self.pubkey_signature_header: + self.pubkey_signature_header = ( + await self._generate_pubkey_signature_header() + ) + + payload = create_vm_control_payload( + vm_id, "stream_logs", method="get", domain=self.node_domain + ) + signed_operation = sign_vm_control_payload(payload, self.ephemeral_key) + path = payload["path"] + ws_url = f"{self.node_url}{path}" + + async with self.session.ws_connect(ws_url) as ws: + auth_message = { + "auth": { + "X-SignedPubKey": json.loads(self.pubkey_signature_header), + "X-SignedOperation": json.loads(signed_operation), + } + } + await ws.send_json(auth_message) + + async for msg in ws: # msg is of type aiohttp.WSMessage + if msg.type == aiohttp.WSMsgType.TEXT: + yield msg.data + elif msg.type == aiohttp.WSMsgType.ERROR: + break + + async def start_instance(self, vm_id: ItemHash) -> Tuple[int, str]: + return await self.notify_allocation(vm_id) + + async def stop_instance(self, vm_id: ItemHash) -> Tuple[Optional[int], str]: + return await self.perform_operation(vm_id, "stop") + + async def reboot_instance(self, vm_id: ItemHash) -> Tuple[Optional[int], str]: + return await self.perform_operation(vm_id, "reboot") + + async def erase_instance(self, vm_id: ItemHash) -> Tuple[Optional[int], str]: + return await self.perform_operation(vm_id, "erase") + + async def expire_instance(self, vm_id: ItemHash) -> Tuple[Optional[int], str]: + return await self.perform_operation(vm_id, "expire") + + async def notify_allocation(self, vm_id: ItemHash) -> Tuple[int, str]: + json_data = {"instance": vm_id} + + async with self.session.post( + f"{self.node_url}/control/allocation/notify", json=json_data + ) as session: + form_response_text = await session.text() + + return session.status, form_response_text + + async def manage_instance( + self, vm_id: ItemHash, operations: List[str] + ) -> Tuple[int, str]: + for operation in operations: + status, response = await self.perform_operation(vm_id, operation) + if status != 200 and status: + return status, 
response + return 200, "All operations completed successfully" + + async def close(self): + await self.session.close() + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_value, traceback): + await self.close() diff --git a/src/aleph/sdk/client/vm_confidential_client.py b/src/aleph/sdk/client/vm_confidential_client.py new file mode 100644 index 00000000..a100de8c --- /dev/null +++ b/src/aleph/sdk/client/vm_confidential_client.py @@ -0,0 +1,216 @@ +import base64 +import json +import logging +import os +import tempfile +from pathlib import Path +from typing import Any, Dict, Optional, Tuple + +import aiohttp +from aleph_message.models import ItemHash + +from aleph.sdk.client.vm_client import VmClient +from aleph.sdk.types import Account, SEVMeasurement +from aleph.sdk.utils import ( + compute_confidential_measure, + encrypt_secret_table, + get_vm_measure, + make_packet_header, + make_secret_table, + run_in_subprocess, +) + +logger = logging.getLogger(__name__) + + +class VmConfidentialClient(VmClient): + sevctl_path: Path + + def __init__( + self, + account: Account, + sevctl_path: Path, + node_url: str = "", + session: Optional[aiohttp.ClientSession] = None, + ): + super().__init__(account, node_url, session) + self.sevctl_path = sevctl_path + + async def get_certificates(self) -> Tuple[Optional[int], str]: + """ + Get platform confidential certificate + """ + + url = f"{self.node_url}/about/certificates" + try: + async with self.session.get(url) as response: + data = await response.read() + with tempfile.NamedTemporaryFile(delete=False) as tmp_file: + tmp_file.write(data) + return response.status, tmp_file.name + + except aiohttp.ClientError as e: + logger.error( + f"HTTP error getting node certificates on {self.node_url}: {str(e)}" + ) + return None, str(e) + + async def create_session( + self, vm_id: ItemHash, certificate_path: Path, policy: int + ) -> Path: + """ + Create new confidential session + """ + + current_path = 
Path().cwd() + args = [ + "session", + "--name", + str(vm_id), + str(certificate_path), + str(policy), + ] + try: + # TODO: Check command result + await self.sevctl_cmd(*args) + return current_path + except Exception as e: + raise ValueError(f"Session creation have failed, reason: {str(e)}") + + async def initialize(self, vm_id: ItemHash, session: Path, godh: Path) -> str: + """ + Initialize Confidential VM negociation passing the needed session files + """ + + session_file = session.read_bytes() + godh_file = godh.read_bytes() + params = { + "session": session_file, + "godh": godh_file, + } + return await self.perform_confidential_operation( + vm_id, "confidential/initialize", params=params + ) + + async def measurement(self, vm_id: ItemHash) -> SEVMeasurement: + """ + Fetch VM confidential measurement + """ + + if not self.pubkey_signature_header: + self.pubkey_signature_header = ( + await self._generate_pubkey_signature_header() + ) + + status, text = await self.perform_operation( + vm_id, "confidential/measurement", method="GET" + ) + sev_mesurement = SEVMeasurement.parse_raw(text) + return sev_mesurement + + async def validate_measure( + self, sev_data: SEVMeasurement, tik_path: Path, firmware_hash: str + ) -> bool: + """ + Validate VM confidential measurement + """ + + tik = tik_path.read_bytes() + vm_measure, nonce = get_vm_measure(sev_data) + + expected_measure = compute_confidential_measure( + sev_info=sev_data.sev_info, + tik=tik, + expected_hash=firmware_hash, + nonce=nonce, + ).digest() + return expected_measure == vm_measure + + async def build_secret( + self, tek_path: Path, tik_path: Path, sev_data: SEVMeasurement, secret: str + ) -> Tuple[str, str]: + """ + Build disk secret to be injected on the confidential VM + """ + + tek = tek_path.read_bytes() + tik = tik_path.read_bytes() + + vm_measure, _ = get_vm_measure(sev_data) + + iv = os.urandom(16) + secret_table = make_secret_table(secret) + encrypted_secret_table = encrypt_secret_table( + 
secret_table=secret_table, tek=tek, iv=iv + ) + + packet_header = make_packet_header( + vm_measure=vm_measure, + encrypted_secret_table=encrypted_secret_table, + secret_table_size=len(secret_table), + tik=tik, + iv=iv, + ) + + encoded_packet_header = base64.b64encode(packet_header).decode() + encoded_secret = base64.b64encode(encrypted_secret_table).decode() + + return encoded_packet_header, encoded_secret + + async def inject_secret( + self, vm_id: ItemHash, packet_header: str, secret: str + ) -> Dict: + """ + Send the secret by the encrypted channel to boot up the VM + """ + + params = { + "packet_header": packet_header, + "secret": secret, + } + text = await self.perform_confidential_operation( + vm_id, "confidential/inject_secret", json=params + ) + + return json.loads(text) + + async def perform_confidential_operation( + self, + vm_id: ItemHash, + operation: str, + params: Optional[Dict[str, Any]] = None, + json=None, + ) -> str: + """ + Send confidential operations to the CRN passing the auth headers on each request + """ + + if not self.pubkey_signature_header: + self.pubkey_signature_header = ( + await self._generate_pubkey_signature_header() + ) + + url, header = await self._generate_header( + vm_id=vm_id, operation=operation, method="post" + ) + + try: + async with self.session.post( + url, headers=header, data=params, json=json + ) as response: + response.raise_for_status() + response_text = await response.text() + return response_text + + except aiohttp.ClientError as e: + raise ValueError(f"HTTP error during operation {operation}: {str(e)}") + + async def sevctl_cmd(self, *args) -> bytes: + """ + Execute `sevctl` command with given arguments + """ + + return await run_in_subprocess( + [str(self.sevctl_path), *args], + check=True, + ) diff --git a/src/aleph/sdk/types.py b/src/aleph/sdk/types.py index 8d17f4d4..cf9e6fa8 100644 --- a/src/aleph/sdk/types.py +++ b/src/aleph/sdk/types.py @@ -2,6 +2,8 @@ from enum import Enum from typing import Dict, 
Protocol, TypeVar +from pydantic import BaseModel + __all__ = ("StorageEnum", "Account", "AccountFromPrivateKey", "GenericMessage") from aleph_message.models import AlephMessage @@ -20,6 +22,9 @@ class Account(Protocol): @abstractmethod async def sign_message(self, message: Dict) -> Dict: ... + @abstractmethod + async def sign_raw(self, buffer: bytes) -> bytes: ... + @abstractmethod def get_address(self) -> str: ... @@ -36,3 +41,26 @@ async def sign_raw(self, buffer: bytes) -> bytes: ... GenericMessage = TypeVar("GenericMessage", bound=AlephMessage) + + +class SEVInfo(BaseModel): + """ + An AMD SEV platform information. + """ + + enabled: bool + api_major: int + api_minor: int + build_id: int + policy: int + state: str + handle: int + + +class SEVMeasurement(BaseModel): + """ + A SEV measurement data get from Qemu measurement. + """ + + sev_info: SEVInfo + launch_measure: str diff --git a/src/aleph/sdk/utils.py b/src/aleph/sdk/utils.py index b1c04cdf..5c641d5c 100644 --- a/src/aleph/sdk/utils.py +++ b/src/aleph/sdk/utils.py @@ -1,14 +1,21 @@ +import asyncio +import base64 import errno import hashlib +import hmac +import json import logging import os +import subprocess from datetime import date, datetime, time from enum import Enum from pathlib import Path from shutil import make_archive from typing import ( Any, + Dict, Iterable, + List, Mapping, Optional, Protocol, @@ -18,15 +25,19 @@ Union, get_args, ) +from uuid import UUID from zipfile import BadZipFile, ZipFile -from aleph_message.models import MessageType +from aleph_message.models import ItemHash, MessageType from aleph_message.models.execution.program import Encoding from aleph_message.models.execution.volume import MachineVolume +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes +from jwcrypto.jwa import JWA from pydantic.json import pydantic_encoder from aleph.sdk.conf import settings -from aleph.sdk.types import 
GenericMessage +from aleph.sdk.types import GenericMessage, SEVInfo, SEVMeasurement logger = logging.getLogger(__name__) @@ -184,3 +195,186 @@ def parse_volume(volume_dict: Union[Mapping, MachineVolume]) -> MachineVolume: def compute_sha256(s: str) -> str: """Compute the SHA256 hash of a string.""" return hashlib.sha256(s.encode()).hexdigest() + + +def to_0x_hex(b: bytes) -> str: + return "0x" + bytes.hex(b) + + +def bytes_from_hex(hex_string: str) -> bytes: + if hex_string.startswith("0x"): + hex_string = hex_string[2:] + hex_string = bytes.fromhex(hex_string) + return hex_string + + +def create_vm_control_payload( + vm_id: ItemHash, operation: str, domain: str, method: str +) -> Dict[str, str]: + path = f"/control/machine/{vm_id}/{operation}" + payload = { + "time": datetime.utcnow().isoformat() + "Z", + "method": method.upper(), + "path": path, + "domain": domain, + } + return payload + + +def sign_vm_control_payload(payload: Dict[str, str], ephemeral_key) -> str: + payload_as_bytes = json.dumps(payload).encode("utf-8") + payload_signature = JWA.signing_alg("ES256").sign(ephemeral_key, payload_as_bytes) + signed_operation = json.dumps( + { + "payload": payload_as_bytes.hex(), + "signature": payload_signature.hex(), + } + ) + return signed_operation + + +async def run_in_subprocess( + command: List[str], check: bool = True, stdin_input: Optional[bytes] = None +) -> bytes: + """Run the specified command in a subprocess, returns the stdout of the process.""" + logger.debug(f"command: {' '.join(command)}") + + process = await asyncio.create_subprocess_exec( + *command, + stdin=asyncio.subprocess.PIPE, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + stdout, stderr = await process.communicate(input=stdin_input) + + if check and process.returncode: + logger.error( + f"Command failed with error code {process.returncode}:\n" + f" stdin = {stdin_input!r}\n" + f" command = {command}\n" + f" stdout = {stderr!r}" + ) + raise 
subprocess.CalledProcessError( + process.returncode, str(command), stderr.decode() + ) + + return stdout + + +def get_vm_measure(sev_data: SEVMeasurement) -> Tuple[bytes, bytes]: + launch_measure = base64.b64decode(sev_data.launch_measure) + vm_measure = launch_measure[0:32] + nonce = launch_measure[32:48] + return vm_measure, nonce + + +def compute_confidential_measure( + sev_info: SEVInfo, tik: bytes, expected_hash: str, nonce: bytes +) -> hmac.HMAC: + """ + Computes the SEV measurement using the CRN SEV data and local variables like the OVMF firmware hash, + and the session key generated. + """ + + h = hmac.new(tik, digestmod="sha256") + + ## + # calculated per section 6.5.2 + ## + h.update(bytes([0x04])) + h.update(sev_info.api_major.to_bytes(1, byteorder="little")) + h.update(sev_info.api_minor.to_bytes(1, byteorder="little")) + h.update(sev_info.build_id.to_bytes(1, byteorder="little")) + h.update(sev_info.policy.to_bytes(4, byteorder="little")) + + expected_hash_bytes = bytearray.fromhex(expected_hash) + h.update(expected_hash_bytes) + + h.update(nonce) + + return h + + +def make_secret_table(secret: str) -> bytearray: + """ + Makes the disk secret table to be sent to the Confidential CRN + """ + + ## + # Construct the secret table: two guids + 4 byte lengths plus string + # and zero terminator + # + # Secret layout is guid, len (4 bytes), data + # with len being the length from start of guid to end of data + # + # The table header covers the entire table then each entry covers + # only its local data + # + # our current table has the header guid with total table length + # followed by the secret guid with the zero terminated secret + ## + + # total length of table: header plus one entry with trailing \0 + length = 16 + 4 + 16 + 4 + len(secret) + 1 + # SEV-ES requires rounding to 16 + length = (length + 15) & ~15 + secret_table = bytearray(length) + + secret_table[0:16] = UUID("{1e74f542-71dd-4d66-963e-ef4287ff173b}").bytes_le + secret_table[16:20] = 
len(secret_table).to_bytes(4, byteorder="little") + secret_table[20:36] = UUID("{736869e5-84f0-4973-92ec-06879ce3da0b}").bytes_le + secret_table[36:40] = (16 + 4 + len(secret) + 1).to_bytes(4, byteorder="little") + secret_table[40 : 40 + len(secret)] = secret.encode() + + return secret_table + + +def encrypt_secret_table(secret_table: bytes, tek: bytes, iv: bytes) -> bytes: + """Encrypt the secret table with the TEK in CTR mode using a random IV""" + + # Initialize the cipher with AES algorithm and CTR mode + cipher = Cipher(algorithms.AES(tek), modes.CTR(iv), backend=default_backend()) + encryptor = cipher.encryptor() + + # Encrypt the secret table + encrypted_secret = encryptor.update(secret_table) + encryptor.finalize() + + return encrypted_secret + + +def make_packet_header( + vm_measure: bytes, + encrypted_secret_table: bytes, + secret_table_size: int, + tik: bytes, + iv: bytes, +) -> bytearray: + """ + Creates a packet header using the encrypted disk secret table to be sent to the Confidential CRN + """ + + ## + # ultimately needs to be an argument, but there's only + # compressed and no real use case + ## + flags = 0 + + ## + # Table 55. 
LAUNCH_SECRET Packet Header Buffer + ## + header = bytearray(52) + header[0:4] = flags.to_bytes(4, byteorder="little") + header[4:20] = iv + + h = hmac.new(tik, digestmod="sha256") + h.update(bytes([0x01])) + # FLAGS || IV + h.update(header[0:20]) + h.update(secret_table_size.to_bytes(4, byteorder="little")) + h.update(secret_table_size.to_bytes(4, byteorder="little")) + h.update(encrypted_secret_table) + h.update(vm_measure) + + header[20:52] = h.digest() + + return header diff --git a/src/aleph/sdk/wallets/ledger/ethereum.py b/src/aleph/sdk/wallets/ledger/ethereum.py index 2ecdc5d3..5dc40f03 100644 --- a/src/aleph/sdk/wallets/ledger/ethereum.py +++ b/src/aleph/sdk/wallets/ledger/ethereum.py @@ -9,7 +9,8 @@ from ledgereth.messages import sign_message from ledgereth.objects import LedgerAccount, SignedMessage -from ...chains.common import BaseAccount, bytes_from_hex, get_verification_buffer +from ...chains.common import BaseAccount, get_verification_buffer +from ...utils import bytes_from_hex class LedgerETHAccount(BaseAccount): diff --git a/tests/unit/aleph_vm_authentication.py b/tests/unit/aleph_vm_authentication.py new file mode 100644 index 00000000..491da51a --- /dev/null +++ b/tests/unit/aleph_vm_authentication.py @@ -0,0 +1,290 @@ +# Keep datetime import as is as it allow patching in test +import datetime +import functools +import json +import logging +from collections.abc import Awaitable, Coroutine +from typing import Any, Callable, Dict, Literal, Optional, Union + +import cryptography.exceptions +import pydantic +from aiohttp import web +from eth_account import Account +from eth_account.messages import encode_defunct +from jwcrypto import jwk +from jwcrypto.jwa import JWA +from pydantic import BaseModel, ValidationError, root_validator, validator + +from aleph.sdk.utils import bytes_from_hex + +logger = logging.getLogger(__name__) + +DOMAIN_NAME = "localhost" + + +def is_token_still_valid(datestr: str) -> bool: + """ + Checks if a token has expired based 
on its expiry timestamp + """ + current_datetime = datetime.datetime.now(tz=datetime.timezone.utc) + expiry_datetime = datetime.datetime.fromisoformat(datestr.replace("Z", "+00:00")) + + return expiry_datetime > current_datetime + + +def verify_wallet_signature(signature: bytes, message: str, address: str) -> bool: + """ + Verifies a signature issued by a wallet + """ + enc_msg = encode_defunct(hexstr=message) + computed_address = Account.recover_message(enc_msg, signature=signature) + + return computed_address.lower() == address.lower() + + +class SignedPubKeyPayload(BaseModel): + """This payload is signed by the wallet of the user to authorize an ephemeral key to act on his behalf.""" + + pubkey: Dict[str, Any] + # {'pubkey': {'alg': 'ES256', 'crv': 'P-256', 'ext': True, 'key_ops': ['verify'], 'kty': 'EC', + # 'x': '4blJBYpltvQLFgRvLE-2H7dsMr5O0ImHkgOnjUbG2AU', 'y': '5VHnq_hUSogZBbVgsXMs0CjrVfMy4Pa3Uv2BEBqfrN4'} + # alg: Literal["ECDSA"] + address: str + expires: str + + @property + def json_web_key(self) -> jwk.JWK: + """Return the ephemeral public key as Json Web Key""" + + return jwk.JWK(**self.pubkey) + + +class SignedPubKeyHeader(BaseModel): + signature: bytes + payload: bytes + + @validator("signature") + def signature_must_be_hex(cls, value: bytes) -> bytes: + """Convert the signature from hexadecimal to bytes""" + + return bytes_from_hex(value.decode()) + + @validator("payload") + def payload_must_be_hex(cls, value: bytes) -> bytes: + """Convert the payload from hexadecimal to bytes""" + + return bytes_from_hex(value.decode()) + + @root_validator(pre=False, skip_on_failure=True) + def check_expiry(cls, values) -> Dict[str, bytes]: + """Check that the token has not expired""" + payload: bytes = values["payload"] + content = SignedPubKeyPayload.parse_raw(payload) + + if not is_token_still_valid(content.expires): + msg = "Token expired" + raise ValueError(msg) + + return values + + @root_validator(pre=False, skip_on_failure=True) + def check_signature(cls, 
values: Dict[str, bytes]) -> Dict[str, bytes]: + """Check that the signature is valid""" + signature: bytes = values["signature"] + payload: bytes = values["payload"] + content = SignedPubKeyPayload.parse_raw(payload) + + if not verify_wallet_signature(signature, payload.hex(), content.address): + msg = "Invalid signature" + raise ValueError(msg) + + return values + + @property + def content(self) -> SignedPubKeyPayload: + """Return the content of the header""" + return SignedPubKeyPayload.parse_raw(self.payload) + + +class SignedOperationPayload(BaseModel): + time: datetime.datetime + method: Union[Literal["POST"], Literal["GET"]] + domain: str + path: str + # body_sha256: str # disabled since there is no body + + @validator("time") + def time_is_current(cls, v: datetime.datetime) -> datetime.datetime: + """Check that the time is current and the payload is not a replay attack.""" + max_past = datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta( + minutes=2 + ) + max_future = datetime.datetime.now( + tz=datetime.timezone.utc + ) + datetime.timedelta(minutes=2) + if v < max_past: + raise ValueError("Time is too far in the past") + if v > max_future: + raise ValueError("Time is too far in the future") + return v + + +class SignedOperation(BaseModel): + """This payload is signed by the ephemeral key authorized above.""" + + signature: bytes + payload: bytes + + @validator("signature") + def signature_must_be_hex(cls, value: str) -> bytes: + """Convert the signature from hexadecimal to bytes""" + + try: + if isinstance(value, bytes): + value = value.decode() + return bytes_from_hex(value) + except pydantic.ValidationError as error: + logger.warning(value) + raise error + + @validator("payload") + def payload_must_be_hex(cls, v) -> bytes: + """Convert the payload from hexadecimal to bytes""" + v = bytes.fromhex(v.decode()) + _ = SignedOperationPayload.parse_raw(v) + return v + + @property + def content(self) -> SignedOperationPayload: + """Return the 
content of the header""" + return SignedOperationPayload.parse_raw(self.payload) + + +def get_signed_pubkey(request: web.Request) -> SignedPubKeyHeader: + """Get the ephemeral public key that is signed by the wallet from the request headers.""" + signed_pubkey_header = request.headers.get("X-SignedPubKey") + + if not signed_pubkey_header: + raise web.HTTPBadRequest(reason="Missing X-SignedPubKey header") + + try: + return SignedPubKeyHeader.parse_raw(signed_pubkey_header) + + except KeyError as error: + logger.debug(f"Missing X-SignedPubKey header: {error}") + raise web.HTTPBadRequest(reason="Invalid X-SignedPubKey fields") from error + + except json.JSONDecodeError as error: + raise web.HTTPBadRequest(reason="Invalid X-SignedPubKey format") from error + + except ValueError as errors: + logging.debug(errors) + + for err in errors.args[0]: + if isinstance(err.exc, json.JSONDecodeError): + raise web.HTTPBadRequest( + reason="Invalid X-SignedPubKey format" + ) from errors + + if str(err.exc) == "Token expired": + raise web.HTTPUnauthorized(reason="Token expired") from errors + + if str(err.exc) == "Invalid signature": + raise web.HTTPUnauthorized(reason="Invalid signature") from errors + else: + raise errors + + +def get_signed_operation(request: web.Request) -> SignedOperation: + """Get the signed operation public key that is signed by the ephemeral key from the request headers.""" + try: + signed_operation = request.headers["X-SignedOperation"] + return SignedOperation.parse_raw(signed_operation) + except KeyError as error: + raise web.HTTPBadRequest(reason="Missing X-SignedOperation header") from error + except json.JSONDecodeError as error: + raise web.HTTPBadRequest(reason="Invalid X-SignedOperation format") from error + except ValidationError as error: + logger.debug(f"Invalid X-SignedOperation fields: {error}") + raise web.HTTPBadRequest(reason="Invalid X-SignedOperation fields") from error + + +def verify_signed_operation( + signed_operation: SignedOperation, 
signed_pubkey: SignedPubKeyHeader +) -> str: + """Verify that the operation is signed by the ephemeral key authorized by the wallet.""" + pubkey = signed_pubkey.content.json_web_key + + try: + JWA.signing_alg("ES256").verify( + pubkey, signed_operation.payload, signed_operation.signature + ) + logger.debug("Signature verified") + + return signed_pubkey.content.address + + except cryptography.exceptions.InvalidSignature as e: + logger.debug("Failing to validate signature for operation", e) + + raise web.HTTPUnauthorized(reason="Signature could not verified") + + +async def authenticate_jwk( + request: web.Request, domain_name: Optional[str] = DOMAIN_NAME +) -> str: + """Authenticate a request using the X-SignedPubKey and X-SignedOperation headers.""" + signed_pubkey = get_signed_pubkey(request) + signed_operation = get_signed_operation(request) + + if signed_operation.content.domain != domain_name: + logger.debug( + f"Invalid domain '{signed_operation.content.domain}' != '{domain_name}'" + ) + raise web.HTTPUnauthorized(reason="Invalid domain") + + if signed_operation.content.path != request.path: + logger.debug( + f"Invalid path '{signed_operation.content.path}' != '{request.path}'" + ) + raise web.HTTPUnauthorized(reason="Invalid path") + if signed_operation.content.method != request.method: + logger.debug( + f"Invalid method '{signed_operation.content.method}' != '{request.method}'" + ) + raise web.HTTPUnauthorized(reason="Invalid method") + return verify_signed_operation(signed_operation, signed_pubkey) + + +async def authenticate_websocket_message( + message, domain_name: Optional[str] = DOMAIN_NAME +) -> str: + """Authenticate a websocket message since JS cannot configure headers on WebSockets.""" + signed_pubkey = SignedPubKeyHeader.parse_obj(message["X-SignedPubKey"]) + signed_operation = SignedOperation.parse_obj(message["X-SignedOperation"]) + if signed_operation.content.domain != domain_name: + logger.debug( + f"Invalid domain 
'{signed_pubkey.content.domain}' != '{domain_name}'" + ) + raise web.HTTPUnauthorized(reason="Invalid domain") + return verify_signed_operation(signed_operation, signed_pubkey) + + +def require_jwk_authentication( + handler: Callable[[web.Request, str], Coroutine[Any, Any, web.StreamResponse]] +) -> Callable[[web.Request], Awaitable[web.StreamResponse]]: + @functools.wraps(handler) + async def wrapper(request): + try: + authenticated_sender: str = await authenticate_jwk(request) + except web.HTTPException as e: + return web.json_response(data={"error": e.reason}, status=e.status) + except Exception as e: + # Unexpected make sure to log it + logging.exception(e) + raise + + # authenticated_sender is the authenticted wallet address of the requester (as a string) + response = await handler(request, authenticated_sender) + return response + + return wrapper diff --git a/tests/unit/test_vm_client.py b/tests/unit/test_vm_client.py new file mode 100644 index 00000000..7cc9a2c3 --- /dev/null +++ b/tests/unit/test_vm_client.py @@ -0,0 +1,297 @@ +from urllib.parse import urlparse + +import aiohttp +import pytest +from aiohttp import web +from aioresponses import aioresponses +from aleph_message.models import ItemHash +from yarl import URL + +from aleph.sdk.chains.ethereum import ETHAccount +from aleph.sdk.client.vm_client import VmClient + +from .aleph_vm_authentication import ( + SignedOperation, + SignedPubKeyHeader, + authenticate_jwk, + authenticate_websocket_message, + verify_signed_operation, +) + + +@pytest.mark.asyncio +async def test_notify_allocation(): + account = ETHAccount(private_key=b"0x" + b"1" * 30) + vm_id = ItemHash("cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe") + + with aioresponses() as m: + vm_client = VmClient( + account=account, + node_url="http://localhost", + session=aiohttp.ClientSession(), + ) + m.post("http://localhost/control/allocation/notify", status=200) + await vm_client.notify_allocation(vm_id=vm_id) + assert 
len(m.requests) == 1 + assert ("POST", URL("http://localhost/control/allocation/notify")) in m.requests + await vm_client.session.close() + + +@pytest.mark.asyncio +async def test_perform_operation(): + account = ETHAccount(private_key=b"0x" + b"1" * 30) + vm_id = ItemHash("cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe") + operation = "reboot" + + with aioresponses() as m: + vm_client = VmClient( + account=account, + node_url="http://localhost", + session=aiohttp.ClientSession(), + ) + m.post( + f"http://localhost/control/machine/{vm_id}/{operation}", + status=200, + payload="mock_response_text", + ) + + status, response_text = await vm_client.perform_operation(vm_id, operation) + assert status == 200 + assert response_text == '"mock_response_text"' # ' ' cause by aioresponses + await vm_client.session.close() + + +@pytest.mark.asyncio +async def test_stop_instance(): + account = ETHAccount(private_key=b"0x" + b"1" * 30) + vm_id = ItemHash("cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe") + + with aioresponses() as m: + vm_client = VmClient( + account=account, + node_url="http://localhost", + session=aiohttp.ClientSession(), + ) + m.post( + f"http://localhost/control/machine/{vm_id}/stop", + status=200, + payload="mock_response_text", + ) + + status, response_text = await vm_client.stop_instance(vm_id) + assert status == 200 + assert response_text == '"mock_response_text"' # ' ' cause by aioresponses + await vm_client.session.close() + + +@pytest.mark.asyncio +async def test_reboot_instance(): + account = ETHAccount(private_key=b"0x" + b"1" * 30) + vm_id = ItemHash("cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe") + + with aioresponses() as m: + vm_client = VmClient( + account=account, + node_url="http://localhost", + session=aiohttp.ClientSession(), + ) + m.post( + f"http://localhost/control/machine/{vm_id}/reboot", + status=200, + payload="mock_response_text", + ) + + status, response_text = await 
vm_client.reboot_instance(vm_id) + assert status == 200 + assert response_text == '"mock_response_text"' # ' ' cause by aioresponses + await vm_client.session.close() + + +@pytest.mark.asyncio +async def test_erase_instance(): + account = ETHAccount(private_key=b"0x" + b"1" * 30) + vm_id = ItemHash("cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe") + + with aioresponses() as m: + vm_client = VmClient( + account=account, + node_url="http://localhost", + session=aiohttp.ClientSession(), + ) + m.post( + f"http://localhost/control/machine/{vm_id}/erase", + status=200, + payload="mock_response_text", + ) + + status, response_text = await vm_client.erase_instance(vm_id) + assert status == 200 + assert response_text == '"mock_response_text"' # ' ' cause by aioresponses + await vm_client.session.close() + + +@pytest.mark.asyncio +async def test_expire_instance(): + account = ETHAccount(private_key=b"0x" + b"1" * 30) + vm_id = ItemHash("cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe") + + with aioresponses() as m: + vm_client = VmClient( + account=account, + node_url="http://localhost", + session=aiohttp.ClientSession(), + ) + m.post( + f"http://localhost/control/machine/{vm_id}/expire", + status=200, + payload="mock_response_text", + ) + + status, response_text = await vm_client.expire_instance(vm_id) + assert status == 200 + assert response_text == '"mock_response_text"' # ' ' cause by aioresponses + await vm_client.session.close() + + +@pytest.mark.asyncio +async def test_get_logs(aiohttp_client): + account = ETHAccount(private_key=b"0x" + b"1" * 30) + vm_id = ItemHash("cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe") + + async def websocket_handler(request): + ws = web.WebSocketResponse() + await ws.prepare(request) + + async for msg in ws: + if msg.type == aiohttp.WSMsgType.TEXT: + await ws.send_str("mock_log_entry") + elif msg.type == aiohttp.WSMsgType.ERROR: + break + + return ws + + app = web.Application() + 
app.router.add_route( + "GET", "/control/machine/{vm_id}/stream_logs", websocket_handler + ) # Update route to match the URL + + client = await aiohttp_client(app) + + node_url = str(client.make_url("")).rstrip("/") + + vm_client = VmClient( + account=account, + node_url=node_url, + session=client.session, + ) + + logs = [] + async for log in vm_client.get_logs(vm_id): + logs.append(log) + if log == "mock_log_entry": + break + + assert logs == ["mock_log_entry"] + await vm_client.session.close() + + +@pytest.mark.asyncio +async def test_authenticate_jwk(aiohttp_client): + account = ETHAccount(private_key=b"0x" + b"1" * 30) + vm_id = ItemHash("cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe") + + async def test_authenticate_route(request): + address = await authenticate_jwk( + request, domain_name=urlparse(node_url).hostname + ) + assert vm_client.account.get_address() == address + return web.Response(text="ok") + + app = web.Application() + app.router.add_route( + "POST", f"/control/machine/{vm_id}/stop", test_authenticate_route + ) # Update route to match the URL + + client = await aiohttp_client(app) + + node_url = str(client.make_url("")).rstrip("/") + + vm_client = VmClient( + account=account, + node_url=node_url, + session=client.session, + ) + + status_code, response_text = await vm_client.stop_instance(vm_id) + assert status_code == 200, response_text + assert response_text == "ok" + + await vm_client.session.close() + + +@pytest.mark.asyncio +async def test_websocket_authentication(aiohttp_client): + account = ETHAccount(private_key=b"0x" + b"1" * 30) + vm_id = ItemHash("cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe") + + async def websocket_handler(request): + ws = web.WebSocketResponse() + await ws.prepare(request) + + first_message = await ws.receive_json() + credentials = first_message["auth"] + sender_address = await authenticate_websocket_message( + credentials, + domain_name=urlparse(node_url).hostname, + ) + + 
assert vm_client.account.get_address() == sender_address + await ws.send_str(sender_address) + + return ws + + app = web.Application() + app.router.add_route( + "GET", "/control/machine/{vm_id}/stream_logs", websocket_handler + ) # Update route to match the URL + + client = await aiohttp_client(app) + + node_url = str(client.make_url("")).rstrip("/") + + vm_client = VmClient( + account=account, + node_url=node_url, + session=client.session, + ) + + valid = False + + async for address in vm_client.get_logs(vm_id): + assert address == vm_client.account.get_address() + valid = True + + # this is done to ensure that the ws as runned at least once and avoid + # having silent errors + assert valid + + await vm_client.session.close() + + +@pytest.mark.asyncio +async def test_vm_client_generate_correct_authentication_headers(): + account = ETHAccount(private_key=b"0x" + b"1" * 30) + vm_id = ItemHash("cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe") + + vm_client = VmClient( + account=account, + node_url="http://localhost", + session=aiohttp.ClientSession(), + ) + + path, headers = await vm_client._generate_header(vm_id, "reboot", method="post") + signed_pubkey = SignedPubKeyHeader.parse_raw(headers["X-SignedPubKey"]) + signed_operation = SignedOperation.parse_raw(headers["X-SignedOperation"]) + address = verify_signed_operation(signed_operation, signed_pubkey) + + assert vm_client.account.get_address() == address diff --git a/tests/unit/test_vm_confidential_client.py b/tests/unit/test_vm_confidential_client.py new file mode 100644 index 00000000..832871ff --- /dev/null +++ b/tests/unit/test_vm_confidential_client.py @@ -0,0 +1,216 @@ +import tempfile +from pathlib import Path +from unittest import mock +from unittest.mock import patch + +import aiohttp +import pytest +from aioresponses import aioresponses +from aleph_message.models import ItemHash + +from aleph.sdk.chains.ethereum import ETHAccount +from aleph.sdk.client.vm_confidential_client import 
VmConfidentialClient + + +@pytest.mark.asyncio +async def test_perform_confidential_operation(): + account = ETHAccount(private_key=b"0x" + b"1" * 30) + vm_id = ItemHash("cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe") + operation = "confidential/test" + + with aioresponses() as m: + vm_client = VmConfidentialClient( + account=account, + sevctl_path=Path("/"), + node_url="http://localhost", + session=aiohttp.ClientSession(), + ) + m.post( + f"http://localhost/control/machine/{vm_id}/{operation}", + status=200, + payload="mock_response_text", + ) + + response_text = await vm_client.perform_confidential_operation(vm_id, operation) + assert response_text == '"mock_response_text"' # ' ' cause by aioresponses + await vm_client.session.close() + + +@pytest.mark.asyncio +async def test_confidential_initialize_instance(): + account = ETHAccount(private_key=b"0x" + b"1" * 30) + vm_id = ItemHash("cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe") + operation = "confidential/initialize" + node_url = "http://localhost" + url = f"{node_url}/control/machine/{vm_id}/{operation}" + headers = { + "X-SignedPubKey": "test_pubkey_token", + "X-SignedOperation": "test_operation_token", + } + + with tempfile.NamedTemporaryFile() as tmp_file: + tmp_file_bytes = Path(tmp_file.name).read_bytes() + with aioresponses() as m: + with patch( + "aleph.sdk.client.vm_confidential_client.VmConfidentialClient._generate_header", + return_value=(url, headers), + ): + vm_client = VmConfidentialClient( + account=account, + sevctl_path=Path("/"), + node_url=node_url, + session=aiohttp.ClientSession(), + ) + m.post( + url, + status=200, + payload="mock_response_text", + ) + tmp_file_path = Path(tmp_file.name) + response_text = await vm_client.initialize( + vm_id, session=tmp_file_path, godh=tmp_file_path + ) + assert ( + response_text == '"mock_response_text"' + ) # ' ' cause by aioresponses + m.assert_called_once_with( + url, + method="POST", + data={ + "session": 
tmp_file_bytes, + "godh": tmp_file_bytes, + }, + json=None, + headers=headers, + ) + await vm_client.session.close() + + +@pytest.mark.asyncio +async def test_confidential_measurement_instance(): + account = ETHAccount(private_key=b"0x" + b"1" * 30) + vm_id = ItemHash("cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe") + operation = "confidential/measurement" + node_url = "http://localhost" + url = f"{node_url}/control/machine/{vm_id}/{operation}" + headers = { + "X-SignedPubKey": "test_pubkey_token", + "X-SignedOperation": "test_operation_token", + } + + with aioresponses() as m: + with patch( + "aleph.sdk.client.vm_confidential_client.VmConfidentialClient._generate_header", + return_value=(url, headers), + ): + vm_client = VmConfidentialClient( + account=account, + sevctl_path=Path("/"), + node_url=node_url, + session=aiohttp.ClientSession(), + ) + m.get( + url, + status=200, + payload=dict( + { + "sev_info": { + "enabled": True, + "api_major": 0, + "api_minor": 0, + "build_id": 0, + "policy": 0, + "state": "", + "handle": 0, + }, + "launch_measure": "test_measure", + } + ), + ) + measurement = await vm_client.measurement(vm_id) + assert measurement.launch_measure == "test_measure" + m.assert_called_once_with( + url, + method="GET", + headers=headers, + ) + await vm_client.session.close() + + +@pytest.mark.asyncio +async def test_confidential_inject_secret_instance(): + account = ETHAccount(private_key=b"0x" + b"1" * 30) + vm_id = ItemHash("cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe") + operation = "confidential/inject_secret" + node_url = "http://localhost" + url = f"{node_url}/control/machine/{vm_id}/{operation}" + headers = { + "X-SignedPubKey": "test_pubkey_token", + "X-SignedOperation": "test_operation_token", + } + test_secret = "test_secret" + packet_header = "test_packet_header" + + with aioresponses() as m: + with patch( + "aleph.sdk.client.vm_confidential_client.VmConfidentialClient._generate_header", + 
return_value=(url, headers), + ): + vm_client = VmConfidentialClient( + account=account, + sevctl_path=Path("/"), + node_url=node_url, + session=aiohttp.ClientSession(), + ) + m.post( + url, + status=200, + payload="mock_response_text", + ) + response_text = await vm_client.inject_secret( + vm_id, secret=test_secret, packet_header=packet_header + ) + assert response_text == "mock_response_text" + m.assert_called_once_with( + url, + method="POST", + json={ + "secret": test_secret, + "packet_header": packet_header, + }, + headers=headers, + ) + await vm_client.session.close() + + +@pytest.mark.asyncio +async def test_create_session_command(): + account = ETHAccount(private_key=b"0x" + b"1" * 30) + vm_id = ItemHash("cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe") + node_url = "http://localhost" + sevctl_path = Path("/usr/bin/sevctl") + certificates_path = Path("/") + policy = 1 + + with mock.patch( + "aleph.sdk.client.vm_confidential_client.run_in_subprocess", + return_value=True, + ) as export_mock: + vm_client = VmConfidentialClient( + account=account, + sevctl_path=sevctl_path, + node_url=node_url, + session=aiohttp.ClientSession(), + ) + _ = await vm_client.create_session(vm_id, certificates_path, policy) + export_mock.assert_called_once_with( + [ + str(sevctl_path), + "session", + "--name", + str(vm_id), + str(certificates_path), + str(policy), + ], + check=True, + ) From 59cc2479b1112a89d6e583f4ce333986be259b5a Mon Sep 17 00:00:00 2001 From: Olivier Le Thanh Duong Date: Fri, 5 Jul 2024 14:11:44 +0200 Subject: [PATCH 037/122] Confidential: Provide method to calculate firmware hash (#140) Which is required to calculate and verify the measurement Also add test and test for compute_confidential_measure --- src/aleph/sdk/utils.py | 15 ++++++++++ tests/unit/test_utils.py | 63 +++++++++++++++++++++++++++++++++++++++- 2 files changed, 77 insertions(+), 1 deletion(-) diff --git a/src/aleph/sdk/utils.py b/src/aleph/sdk/utils.py index 5c641d5c..116c7b42 
100644 --- a/src/aleph/sdk/utils.py +++ b/src/aleph/sdk/utils.py @@ -268,6 +268,21 @@ def get_vm_measure(sev_data: SEVMeasurement) -> Tuple[bytes, bytes]: return vm_measure, nonce +def calculate_firmware_hash(firmware_path: Path) -> str: + """Calculate the hash of the firmware (OVMF) file to be used in validating the measurements + + Returned as hex encoded string""" + + # https://www.qemu.org/docs/master/system/i386/amd-memory-encryption.html + # The value of GCTX.LD is SHA256(firmware_blob || kernel_hashes_blob || vmsas_blob), where: + # firmware_blob is the content of the entire firmware flash file (for example, OVMF.fd). [...] + # and verified again sevctl, see tests + firmware_content = firmware_path.read_bytes() + hash_calculator = hashlib.sha256(firmware_content) + + return hash_calculator.hexdigest() + + def compute_confidential_measure( sev_info: SEVInfo, tik: bytes, expected_hash: str, nonce: bytes ) -> hmac.HMAC: diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 85f274e6..bfca23a5 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -1,3 +1,4 @@ +import base64 import datetime import pytest as pytest @@ -18,7 +19,14 @@ PersistentVolume, ) -from aleph.sdk.utils import enum_as_str, get_message_type_value, parse_volume +from aleph.sdk.types import SEVInfo +from aleph.sdk.utils import ( + calculate_firmware_hash, + compute_confidential_measure, + enum_as_str, + get_message_type_value, + parse_volume, +) def test_get_message_type_value(): @@ -174,3 +182,56 @@ def test_parse_persistent_volume(): volume = parse_volume(volume) assert volume assert isinstance(volume, PersistentVolume) + + +def test_calculate_firmware_hash(mocker): + mock_path = mocker.Mock( + read_bytes=mocker.Mock(return_value=b"abc"), + ) + + assert ( + calculate_firmware_hash(mock_path) + == "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad" + ) + + +def test_compute_confidential_measure(): + """Verify that we properly calculate the 
measurement we use agains the server + + Validated against the sevctl command: + $ RUST_LOG=trace sevctl measurement build --api-major 01 --api-minor 55 --build-id 24 --policy 1 + --tik ~/pycharm-aleph-sdk-python/decadecadecadecadecadecadecadecadecadecadecadecadecadecadecadeca_tik.bin + --firmware /usr/share/ovmf/OVMF.fd --nonce URQNqJAqh/2ep4drjx/XvA + + [2024-07-05T11:19:06Z DEBUG sevctl::measurement] firmware + table len=4194304 sha256: d06471f485c0a61aba5a431ec136b947be56907acf6ed96afb11788ae4525aeb + [2024-07-05T11:19:06Z DEBUG sevctl::measurement] --tik base64: npOTEc4mtRGfXfB+G6EBdw== + [2024-07-05T11:19:06Z DEBUG sevctl::measurement] --nonce base64: URQNqJAqh/2ep4drjx/XvA== + [2024-07-05T11:19:06Z DEBUG sevctl::measurement] Raw measurement: BAE3GAEAAADQZHH0hcCmGrpaQx7BNrlHvlaQes9u2Wr7EXiK5FJa61EUDaiQKof9nqeHa48f17w= + [2024-07-05T11:19:06Z DEBUG sevctl::measurement] Signed measurement: ls2jv10V3HVShVI/RHCo/a43WO0soLZf0huU9ZZstIw= + [2024-07-05T11:19:06Z DEBUG sevctl::measurement] Measurement + nonce: ls2jv10V3HVShVI/RHCo/a43WO0soLZf0huU9ZZstIxRFA2okCqH/Z6nh2uPH9e8 + """ + + tik = bytes.fromhex("9e939311ce26b5119f5df07e1ba10177") + assert base64.b64encode(tik) == b"npOTEc4mtRGfXfB+G6EBdw==" + expected_hash = "d06471f485c0a61aba5a431ec136b947be56907acf6ed96afb11788ae4525aeb" + nonce = base64.b64decode("URQNqJAqh/2ep4drjx/XvA==") + sev_info = SEVInfo.parse_obj( + { + "enabled": True, + "api_major": 1, + "api_minor": 55, + "build_id": 24, + "policy": 1, + "state": "running", + "handle": 1, + } + ) + + assert ( + base64.b64encode( + compute_confidential_measure( + sev_info, tik, expected_hash, nonce=nonce + ).digest() + ) + == b"ls2jv10V3HVShVI/RHCo/a43WO0soLZf0huU9ZZstIw=" + ) From be5a69742768dfde7a476b7ba5609f8539804187 Mon Sep 17 00:00:00 2001 From: nesitor Date: Fri, 5 Jul 2024 15:36:15 +0200 Subject: [PATCH 038/122] Add new Confidential fields (#137) * Problem: As a user we cannot create a confidential VM using the SDK. 
Solution: Implement new confidential VM fields. * Fix: Solve code quality issues. * Fix: Changed `create_instance` method signature to be similar to aleph-message schema. --- src/aleph/sdk/client/abstract.py | 11 ++++++- src/aleph/sdk/client/authenticated_http.py | 6 ++++ tests/unit/test_asynchronous.py | 37 +++++++++++++++++++++- 3 files changed, 52 insertions(+), 2 deletions(-) diff --git a/src/aleph/sdk/client/abstract.py b/src/aleph/sdk/client/abstract.py index 9fce5469..23c30e81 100644 --- a/src/aleph/sdk/client/abstract.py +++ b/src/aleph/sdk/client/abstract.py @@ -27,7 +27,11 @@ PostMessage, parse_message, ) -from aleph_message.models.execution.environment import HypervisorType +from aleph_message.models.execution.environment import ( + HostRequirements, + HypervisorType, + TrustedExecutionEnvironment, +) from aleph_message.models.execution.program import Encoding from aleph_message.status import MessageStatus @@ -395,10 +399,12 @@ async def create_instance( internet: bool = True, aleph_api: bool = True, hypervisor: Optional[HypervisorType] = None, + trusted_execution: Optional[TrustedExecutionEnvironment] = None, volumes: Optional[List[Mapping]] = None, volume_persistence: str = "host", ssh_keys: Optional[List[str]] = None, metadata: Optional[Mapping[str, Any]] = None, + requirements: Optional[HostRequirements] = None, ) -> Tuple[AlephMessage, MessageStatus]: """ Post a (create) INSTANCE message. @@ -417,11 +423,14 @@ async def create_instance( :param allow_amend: Whether the deployed VM image may be changed (Default: False) :param internet: Whether the VM should have internet connectivity. 
(Default: True) :param aleph_api: Whether the VM needs access to Aleph messages API (Default: True) + :param hypervisor: Whether the VM should use as Hypervisor, like QEmu or Firecracker (Default: Qemu) + :param trusted_execution: Whether the VM configuration (firmware and policy) to use for Confidential computing (Default: None) :param encoding: Encoding to use (Default: Encoding.zip) :param volumes: Volumes to mount :param volume_persistence: Where volumes are persisted, can be "host" or "store", meaning distributed across Aleph.im (Default: "host") :param ssh_keys: SSH keys to authorize access to the VM :param metadata: Metadata to attach to the message + :param requirements: CRN Requirements needed for the VM execution """ raise NotImplementedError( "Did you mean to import `AuthenticatedAlephHttpClient`?" diff --git a/src/aleph/sdk/client/authenticated_http.py b/src/aleph/sdk/client/authenticated_http.py index 6d44b526..cb2f3e01 100644 --- a/src/aleph/sdk/client/authenticated_http.py +++ b/src/aleph/sdk/client/authenticated_http.py @@ -29,9 +29,11 @@ from aleph_message.models.execution.base import Encoding, Payment, PaymentType from aleph_message.models.execution.environment import ( FunctionEnvironment, + HostRequirements, HypervisorType, InstanceEnvironment, MachineResources, + TrustedExecutionEnvironment, ) from aleph_message.models.execution.instance import RootfsVolume from aleph_message.models.execution.program import CodeContent, FunctionRuntime @@ -522,10 +524,12 @@ async def create_instance( internet: bool = True, aleph_api: bool = True, hypervisor: Optional[HypervisorType] = None, + trusted_execution: Optional[TrustedExecutionEnvironment] = None, volumes: Optional[List[Mapping]] = None, volume_persistence: str = "host", ssh_keys: Optional[List[str]] = None, metadata: Optional[Mapping[str, Any]] = None, + requirements: Optional[HostRequirements] = None, ) -> Tuple[InstanceMessage, MessageStatus]: address = address or settings.ADDRESS_TO_USE or 
self.account.get_address() @@ -546,6 +550,7 @@ async def create_instance( internet=internet, aleph_api=aleph_api, hypervisor=selected_hypervisor, + trusted_execution=trusted_execution, ), variables=environment_variables, resources=MachineResources( @@ -563,6 +568,7 @@ async def create_instance( use_latest=True, ), volumes=[parse_volume(volume) for volume in volumes], + requirements=requirements, time=time.time(), authorized_keys=ssh_keys, metadata=metadata, diff --git a/tests/unit/test_asynchronous.py b/tests/unit/test_asynchronous.py index 0f909408..b044e170 100644 --- a/tests/unit/test_asynchronous.py +++ b/tests/unit/test_asynchronous.py @@ -14,7 +14,13 @@ ProgramMessage, StoreMessage, ) -from aleph_message.models.execution.environment import HypervisorType, MachineResources +from aleph_message.models.execution.environment import ( + HostRequirements, + HypervisorType, + MachineResources, + NodeRequirements, + TrustedExecutionEnvironment, +) from aleph_message.status import MessageStatus from aleph.sdk.exceptions import InsufficientFundsError @@ -163,6 +169,35 @@ async def test_create_instance_no_hypervisor(mock_session_with_post_success): assert isinstance(instance_message, InstanceMessage) +@pytest.mark.asyncio +async def test_create_confidential_instance(mock_session_with_post_success): + async with mock_session_with_post_success as session: + confidential_instance_message, message_status = await session.create_instance( + rootfs="cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe", + rootfs_size=1, + channel="TEST", + metadata={"tags": ["test"]}, + payment=Payment( + chain=Chain.AVAX, + receiver="0x4145f182EF2F06b45E50468519C1B92C60FBd4A0", + type=PaymentType.superfluid, + ), + hypervisor=HypervisorType.qemu, + trusted_execution=TrustedExecutionEnvironment( + firmware="cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe", + policy=0b1, + ), + requirements=HostRequirements( + node=NodeRequirements( + 
node_hash="cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe", + ) + ), + ) + + assert mock_session_with_post_success.http_session.post.assert_called_once + assert isinstance(confidential_instance_message, InstanceMessage) + + @pytest.mark.asyncio async def test_forget(mock_session_with_post_success): async with mock_session_with_post_success as session: From 6c5aa1638a281ac2b3a4fbb46a0701e32875117a Mon Sep 17 00:00:00 2001 From: Olivier Le Thanh Duong Date: Mon, 8 Jul 2024 14:54:29 +0200 Subject: [PATCH 039/122] fix typo --- src/aleph/sdk/client/vm_confidential_client.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/aleph/sdk/client/vm_confidential_client.py b/src/aleph/sdk/client/vm_confidential_client.py index a100de8c..4a037d45 100644 --- a/src/aleph/sdk/client/vm_confidential_client.py +++ b/src/aleph/sdk/client/vm_confidential_client.py @@ -105,8 +105,8 @@ async def measurement(self, vm_id: ItemHash) -> SEVMeasurement: status, text = await self.perform_operation( vm_id, "confidential/measurement", method="GET" ) - sev_mesurement = SEVMeasurement.parse_raw(text) - return sev_mesurement + sev_measurement = SEVMeasurement.parse_raw(text) + return sev_measurement async def validate_measure( self, sev_data: SEVMeasurement, tik_path: Path, firmware_hash: str From 24c3e317519a89c0ffb31f3f1780cf238710e7df Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Tue, 9 Jul 2024 12:20:52 +0200 Subject: [PATCH 040/122] Fix: Bump aleph-message version to 0.4.8 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 918462a9..fbe94434 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,7 +23,7 @@ classifiers = [ ] dependencies = [ "aiohttp>=3.8.3", - "aleph-message>=0.4.7", + "aleph-message>=0.4.8", "coincurve; python_version<\"3.11\"", "coincurve>=19.0.0; python_version>=\"3.11\"", "eth_abi>=4.0.0; python_version>=\"3.11\"", From 
598ab0e95f19866e62c019d58d0ea297b210e97f Mon Sep 17 00:00:00 2001 From: philogicae Date: Wed, 14 Aug 2024 12:32:11 +0300 Subject: [PATCH 041/122] Fix to ensure no traling slash in crn node url --- src/aleph/sdk/client/vm_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/aleph/sdk/client/vm_client.py b/src/aleph/sdk/client/vm_client.py index 4092851d..5ab22a0b 100644 --- a/src/aleph/sdk/client/vm_client.py +++ b/src/aleph/sdk/client/vm_client.py @@ -35,7 +35,7 @@ def __init__( ): self.account = account self.ephemeral_key = jwk.JWK.generate(kty="EC", crv="P-256") - self.node_url = node_url + self.node_url = node_url.rstrip('/') self.pubkey_payload = self._generate_pubkey_payload() self.pubkey_signature_header = "" self.session = session or aiohttp.ClientSession() From b1e33d0743cda99a9603f7926cd8724a668601ed Mon Sep 17 00:00:00 2001 From: philogicae Date: Wed, 14 Aug 2024 16:09:53 +0300 Subject: [PATCH 042/122] run black --- src/aleph/sdk/client/vm_client.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/aleph/sdk/client/vm_client.py b/src/aleph/sdk/client/vm_client.py index 5ab22a0b..18d280cc 100644 --- a/src/aleph/sdk/client/vm_client.py +++ b/src/aleph/sdk/client/vm_client.py @@ -35,7 +35,7 @@ def __init__( ): self.account = account self.ephemeral_key = jwk.JWK.generate(kty="EC", crv="P-256") - self.node_url = node_url.rstrip('/') + self.node_url = node_url.rstrip("/") self.pubkey_payload = self._generate_pubkey_payload() self.pubkey_signature_header = "" self.session = session or aiohttp.ClientSession() From ef1f77d0bcb418d104f0fbed5d8deb90ee99e52d Mon Sep 17 00:00:00 2001 From: Bram Date: Wed, 14 Aug 2024 15:59:57 +0200 Subject: [PATCH 043/122] ci: test jobs used to be call 'build' while they are tests jobs (#147) --- .github/workflows/pytest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 8d5456c6..b4fecc57 100644 --- 
a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -12,7 +12,7 @@ on: - cron: '4 0 * * *' jobs: - build: + tests: strategy: fail-fast: false matrix: From d2f56b024cd3cff5d725293e786804b61ff791f2 Mon Sep 17 00:00:00 2001 From: Laurent Peuch Date: Wed, 14 Aug 2024 15:35:54 +0200 Subject: [PATCH 044/122] fix: aiohttp.ClientSession needs to be created inside an async function --- src/aleph/sdk/client/http.py | 59 ++++++++++++++++++++++-------------- tests/unit/conftest.py | 6 ++-- 2 files changed, 40 insertions(+), 25 deletions(-) diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index ae98b0d1..8f3b7f90 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -30,7 +30,7 @@ class AlephHttpClient(AlephClient): api_server: str - http_session: aiohttp.ClientSession + _http_session: Optional[aiohttp.ClientSession] def __init__( self, @@ -48,35 +48,50 @@ def __init__( if not self.api_server: raise ValueError("Missing API host") - connector: Union[aiohttp.BaseConnector, None] + self.connector: Union[aiohttp.BaseConnector, None] unix_socket_path = api_unix_socket or settings.API_UNIX_SOCKET + if ssl_context: - connector = aiohttp.TCPConnector(ssl=ssl_context) + self.connector = aiohttp.TCPConnector(ssl=ssl_context) elif unix_socket_path and allow_unix_sockets: check_unix_socket_valid(unix_socket_path) - connector = aiohttp.UnixConnector(path=unix_socket_path) + self.connector = aiohttp.UnixConnector(path=unix_socket_path) else: - connector = None - - # ClientSession timeout defaults to a private sentinel object and may not be None. 
- self.http_session = ( - aiohttp.ClientSession( - base_url=self.api_server, - connector=connector, - timeout=timeout, - json_serialize=extended_json_encoder, - ) - if timeout - else aiohttp.ClientSession( - base_url=self.api_server, - connector=connector, - json_serialize=lambda obj: json.dumps( - obj, default=extended_json_encoder - ), + self.connector = None + + self.timeout = timeout + self._http_session = None + + @property + def http_session(self) -> aiohttp.ClientSession: + if self._http_session is None: + raise Exception( + f"{self.__class__.__name__} can only be using within an async context manager.\n\n" + "Please use it this way:\n\n" + " async with {self.__class__.__name__}(...) as client:" ) - ) + + return self._http_session async def __aenter__(self) -> "AlephHttpClient": + if self._http_session is None: + self._http_session = ( + aiohttp.ClientSession( + base_url=self.api_server, + connector=self.connector, + timeout=self.timeout, + json_serialize=extended_json_encoder, + ) + if self.timeout + else aiohttp.ClientSession( + base_url=self.api_server, + connector=self.connector, + json_serialize=lambda obj: json.dumps( + obj, default=extended_json_encoder + ), + ) + ) + return self async def __aexit__(self, exc_type, exc_val, exc_tb): diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 3c5c1fe8..3b60873e 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -193,7 +193,7 @@ def mock_session_with_post_success( client = AuthenticatedAlephHttpClient( account=ethereum_account, api_server="http://localhost" ) - client.http_session = http_session + client._http_session = http_session return client @@ -254,7 +254,7 @@ def get(self, *_args, **_kwargs): http_session = MockHttpSession() client = AlephHttpClient(api_server="http://localhost") - client.http_session = http_session + client._http_session = http_session return client @@ -281,6 +281,6 @@ def post(self, *_args, **_kwargs): client = AuthenticatedAlephHttpClient( 
account=ethereum_account, api_server="http://localhost" ) - client.http_session = http_session + client._http_session = http_session return client From 0de453dfc0870e3b5124a7cf3bbeaa5699ef2681 Mon Sep 17 00:00:00 2001 From: Olivier Le Thanh Duong Date: Wed, 14 Aug 2024 17:33:49 +0200 Subject: [PATCH 045/122] Fix 146: Autenticated client was overriding the manager and it was causing issue (#149) * Fix 146: Autenticated client was overriding the manager and it was causing issue * fix(client.http): add missing f to format string * Typing issue with hardcoded class --------- Co-authored-by: Laurent Peuch --- src/aleph/sdk/client/authenticated_http.py | 3 --- src/aleph/sdk/client/http.py | 8 +++++--- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/src/aleph/sdk/client/authenticated_http.py b/src/aleph/sdk/client/authenticated_http.py index cb2f3e01..f84b97ca 100644 --- a/src/aleph/sdk/client/authenticated_http.py +++ b/src/aleph/sdk/client/authenticated_http.py @@ -89,9 +89,6 @@ def __init__( ) self.account = account - async def __aenter__(self) -> "AuthenticatedAlephHttpClient": - return self - async def ipfs_push(self, content: Mapping) -> str: """ Push arbitrary content as JSON to the IPFS service. diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index 8f3b7f90..9dfb588a 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -68,12 +68,12 @@ def http_session(self) -> aiohttp.ClientSession: raise Exception( f"{self.__class__.__name__} can only be using within an async context manager.\n\n" "Please use it this way:\n\n" - " async with {self.__class__.__name__}(...) as client:" + f" async with {self.__class__.__name__}(...) 
as client:" ) return self._http_session - async def __aenter__(self) -> "AlephHttpClient": + async def __aenter__(self): if self._http_session is None: self._http_session = ( aiohttp.ClientSession( @@ -95,7 +95,9 @@ async def __aenter__(self) -> "AlephHttpClient": return self async def __aexit__(self, exc_type, exc_val, exc_tb): - await self.http_session.close() + # Avoid cascade in error handling + if self._http_session is not None: + await self._http_session.close() async def fetch_aggregate(self, address: str, key: str) -> Dict[str, Dict]: params: Dict[str, Any] = {"keys": key} From 2173274316f1c7596037f5a548d27d5cfd8bd9ef Mon Sep 17 00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Fri, 16 Aug 2024 11:28:41 +0200 Subject: [PATCH 046/122] Feature: get_program_price (#143) * Feature: get_program_price functions * Fix: add also to abstract AlehpClient * Fix: black issue * Fix: add superfuild to pyproject.toml * Revert "Fix: add superfuild to pyproject.toml" This reverts commit d206c0c8be6c963b50af37f4e5379766e1d7be48. 
* Fix: isort issue * Fix: type * Fix: unit test * Fix: style issue * Update src/aleph/sdk/client/http.py Co-authored-by: Hugo Herter --------- Co-authored-by: Hugo Herter --- src/aleph/sdk/client/abstract.py | 15 ++++++++++++++- src/aleph/sdk/client/http.py | 23 ++++++++++++++++++++-- src/aleph/sdk/exceptions.py | 6 ++++++ src/aleph/sdk/query/responses.py | 7 +++++++ tests/unit/conftest.py | 20 +++++++++++++++++++ tests/unit/test_price.py | 33 ++++++++++++++++++++++++++++++++ 6 files changed, 101 insertions(+), 3 deletions(-) create mode 100644 tests/unit/test_price.py diff --git a/src/aleph/sdk/client/abstract.py b/src/aleph/sdk/client/abstract.py index 23c30e81..5f1bd942 100644 --- a/src/aleph/sdk/client/abstract.py +++ b/src/aleph/sdk/client/abstract.py @@ -7,6 +7,7 @@ from typing import ( Any, AsyncIterable, + Coroutine, Dict, Iterable, List, @@ -40,7 +41,7 @@ from aleph.sdk.utils import extended_json_encoder from ..query.filters import MessageFilter, PostFilter -from ..query.responses import PostsResponse +from ..query.responses import PostsResponse, PriceResponse from ..types import GenericMessage, StorageEnum from ..utils import Writable, compute_sha256 @@ -241,6 +242,18 @@ def watch_messages( """ raise NotImplementedError("Did you mean to import `AlephHttpClient`?") + @abstractmethod + def get_program_price( + self, + item_hash: str, + ) -> Coroutine[Any, Any, PriceResponse]: + """ + Get Program message Price + + :param item_hash: item_hash of executable message + """ + raise NotImplementedError("Did you mean to import `AlephHttpClient`?") + class AuthenticatedAlephClient(AlephClient): account: Account diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index 9dfb588a..7e8755b5 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -12,9 +12,14 @@ from pydantic import ValidationError from ..conf import settings -from ..exceptions import FileTooLarge, ForgottenMessageError, MessageNotFoundError +from ..exceptions 
import ( + FileTooLarge, + ForgottenMessageError, + InvalidHashError, + MessageNotFoundError, +) from ..query.filters import MessageFilter, PostFilter -from ..query.responses import MessagesResponse, Post, PostsResponse +from ..query.responses import MessagesResponse, Post, PostsResponse, PriceResponse from ..types import GenericMessage from ..utils import ( Writable, @@ -409,3 +414,17 @@ async def watch_messages( yield parse_message(data) elif msg.type == aiohttp.WSMsgType.ERROR: break + + async def get_program_price(self, item_hash: str) -> PriceResponse: + async with self.http_session.get(f"/api/v0/price/{item_hash}") as resp: + try: + resp.raise_for_status() + response_json = await resp.json() + return PriceResponse( + required_tokens=response_json["required_tokens"], + payment_type=response_json["payment_type"], + ) + except aiohttp.ClientResponseError as e: + if e.status == 400: + raise InvalidHashError(f"Bad request or no such hash {item_hash}") + raise e diff --git a/src/aleph/sdk/exceptions.py b/src/aleph/sdk/exceptions.py index 39972f7f..a538a31c 100644 --- a/src/aleph/sdk/exceptions.py +++ b/src/aleph/sdk/exceptions.py @@ -78,3 +78,9 @@ def __init__(self, required_funds: float, available_funds: float): super().__init__( f"Insufficient funds: required {required_funds}, available {available_funds}" ) + + +class InvalidHashError(QueryError): + """The Hash is not valid""" + + pass diff --git a/src/aleph/sdk/query/responses.py b/src/aleph/sdk/query/responses.py index 5fb91804..4b598f12 100644 --- a/src/aleph/sdk/query/responses.py +++ b/src/aleph/sdk/query/responses.py @@ -72,3 +72,10 @@ class MessagesResponse(PaginationResponse): messages: List[AlephMessage] pagination_item = "messages" + + +class PriceResponse(BaseModel): + """Response from an aleph.im node API on the path /api/v0/price/{item_hash}""" + + required_tokens: float + payment_type: str diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 3b60873e..4205230b 100644 --- 
a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -8,6 +8,7 @@ from unittest.mock import AsyncMock, MagicMock import pytest as pytest +from aiohttp import ClientResponseError from aleph_message.models import AggregateMessage, AlephMessage, PostMessage import aleph.sdk.chains.ethereum as ethereum @@ -230,6 +231,10 @@ class CustomMockResponse(MockResponse): async def json(self): return resp + def raise_for_status(self): + if status >= 400: + raise ClientResponseError(None, None, status=status) + @property def status(self): return status @@ -259,6 +264,21 @@ def get(self, *_args, **_kwargs): return client +def make_mock_get_session_400( + get_return_value: Union[Dict[str, Any], bytes] +) -> AlephHttpClient: + class MockHttpSession(AsyncMock): + def get(self, *_args, **_kwargs): + return make_custom_mock_response(get_return_value, 400) + + http_session = MockHttpSession() + + client = AlephHttpClient(api_server="http://localhost") + client.http_session = http_session + + return client + + @pytest.fixture def mock_session_with_rejected_message( ethereum_account, rejected_message diff --git a/tests/unit/test_price.py b/tests/unit/test_price.py new file mode 100644 index 00000000..bed9304a --- /dev/null +++ b/tests/unit/test_price.py @@ -0,0 +1,33 @@ +import pytest + +from aleph.sdk.exceptions import InvalidHashError +from aleph.sdk.query.responses import PriceResponse +from tests.unit.conftest import make_mock_get_session, make_mock_get_session_400 + + +@pytest.mark.asyncio +async def test_get_program_price_valid(): + """ + Test that the get_program_price method returns the correct PriceResponse + when given a valid item hash. 
+ """ + expected_response = { + "required_tokens": 3.0555555555555556e-06, + "payment_type": "superfluid", + } + mock_session = make_mock_get_session(expected_response) + async with mock_session: + response = await mock_session.get_program_price("cacacacacacaca") + assert response == PriceResponse(**expected_response) + + +@pytest.mark.asyncio +async def test_get_program_price_invalid(): + """ + Test that the get_program_price method raises an InvalidHashError + when given an invalid item hash. + """ + mock_session = make_mock_get_session_400({"error": "Invalid hash"}) + async with mock_session: + with pytest.raises(InvalidHashError): + await mock_session.get_program_price("invalid_item_hash") From 04622be3a911c9ec8f3d2dbb01a046aa79fe8a07 Mon Sep 17 00:00:00 2001 From: 1yam Date: Fri, 16 Aug 2024 11:37:37 +0200 Subject: [PATCH 047/122] Fix: Mocking attribute `_http_session` failed Due to renaming in d2f56b024cd3cff5d725293e786804b61ff791f2 --- tests/unit/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 4205230b..c1c56fcd 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -274,7 +274,7 @@ def get(self, *_args, **_kwargs): http_session = MockHttpSession() client = AlephHttpClient(api_server="http://localhost") - client.http_session = http_session + client._http_session = http_session return client From fcb3730979bd289cbbc8139ef6c524340da1389c Mon Sep 17 00:00:00 2001 From: 1yam Date: Fri, 9 Aug 2024 11:08:41 +0200 Subject: [PATCH 048/122] Feature: Could not creating Superfluid flows from sdk Solution: Install and import superfluid.py from PyPI. 
Add helper methods on EthAccount --- pyproject.toml | 5 +- src/aleph/sdk/chains/ethereum.py | 98 +++++++++++++++++- src/aleph/sdk/conf.py | 4 + src/aleph/sdk/connectors/superfluid.py | 124 +++++++++++++++++++++++ tests/unit/test_superfluid.py | 133 +++++++++++++++++++++++++ 5 files changed, 361 insertions(+), 3 deletions(-) create mode 100644 src/aleph/sdk/connectors/superfluid.py create mode 100644 tests/unit/test_superfluid.py diff --git a/pyproject.toml b/pyproject.toml index fbe94434..4c41904a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,10 @@ dependencies = [ "jwcrypto==1.5.6", "python-magic", "typing_extensions", - "aioresponses>=0.7.6" + "aioresponses>=0.7.6", + "superfluid~=0.2.1", + "eth_typing==4.3.1", + ] [project.optional-dependencies] diff --git a/src/aleph/sdk/chains/ethereum.py b/src/aleph/sdk/chains/ethereum.py index b0fa5fbe..294f47da 100644 --- a/src/aleph/sdk/chains/ethereum.py +++ b/src/aleph/sdk/chains/ethereum.py @@ -1,24 +1,78 @@ +from decimal import Decimal from pathlib import Path -from typing import Optional, Union +from typing import Awaitable, Dict, Optional, Set, Union +from aleph_message.models import Chain from eth_account import Account from eth_account.messages import encode_defunct from eth_account.signers.local import LocalAccount from eth_keys.exceptions import BadSignature as EthBadSignatureError +from superfluid import Web3FlowInfo +from ..conf import settings +from ..connectors.superfluid import Superfluid from ..exceptions import BadSignatureError from ..utils import bytes_from_hex from .common import BaseAccount, get_fallback_private_key, get_public_key +CHAINS_WITH_SUPERTOKEN: Set[Chain] = {Chain.AVAX} +CHAIN_IDS: Dict[Chain, int] = { + Chain.AVAX: settings.AVAX_CHAIN_ID, +} + + +def get_rpc_for_chain(chain: Chain): + """Returns the RPC to use for a given Ethereum based blockchain""" + if not chain: + return None + + if chain == Chain.AVAX: + return settings.AVAX_RPC + else: + raise ValueError(f"Unknown RPC 
for chain {chain}") + + +def get_chain_id_for_chain(chain: Chain): + """Returns the chain ID of a given Ethereum based blockchain""" + if not chain: + return None + + if chain in CHAIN_IDS: + return CHAIN_IDS[chain] + else: + raise ValueError(f"Unknown RPC for chain {chain}") + class ETHAccount(BaseAccount): + """Interact with an Ethereum address or key pair""" + CHAIN = "ETH" CURVE = "secp256k1" _account: LocalAccount + chain: Optional[Chain] + superfluid_connector: Optional[Superfluid] - def __init__(self, private_key: bytes): + def __init__( + self, + private_key: bytes, + chain: Optional[Chain] = None, + rpc: Optional[str] = None, + chain_id: Optional[int] = None, + ): self.private_key = private_key self._account = Account.from_key(self.private_key) + self.chain = chain + rpc = rpc or get_rpc_for_chain(chain) + chain_id = chain_id or get_chain_id_for_chain(chain) + self.superfluid_connector = ( + Superfluid( + rpc=rpc, + chain_id=chain_id, + account=self._account, + ) + if chain in CHAINS_WITH_SUPERTOKEN + else None + ) async def sign_raw(self, buffer: bytes) -> bytes: """Sign a raw buffer.""" @@ -37,6 +91,46 @@ def from_mnemonic(mnemonic: str) -> "ETHAccount": Account.enable_unaudited_hdwallet_features() return ETHAccount(private_key=Account.from_mnemonic(mnemonic=mnemonic).key) + def create_flow(self, receiver: str, flow: Decimal) -> Awaitable[str]: + """Creat a Superfluid flow between this account and the receiver address.""" + if not self.superfluid_connector: + raise ValueError("Superfluid connector is required to create a flow") + return self.superfluid_connector.create_flow( + sender=self.get_address(), receiver=receiver, flow=flow + ) + + def get_flow(self, receiver: str) -> Awaitable[Web3FlowInfo]: + """Get the Superfluid flow between this account and the receiver address.""" + if not self.superfluid_connector: + raise ValueError("Superfluid connector is required to get a flow") + return self.superfluid_connector.get_flow( + sender=self.get_address(), 
receiver=receiver + ) + + def update_flow(self, receiver: str, flow: Decimal) -> Awaitable[str]: + """Update the Superfluid flow between this account and the receiver address.""" + if not self.superfluid_connector: + raise ValueError("Superfluid connector is required to update a flow") + return self.superfluid_connector.update_flow( + sender=self.get_address(), receiver=receiver, flow=flow + ) + + def delete_flow(self, receiver: str) -> Awaitable[str]: + """Delete the Superfluid flow between this account and the receiver address.""" + if not self.superfluid_connector: + raise ValueError("Superfluid connector is required to delete a flow") + return self.superfluid_connector.delete_flow( + sender=self.get_address(), receiver=receiver + ) + + def update_superfluid_connector(self, rpc: str, chain_id: int): + """Update the Superfluid connector after initialisation.""" + self.superfluid_connector = Superfluid( + rpc=rpc, + chain_id=chain_id, + account=self._account, + ) + def get_fallback_account(path: Optional[Path] = None) -> ETHAccount: return ETHAccount(private_key=get_fallback_private_key(path=path)) diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index 318536e4..70378088 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -38,6 +38,10 @@ class Settings(BaseSettings): CODE_USES_SQUASHFS: bool = which("mksquashfs") is not None # True if command exists + AVAX_RPC: str = "https://api.avax.network/ext/bc/C/rpc" + AVAX_CHAIN_ID: int = 43114 + AVAX_ALEPH_SUPER_TOKEN = "0xc0Fbc4967259786C743361a5885ef49380473dCF" # mainnet + # Dns resolver DNS_IPFS_DOMAIN = "ipfs.public.aleph.sh" DNS_PROGRAM_DOMAIN = "program.public.aleph.sh" diff --git a/src/aleph/sdk/connectors/superfluid.py b/src/aleph/sdk/connectors/superfluid.py new file mode 100644 index 00000000..2c0b9fb6 --- /dev/null +++ b/src/aleph/sdk/connectors/superfluid.py @@ -0,0 +1,124 @@ +from __future__ import annotations + +import asyncio +from decimal import Decimal +from typing import 
TYPE_CHECKING, Optional + +from eth_utils import to_normalized_address, to_wei +from superfluid import CFA_V1, Operation, Web3FlowInfo +from web3 import Web3 +from web3.types import TxParams + +from aleph.sdk.conf import settings + +if TYPE_CHECKING: + from aleph.sdk.chains.ethereum import LocalAccount + + +async def sign_and_send_transaction( + account: LocalAccount, tx_params: TxParams, rpc: str +) -> str: + """ + Sign and broadcast a transaction using the provided ETHAccount + + @param tx_params - Transaction parameters + @param rpc - RPC URL + @returns - str - The transaction hash + """ + web3 = Web3(Web3.HTTPProvider(rpc)) + + def sign_and_send(): + signed_txn = account.sign_transaction(tx_params) + transaction_hash = web3.eth.send_raw_transaction(signed_txn.rawTransaction) + return transaction_hash.hex() + + # Sending a transaction is done over HTTP(S) and implemented using a blocking + # API in `web3.eth`. This runs it in a non-blocking asyncio executor. + loop = asyncio.get_running_loop() + transaction_hash = await loop.run_in_executor(None, sign_and_send) + return transaction_hash + + +async def execute_operation_with_account( + account: LocalAccount, operation: Operation +) -> str: + """ + Execute an operation using the provided ETHAccount + + @param operation - Operation instance from the library + @returns - str - The transaction hash + @returns - str - The transaction hash + """ + populated_transaction = operation._get_populated_transaction_request( + operation.rpc, account.key + ) + transaction_hash = await sign_and_send_transaction( + account, populated_transaction, operation.rpc + ) + return transaction_hash + + +class Superfluid: + """ + Wrapper around the Superfluid APIs in order to CRUD Superfluid flows between two accounts. 
+ """ + + account: Optional[LocalAccount] + + def __init__( + self, + rpc=settings.AVAX_RPC, + chain_id=settings.AVAX_CHAIN_ID, + account: Optional[LocalAccount] = None, + ): + self.cfaV1Instance = CFA_V1(rpc, chain_id) + self.account = account + + async def create_flow(self, sender: str, receiver: str, flow: Decimal) -> str: + """Create a Superfluid flow between two addresses.""" + if not self.account: + raise ValueError("An account is required to create a flow") + return await execute_operation_with_account( + account=self.account, + operation=self.cfaV1Instance.create_flow( + sender=to_normalized_address(sender), + receiver=to_normalized_address(receiver), + super_token=settings.AVAX_ALEPH_SUPER_TOKEN, + flow_rate=to_wei(Decimal(flow), "ether"), + ), + ) + + async def get_flow(self, sender: str, receiver: str) -> Web3FlowInfo: + """Fetch information about the Superfluid flow between two addresses.""" + return self.cfaV1Instance.get_flow( + sender=to_normalized_address(sender), + receiver=to_normalized_address(receiver), + super_token=settings.AVAX_ALEPH_SUPER_TOKEN, + ) + + async def delete_flow(self, sender: str, receiver: str) -> str: + """Delete the Supefluid flow between two addresses.""" + if not self.account: + raise ValueError("An account is required to delete a flow") + return await execute_operation_with_account( + account=self.account, + operation=self.cfaV1Instance.delete_flow( + sender=to_normalized_address(sender), + receiver=to_normalized_address(receiver), + super_token=settings.AVAX_ALEPH_SUPER_TOKEN, + ), + ) + + async def update_flow(self, sender: str, receiver: str, flow: Decimal) -> str: + """Update the flow of a Superfluid flow between two addresses.""" + if not self.account: + raise ValueError("An account is required to update a flow") + return await execute_operation_with_account( + account=self.account, + operation=self.cfaV1Instance.update_flow( + sender=to_normalized_address(sender), + receiver=to_normalized_address(receiver), + 
super_token=settings.AVAX_ALEPH_SUPER_TOKEN, + flow_rate=to_wei(Decimal(flow), "ether"), + ), + ) diff --git a/tests/unit/test_superfluid.py b/tests/unit/test_superfluid.py new file mode 100644 index 00000000..92c83f2c --- /dev/null +++ b/tests/unit/test_superfluid.py @@ -0,0 +1,133 @@ +import random +from decimal import Decimal +from unittest import mock +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from aleph_message.models import Chain +from eth_utils import to_checksum_address +from superfluid import Operation, Web3FlowInfo + +from aleph.sdk.chains.ethereum import ETHAccount +from aleph.sdk.conf import settings + + +def generate_fake_eth_address(): + return to_checksum_address( + "0x" + "".join([random.choice("0123456789abcdef") for _ in range(40)]) + ) + + +@pytest.fixture +def mock_superfluid(): + with patch("aleph.sdk.connectors.superfluid.CFA_V1") as MockCFA_V1: + yield MockCFA_V1.return_value + + +@pytest.fixture +def eth_account(mock_superfluid): + private_key = b"\x01" * 32 + return ETHAccount( + private_key, + chain=Chain.AVAX, + rpc=settings.AVAX_RPC, + chain_id=settings.AVAX_CHAIN_ID, + ) + + +@pytest.mark.asyncio +async def test_initialization(eth_account): + assert eth_account.superfluid_connector is not None + + +@pytest.mark.asyncio +async def test_create_flow(eth_account, mock_superfluid): + mock_operation = AsyncMock(spec=Operation) + mock_superfluid.create_flow.return_value = mock_operation + + sender = eth_account.get_address() + receiver = generate_fake_eth_address() + flow = Decimal("10.0") + + with patch( + "aleph.sdk.connectors.superfluid.execute_operation_with_account", + return_value="0xTransactionHash", + ) as mock_execute: + tx_hash = await eth_account.create_flow(receiver, flow) + assert tx_hash == "0xTransactionHash" + mock_execute.assert_called_once_with( + account=eth_account._account, operation=mock_operation + ) + mock_superfluid.create_flow.assert_called_once_with( + sender=sender.lower(), + 
receiver=receiver.lower(), + super_token=settings.AVAX_ALEPH_SUPER_TOKEN, + flow_rate=mock.ANY, + ) + + +@pytest.mark.asyncio +async def test_delete_flow(eth_account, mock_superfluid): + mock_operation = AsyncMock(spec=Operation) + mock_superfluid.delete_flow.return_value = mock_operation + + sender = eth_account.get_address() + receiver = generate_fake_eth_address() + + with patch( + "aleph.sdk.connectors.superfluid.execute_operation_with_account", + return_value="0xTransactionHash", + ) as mock_execute: + tx_hash = await eth_account.delete_flow(receiver) + assert tx_hash == "0xTransactionHash" + mock_execute.assert_called_once_with( + account=eth_account._account, operation=mock_operation + ) + mock_superfluid.delete_flow.assert_called_once_with( + sender=sender.lower(), + receiver=receiver.lower(), + super_token=settings.AVAX_ALEPH_SUPER_TOKEN, + ) + + +@pytest.mark.asyncio +async def test_update_flow(eth_account, mock_superfluid): + mock_operation = AsyncMock(spec=Operation) + mock_superfluid.update_flow.return_value = mock_operation + + sender = eth_account.get_address() + receiver = generate_fake_eth_address() + flow = Decimal(15.0) + + with patch( + "aleph.sdk.connectors.superfluid.execute_operation_with_account", + return_value="0xTransactionHash", + ) as mock_execute: + tx_hash = await eth_account.update_flow(receiver, flow) + assert tx_hash == "0xTransactionHash" + mock_execute.assert_called_once_with( + account=eth_account._account, operation=mock_operation + ) + mock_superfluid.update_flow.assert_called_once_with( + sender=sender.lower(), + receiver=receiver.lower(), + super_token=settings.AVAX_ALEPH_SUPER_TOKEN, + flow_rate=mock.ANY, + ) + + +@pytest.mark.asyncio +async def test_get_flow(eth_account, mock_superfluid): + mock_flow_info = MagicMock(spec=Web3FlowInfo) + mock_superfluid.get_flow.return_value = mock_flow_info + + sender = eth_account.get_address() + receiver = generate_fake_eth_address() + + flow_info = await eth_account.get_flow(receiver) 
+ assert flow_info == mock_flow_info + mock_superfluid.get_flow.assert_called_once_with( + sender=sender.lower(), + receiver=receiver.lower(), + super_token=settings.AVAX_ALEPH_SUPER_TOKEN, + ) From 2d4ded1b524c5ced38a4ac893d2f82164934019d Mon Sep 17 00:00:00 2001 From: philogicae Date: Mon, 26 Aug 2024 18:51:59 +0300 Subject: [PATCH 049/122] Upgrade to web3py, fixes on ETHAccount/PAYG, EVM configs (#154) * Remove eth_typing / eth_account -> web3 6.3.0 + temp dependencies * New config * Rewrite for EVM chains * Fix circular import * mypy * Fix superfluid + export utils methods in evm_utils * Fix eth_typing * fix: unit test * fix: unit test * Fix mock tests * Add get_chains_with_holding --------- Co-authored-by: 1yam --- pyproject.toml | 7 +- src/aleph/sdk/chains/ethereum.py | 196 ++++++++++++++++--------- src/aleph/sdk/conf.py | 45 +++++- src/aleph/sdk/connectors/superfluid.py | 140 +++++++----------- src/aleph/sdk/evm_utils.py | 91 ++++++++++++ src/aleph/sdk/types.py | 14 +- tests/unit/test_superfluid.py | 125 ++++++---------- 7 files changed, 376 insertions(+), 242 deletions(-) create mode 100644 src/aleph/sdk/evm_utils.py diff --git a/pyproject.toml b/pyproject.toml index 4c41904a..9ecc2a62 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,18 +23,17 @@ classifiers = [ ] dependencies = [ "aiohttp>=3.8.3", - "aleph-message>=0.4.8", + "aleph-message>=0.4.9", "coincurve; python_version<\"3.11\"", "coincurve>=19.0.0; python_version>=\"3.11\"", "eth_abi>=4.0.0; python_version>=\"3.11\"", - "eth_account>=0.4.0,<0.11.0", "jwcrypto==1.5.6", "python-magic", "typing_extensions", "aioresponses>=0.7.6", - "superfluid~=0.2.1", + "superfluid@git+https://github.com/1yam/superfluid.py.git@1yam-add-base", "eth_typing==4.3.1", - + "web3==6.3.0", ] [project.optional-dependencies] diff --git a/src/aleph/sdk/chains/ethereum.py b/src/aleph/sdk/chains/ethereum.py index 294f47da..32f459b7 100644 --- a/src/aleph/sdk/chains/ethereum.py +++ b/src/aleph/sdk/chains/ethereum.py @@ 
-1,103 +1,167 @@ +import asyncio from decimal import Decimal from pathlib import Path -from typing import Awaitable, Dict, Optional, Set, Union +from typing import Awaitable, Optional, Union from aleph_message.models import Chain -from eth_account import Account +from eth_account import Account # type: ignore from eth_account.messages import encode_defunct from eth_account.signers.local import LocalAccount from eth_keys.exceptions import BadSignature as EthBadSignatureError from superfluid import Web3FlowInfo +from web3 import Web3 +from web3.middleware import geth_poa_middleware +from web3.types import TxParams, TxReceipt + +from aleph.sdk.exceptions import InsufficientFundsError from ..conf import settings from ..connectors.superfluid import Superfluid +from ..evm_utils import ( + BALANCEOF_ABI, + MIN_ETH_BALANCE, + MIN_ETH_BALANCE_WEI, + get_chain_id, + get_chains_with_super_token, + get_rpc, + get_super_token_address, + get_token_address, + to_human_readable_token, +) from ..exceptions import BadSignatureError from ..utils import bytes_from_hex from .common import BaseAccount, get_fallback_private_key, get_public_key -CHAINS_WITH_SUPERTOKEN: Set[Chain] = {Chain.AVAX} -CHAIN_IDS: Dict[Chain, int] = { - Chain.AVAX: settings.AVAX_CHAIN_ID, -} - - -def get_rpc_for_chain(chain: Chain): - """Returns the RPC to use for a given Ethereum based blockchain""" - if not chain: - return None - - if chain == Chain.AVAX: - return settings.AVAX_RPC - else: - raise ValueError(f"Unknown RPC for chain {chain}") - - -def get_chain_id_for_chain(chain: Chain): - """Returns the chain ID of a given Ethereum based blockchain""" - if not chain: - return None - - if chain in CHAIN_IDS: - return CHAIN_IDS[chain] - else: - raise ValueError(f"Unknown RPC for chain {chain}") - class ETHAccount(BaseAccount): - """Interact with an Ethereum address or key pair""" + """Interact with an Ethereum address or key pair on EVM blockchains""" CHAIN = "ETH" CURVE = "secp256k1" _account: LocalAccount + 
_provider: Optional[Web3] chain: Optional[Chain] + chain_id: Optional[int] + rpc: Optional[str] superfluid_connector: Optional[Superfluid] def __init__( self, private_key: bytes, chain: Optional[Chain] = None, - rpc: Optional[str] = None, - chain_id: Optional[int] = None, ): self.private_key = private_key - self._account = Account.from_key(self.private_key) - self.chain = chain - rpc = rpc or get_rpc_for_chain(chain) - chain_id = chain_id or get_chain_id_for_chain(chain) - self.superfluid_connector = ( - Superfluid( - rpc=rpc, - chain_id=chain_id, - account=self._account, - ) - if chain in CHAINS_WITH_SUPERTOKEN - else None + self._account: LocalAccount = Account.from_key(self.private_key) + self.connect_chain(chain=chain) + + @staticmethod + def from_mnemonic(mnemonic: str, chain: Optional[Chain] = None) -> "ETHAccount": + Account.enable_unaudited_hdwallet_features() + return ETHAccount( + private_key=Account.from_mnemonic(mnemonic=mnemonic).key, chain=chain ) + def get_address(self) -> str: + return self._account.address + + def get_public_key(self) -> str: + return "0x" + get_public_key(private_key=self._account.key).hex() + async def sign_raw(self, buffer: bytes) -> bytes: """Sign a raw buffer.""" msghash = encode_defunct(text=buffer.decode("utf-8")) sig = self._account.sign_message(msghash) return sig["signature"] - def get_address(self) -> str: - return self._account.address + def connect_chain(self, chain: Optional[Chain] = None): + self.chain = chain + if self.chain: + self.chain_id = get_chain_id(self.chain) + self.rpc = get_rpc(self.chain) + self._provider = Web3(Web3.HTTPProvider(self.rpc)) + if chain == Chain.BSC: + self._provider.middleware_onion.inject( + geth_poa_middleware, "geth_poa", layer=0 + ) + else: + self.chain_id = None + self.rpc = None + self._provider = None + + if chain in get_chains_with_super_token() and self._provider: + self.superfluid_connector = Superfluid(self) + else: + self.superfluid_connector = None + + def switch_chain(self, 
chain: Optional[Chain] = None): + self.connect_chain(chain=chain) + + def can_transact(self, block=True) -> bool: + balance = self.get_eth_balance() + valid = balance > MIN_ETH_BALANCE_WEI if self.chain else False + if not valid and block: + raise InsufficientFundsError( + required_funds=MIN_ETH_BALANCE, + available_funds=to_human_readable_token(balance), + ) + return valid + + async def _sign_and_send_transaction(self, tx_params: TxParams) -> str: + """ + Sign and broadcast a transaction using the provided ETHAccount + @param tx_params - Transaction parameters + @returns - str - Transaction hash + """ + self.can_transact() + + def sign_and_send() -> TxReceipt: + if self._provider is None: + raise ValueError("Provider not connected") + signed_tx = self._provider.eth.account.sign_transaction( + tx_params, self._account.key + ) + tx_hash = self._provider.eth.send_raw_transaction(signed_tx.rawTransaction) + tx_receipt = self._provider.eth.wait_for_transaction_receipt( + tx_hash, settings.TX_TIMEOUT + ) + return tx_receipt - def get_public_key(self) -> str: - return "0x" + get_public_key(private_key=self._account.key).hex() + loop = asyncio.get_running_loop() + tx_receipt = await loop.run_in_executor(None, sign_and_send) + return tx_receipt["transactionHash"].hex() - @staticmethod - def from_mnemonic(mnemonic: str) -> "ETHAccount": - Account.enable_unaudited_hdwallet_features() - return ETHAccount(private_key=Account.from_mnemonic(mnemonic=mnemonic).key) + def get_eth_balance(self) -> Decimal: + return Decimal( + self._provider.eth.get_balance(self._account.address) + if self._provider + else 0 + ) + + def get_token_balance(self) -> Decimal: + if self.chain and self._provider: + contact_address = get_token_address(self.chain) + if contact_address: + contract = self._provider.eth.contract( + address=contact_address, abi=BALANCEOF_ABI + ) + return Decimal(contract.functions.balanceOf(self.get_address()).call()) + return Decimal(0) + + def get_super_token_balance(self) -> 
Decimal: + if self.chain and self._provider: + contact_address = get_super_token_address(self.chain) + if contact_address: + contract = self._provider.eth.contract( + address=contact_address, abi=BALANCEOF_ABI + ) + return Decimal(contract.functions.balanceOf(self.get_address()).call()) + return Decimal(0) def create_flow(self, receiver: str, flow: Decimal) -> Awaitable[str]: """Creat a Superfluid flow between this account and the receiver address.""" if not self.superfluid_connector: raise ValueError("Superfluid connector is required to create a flow") - return self.superfluid_connector.create_flow( - sender=self.get_address(), receiver=receiver, flow=flow - ) + return self.superfluid_connector.create_flow(receiver=receiver, flow=flow) def get_flow(self, receiver: str) -> Awaitable[Web3FlowInfo]: """Get the Superfluid flow between this account and the receiver address.""" @@ -111,29 +175,19 @@ def update_flow(self, receiver: str, flow: Decimal) -> Awaitable[str]: """Update the Superfluid flow between this account and the receiver address.""" if not self.superfluid_connector: raise ValueError("Superfluid connector is required to update a flow") - return self.superfluid_connector.update_flow( - sender=self.get_address(), receiver=receiver, flow=flow - ) + return self.superfluid_connector.update_flow(receiver=receiver, flow=flow) def delete_flow(self, receiver: str) -> Awaitable[str]: """Delete the Superfluid flow between this account and the receiver address.""" if not self.superfluid_connector: raise ValueError("Superfluid connector is required to delete a flow") - return self.superfluid_connector.delete_flow( - sender=self.get_address(), receiver=receiver - ) - - def update_superfluid_connector(self, rpc: str, chain_id: int): - """Update the Superfluid connector after initialisation.""" - self.superfluid_connector = Superfluid( - rpc=rpc, - chain_id=chain_id, - account=self._account, - ) + return self.superfluid_connector.delete_flow(receiver=receiver) -def 
get_fallback_account(path: Optional[Path] = None) -> ETHAccount: - return ETHAccount(private_key=get_fallback_private_key(path=path)) +def get_fallback_account( + path: Optional[Path] = None, chain: Optional[Chain] = None +) -> ETHAccount: + return ETHAccount(private_key=get_fallback_private_key(path=path), chain=chain) def verify_signature( diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index 70378088..38afc381 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -1,10 +1,13 @@ import os from pathlib import Path from shutil import which -from typing import Optional +from typing import Dict, Optional, Union +from aleph_message.models import Chain from pydantic import BaseSettings, Field +from aleph.sdk.types import ChainInfo + class Settings(BaseSettings): CONFIG_HOME: Optional[str] = None @@ -38,9 +41,43 @@ class Settings(BaseSettings): CODE_USES_SQUASHFS: bool = which("mksquashfs") is not None # True if command exists - AVAX_RPC: str = "https://api.avax.network/ext/bc/C/rpc" - AVAX_CHAIN_ID: int = 43114 - AVAX_ALEPH_SUPER_TOKEN = "0xc0Fbc4967259786C743361a5885ef49380473dCF" # mainnet + # Web3Provider settings + TOKEN_DECIMALS = 18 + TX_TIMEOUT = 60 * 3 + CHAINS: Dict[Union[Chain, str], ChainInfo] = { + # TESTNETS + "SEPOLIA": ChainInfo( + chain_id=11155111, + rpc="https://eth-sepolia.public.blastapi.io", + token="0xc4bf5cbdabe595361438f8c6a187bdc330539c60", + super_token="0x22064a21fee226d8ffb8818e7627d5ff6d0fc33a", + active=False, + ), + # MAINNETS + Chain.ETH: ChainInfo( + chain_id=1, + rpc="https://eth-mainnet.public.blastapi.io", + token="0x27702a26126e0B3702af63Ee09aC4d1A084EF628", + ), + Chain.AVAX: ChainInfo( + chain_id=43114, + rpc="https://api.avax.network/ext/bc/C/rpc", + token="0xc0Fbc4967259786C743361a5885ef49380473dCF", + super_token="0xc0Fbc4967259786C743361a5885ef49380473dCF", + ), + Chain.BASE: ChainInfo( + chain_id=8453, + rpc="https://base-mainnet.public.blastapi.io", + token="0xc0Fbc4967259786C743361a5885ef49380473dCF", 
+ super_token="0xc0Fbc4967259786C743361a5885ef49380473dCF", + ), + Chain.BSC: ChainInfo( + chain_id=56, + rpc="https://binance.llamarpc.com", + token="0x82D2f8E02Afb160Dd5A480a617692e62de9038C4", + active=False, + ), + } # Dns resolver DNS_IPFS_DOMAIN = "ipfs.public.aleph.sh" diff --git a/src/aleph/sdk/connectors/superfluid.py b/src/aleph/sdk/connectors/superfluid.py index 2c0b9fb6..4b7274f8 100644 --- a/src/aleph/sdk/connectors/superfluid.py +++ b/src/aleph/sdk/connectors/superfluid.py @@ -1,61 +1,17 @@ from __future__ import annotations -import asyncio from decimal import Decimal -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING -from eth_utils import to_normalized_address, to_wei +from eth_utils import to_normalized_address from superfluid import CFA_V1, Operation, Web3FlowInfo -from web3 import Web3 -from web3.types import TxParams -from aleph.sdk.conf import settings +from aleph.sdk.exceptions import InsufficientFundsError -if TYPE_CHECKING: - from aleph.sdk.chains.ethereum import LocalAccount - - -async def sign_and_send_transaction( - account: LocalAccount, tx_params: TxParams, rpc: str -) -> str: - """ - Sign and broadcast a transaction using the provided ETHAccount - - @param tx_params - Transaction parameters - @param rpc - RPC URL - @returns - str - The transaction hash - """ - web3 = Web3(Web3.HTTPProvider(rpc)) - - def sign_and_send(): - signed_txn = account.sign_transaction(tx_params) - transaction_hash = web3.eth.send_raw_transaction(signed_txn.rawTransaction) - return transaction_hash.hex() - - # Sending a transaction is done over HTTP(S) and implemented using a blocking - # API in `web3.eth`. This runs it in a non-blocking asyncio executor. 
- loop = asyncio.get_running_loop() - transaction_hash = await loop.run_in_executor(None, sign_and_send) - return transaction_hash +from ..evm_utils import get_super_token_address, to_human_readable_token, to_wei_token - -async def execute_operation_with_account( - account: LocalAccount, operation: Operation -) -> str: - """ - Execute an operation using the provided ETHAccount - - @param operation - Operation instance from the library - @returns - str - The transaction hash - @returns - str - The transaction hash - """ - populated_transaction = operation._get_populated_transaction_request( - operation.rpc, account.key - ) - transaction_hash = await sign_and_send_transaction( - account, populated_transaction, operation.rpc - ) - return transaction_hash +if TYPE_CHECKING: + from aleph.sdk.chains.ethereum import ETHAccount class Superfluid: @@ -63,28 +19,52 @@ class Superfluid: Wrapper around the Superfluid APIs in order to CRUD Superfluid flows between two accounts. """ - account: Optional[LocalAccount] + account: ETHAccount + normalized_address: str + super_token: str + cfaV1Instance: CFA_V1 + MIN_4_HOURS = 60 * 60 * 4 - def __init__( - self, - rpc=settings.AVAX_RPC, - chain_id=settings.AVAX_CHAIN_ID, - account: Optional[LocalAccount] = None, - ): - self.cfaV1Instance = CFA_V1(rpc, chain_id) + def __init__(self, account: ETHAccount): self.account = account - - async def create_flow(self, sender: str, receiver: str, flow: Decimal) -> str: + self.normalized_address = to_normalized_address(account.get_address()) + if account.chain: + self.super_token = str(get_super_token_address(account.chain)) + self.cfaV1Instance = CFA_V1(account.rpc, account.chain_id) + + async def _execute_operation_with_account(self, operation: Operation) -> str: + """ + Execute an operation using the provided ETHAccount + @param operation - Operation instance from the library + @returns - str - Transaction hash + """ + populated_transaction = operation._get_populated_transaction_request( + 
self.account.rpc, self.account._account.key + ) + return await self.account._sign_and_send_transaction(populated_transaction) + + def can_start_flow(self, flow: Decimal, block=True) -> bool: + valid = False + if self.account.can_transact(block=block): + balance = self.account.get_super_token_balance() + MIN_FLOW_4H = to_wei_token(flow) * Decimal(self.MIN_4_HOURS) + valid = balance > MIN_FLOW_4H + if not valid and block: + raise InsufficientFundsError( + required_funds=float(MIN_FLOW_4H), + available_funds=to_human_readable_token(balance), + ) + return valid + + async def create_flow(self, receiver: str, flow: Decimal) -> str: """Create a Superfluid flow between two addresses.""" - if not self.account: - raise ValueError("An account is required to create a flow") - return await execute_operation_with_account( - account=self.account, + self.can_start_flow(flow) + return await self._execute_operation_with_account( operation=self.cfaV1Instance.create_flow( - sender=to_normalized_address(sender), + sender=self.normalized_address, receiver=to_normalized_address(receiver), - super_token=settings.AVAX_ALEPH_SUPER_TOKEN, - flow_rate=to_wei(Decimal(flow), "ether"), + super_token=self.super_token, + flow_rate=int(to_wei_token(flow)), ), ) @@ -93,32 +73,26 @@ async def get_flow(self, sender: str, receiver: str) -> Web3FlowInfo: return self.cfaV1Instance.get_flow( sender=to_normalized_address(sender), receiver=to_normalized_address(receiver), - super_token=settings.AVAX_ALEPH_SUPER_TOKEN, + super_token=self.super_token, ) - async def delete_flow(self, sender: str, receiver: str) -> str: + async def delete_flow(self, receiver: str) -> str: """Delete the Supefluid flow between two addresses.""" - if not self.account: - raise ValueError("An account is required to delete a flow") - return await execute_operation_with_account( - account=self.account, + return await self._execute_operation_with_account( operation=self.cfaV1Instance.delete_flow( - sender=to_normalized_address(sender), 
+ sender=self.normalized_address, receiver=to_normalized_address(receiver), - super_token=settings.AVAX_ALEPH_SUPER_TOKEN, + super_token=self.super_token, ), ) - async def update_flow(self, sender: str, receiver: str, flow: Decimal) -> str: + async def update_flow(self, receiver: str, flow: Decimal) -> str: """Update the flow of a Superfluid flow between two addresses.""" - if not self.account: - raise ValueError("An account is required to update a flow") - return await execute_operation_with_account( - account=self.account, + return await self._execute_operation_with_account( operation=self.cfaV1Instance.update_flow( - sender=to_normalized_address(sender), + sender=self.normalized_address, receiver=to_normalized_address(receiver), - super_token=settings.AVAX_ALEPH_SUPER_TOKEN, - flow_rate=to_wei(Decimal(flow), "ether"), + super_token=self.super_token, + flow_rate=int(to_wei_token(flow)), ), ) diff --git a/src/aleph/sdk/evm_utils.py b/src/aleph/sdk/evm_utils.py new file mode 100644 index 00000000..c7166cec --- /dev/null +++ b/src/aleph/sdk/evm_utils.py @@ -0,0 +1,91 @@ +from decimal import Decimal +from typing import List, Optional, Union + +from aleph_message.models import Chain +from eth_utils import to_wei +from web3 import Web3 +from web3.types import ChecksumAddress + +from .conf import settings + +MIN_ETH_BALANCE: float = 0.005 +MIN_ETH_BALANCE_WEI = Decimal(to_wei(MIN_ETH_BALANCE, "ether")) +BALANCEOF_ABI = """[{ + "name": "balanceOf", + "inputs": [{"name": "account", "type": "address"}], + "outputs": [{"name": "balance", "type": "uint256"}], + "constant": true, + "payable": false, + "stateMutability": "view", + "type": "function" +}]""" + + +def to_human_readable_token(amount: Decimal) -> float: + return float(amount / (Decimal(10) ** Decimal(settings.TOKEN_DECIMALS))) + + +def to_wei_token(amount: Decimal) -> Decimal: + return amount * Decimal(10) ** Decimal(settings.TOKEN_DECIMALS) + + +def get_chain_id(chain: Union[Chain, str, None]) -> Optional[int]: + 
"""Returns the CHAIN_ID of a given EVM blockchain""" + if chain: + if chain in settings.CHAINS and settings.CHAINS[chain].chain_id: + return settings.CHAINS[chain].chain_id + else: + raise ValueError(f"Unknown RPC for chain {chain}") + return None + + +def get_rpc(chain: Union[Chain, str, None]) -> Optional[str]: + """Returns the RPC to use for a given EVM blockchain""" + if chain: + if chain in settings.CHAINS and settings.CHAINS[chain].rpc: + return settings.CHAINS[chain].rpc + else: + raise ValueError(f"Unknown RPC for chain {chain}") + return None + + +def get_token_address(chain: Union[Chain, str, None]) -> Optional[ChecksumAddress]: + if chain: + if chain in settings.CHAINS: + address = settings.CHAINS[chain].super_token + if address: + try: + return Web3.to_checksum_address(address) + except ValueError: + raise ValueError(f"Invalid token address {address}") + else: + raise ValueError(f"Unknown token for chain {chain}") + return None + + +def get_super_token_address( + chain: Union[Chain, str, None] +) -> Optional[ChecksumAddress]: + if chain: + if chain in settings.CHAINS: + address = settings.CHAINS[chain].super_token + if address: + try: + return Web3.to_checksum_address(address) + except ValueError: + raise ValueError(f"Invalid token address {address}") + else: + raise ValueError(f"Unknown super_token for chain {chain}") + return None + + +def get_chains_with_holding() -> List[Union[Chain, str]]: + return [chain for chain, info in settings.CHAINS.items() if info.active] + + +def get_chains_with_super_token() -> List[Union[Chain, str]]: + return [ + chain + for chain, info in settings.CHAINS.items() + if info.active and info.super_token + ] diff --git a/src/aleph/sdk/types.py b/src/aleph/sdk/types.py index cf9e6fa8..081a3465 100644 --- a/src/aleph/sdk/types.py +++ b/src/aleph/sdk/types.py @@ -1,6 +1,6 @@ from abc import abstractmethod from enum import Enum -from typing import Dict, Protocol, TypeVar +from typing import Dict, Optional, Protocol, TypeVar 
from pydantic import BaseModel @@ -64,3 +64,15 @@ class SEVMeasurement(BaseModel): sev_info: SEVInfo launch_measure: str + + +class ChainInfo(BaseModel): + """ + A chain information. + """ + + chain_id: int + rpc: str + token: str + super_token: Optional[str] = None + active: bool = True diff --git a/tests/unit/test_superfluid.py b/tests/unit/test_superfluid.py index 92c83f2c..c2f853bd 100644 --- a/tests/unit/test_superfluid.py +++ b/tests/unit/test_superfluid.py @@ -1,15 +1,12 @@ import random from decimal import Decimal -from unittest import mock -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock, patch import pytest from aleph_message.models import Chain from eth_utils import to_checksum_address -from superfluid import Operation, Web3FlowInfo from aleph.sdk.chains.ethereum import ETHAccount -from aleph.sdk.conf import settings def generate_fake_eth_address(): @@ -20,19 +17,38 @@ def generate_fake_eth_address(): @pytest.fixture def mock_superfluid(): - with patch("aleph.sdk.connectors.superfluid.CFA_V1") as MockCFA_V1: - yield MockCFA_V1.return_value + with patch("aleph.sdk.connectors.superfluid.Superfluid") as MockSuperfluid: + mock_superfluid = MockSuperfluid.return_value + + # Mock methods for the Superfluid connector + mock_superfluid.create_flow = AsyncMock(return_value="0xTransactionHash") + mock_superfluid.delete_flow = AsyncMock(return_value="0xTransactionHash") + mock_superfluid.update_flow = AsyncMock(return_value="0xTransactionHash") + + # Mock get_flow to return a mock Web3FlowInfo + mock_flow_info = {"timestamp": 0, "flowRate": 0, "deposit": 0, "owedDeposit": 0} + mock_superfluid.get_flow = AsyncMock(return_value=mock_flow_info) + + yield mock_superfluid @pytest.fixture def eth_account(mock_superfluid): private_key = b"\x01" * 32 - return ETHAccount( + account = ETHAccount( private_key, chain=Chain.AVAX, - rpc=settings.AVAX_RPC, - chain_id=settings.AVAX_CHAIN_ID, ) + with patch.object( + account, 
"get_super_token_balance", new_callable=AsyncMock + ) as mock_get_balance: + mock_get_balance.return_value = Decimal("1") + with patch.object( + account, "can_transact", new_callable=AsyncMock + ) as mock_can_transact: + mock_can_transact.return_value = True + account.superfluid_connector = mock_superfluid + yield account @pytest.mark.asyncio @@ -42,92 +58,43 @@ async def test_initialization(eth_account): @pytest.mark.asyncio async def test_create_flow(eth_account, mock_superfluid): - mock_operation = AsyncMock(spec=Operation) - mock_superfluid.create_flow.return_value = mock_operation - - sender = eth_account.get_address() receiver = generate_fake_eth_address() - flow = Decimal("10.0") - - with patch( - "aleph.sdk.connectors.superfluid.execute_operation_with_account", - return_value="0xTransactionHash", - ) as mock_execute: - tx_hash = await eth_account.create_flow(receiver, flow) - assert tx_hash == "0xTransactionHash" - mock_execute.assert_called_once_with( - account=eth_account._account, operation=mock_operation - ) - mock_superfluid.create_flow.assert_called_once_with( - sender=sender.lower(), - receiver=receiver.lower(), - super_token=settings.AVAX_ALEPH_SUPER_TOKEN, - flow_rate=mock.ANY, - ) + flow = Decimal("0.00000005") + + tx_hash = await eth_account.create_flow(receiver, flow) + + assert tx_hash == "0xTransactionHash" + mock_superfluid.create_flow.assert_awaited_once() @pytest.mark.asyncio async def test_delete_flow(eth_account, mock_superfluid): - mock_operation = AsyncMock(spec=Operation) - mock_superfluid.delete_flow.return_value = mock_operation - - sender = eth_account.get_address() receiver = generate_fake_eth_address() - with patch( - "aleph.sdk.connectors.superfluid.execute_operation_with_account", - return_value="0xTransactionHash", - ) as mock_execute: - tx_hash = await eth_account.delete_flow(receiver) - assert tx_hash == "0xTransactionHash" - mock_execute.assert_called_once_with( - account=eth_account._account, operation=mock_operation - ) - 
mock_superfluid.delete_flow.assert_called_once_with( - sender=sender.lower(), - receiver=receiver.lower(), - super_token=settings.AVAX_ALEPH_SUPER_TOKEN, - ) + tx_hash = await eth_account.delete_flow(receiver) + + assert tx_hash == "0xTransactionHash" + mock_superfluid.delete_flow.assert_awaited_once() @pytest.mark.asyncio async def test_update_flow(eth_account, mock_superfluid): - mock_operation = AsyncMock(spec=Operation) - mock_superfluid.update_flow.return_value = mock_operation - - sender = eth_account.get_address() receiver = generate_fake_eth_address() - flow = Decimal(15.0) - - with patch( - "aleph.sdk.connectors.superfluid.execute_operation_with_account", - return_value="0xTransactionHash", - ) as mock_execute: - tx_hash = await eth_account.update_flow(receiver, flow) - assert tx_hash == "0xTransactionHash" - mock_execute.assert_called_once_with( - account=eth_account._account, operation=mock_operation - ) - mock_superfluid.update_flow.assert_called_once_with( - sender=sender.lower(), - receiver=receiver.lower(), - super_token=settings.AVAX_ALEPH_SUPER_TOKEN, - flow_rate=mock.ANY, - ) + flow = Decimal("0.005") + + tx_hash = await eth_account.update_flow(receiver, flow) + + assert tx_hash == "0xTransactionHash" + mock_superfluid.update_flow.assert_awaited_once() @pytest.mark.asyncio async def test_get_flow(eth_account, mock_superfluid): - mock_flow_info = MagicMock(spec=Web3FlowInfo) - mock_superfluid.get_flow.return_value = mock_flow_info - - sender = eth_account.get_address() receiver = generate_fake_eth_address() flow_info = await eth_account.get_flow(receiver) - assert flow_info == mock_flow_info - mock_superfluid.get_flow.assert_called_once_with( - sender=sender.lower(), - receiver=receiver.lower(), - super_token=settings.AVAX_ALEPH_SUPER_TOKEN, - ) + + assert flow_info["timestamp"] == 0 + assert flow_info["flowRate"] == 0 + assert flow_info["deposit"] == 0 + assert flow_info["owedDeposit"] == 0 From 89fb7bcc4a28437c82beea0ab3ac84e22342fce7 Mon Sep 17 
00:00:00 2001 From: nesitor Date: Mon, 26 Aug 2024 17:57:24 +0200 Subject: [PATCH 050/122] Fix: Remove support for python 3.8 and Ubuntu 20.04 (#155) --- .github/workflows/build-wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-wheels.yml b/.github/workflows/build-wheels.yml index 55e39e75..4e32a239 100644 --- a/.github/workflows/build-wheels.yml +++ b/.github/workflows/build-wheels.yml @@ -13,7 +13,7 @@ jobs: strategy: fail-fast: false matrix: - os: [macos-12, macos-13, macos-14, ubuntu-20.04, ubuntu-22.04, ubuntu-24.04] + os: [macos-12, macos-13, macos-14, ubuntu-22.04, ubuntu-24.04] runs-on: ${{ matrix.os }} steps: From 991670c4e8b2823558f780a77a8e3e112fa2a08f Mon Sep 17 00:00:00 2001 From: nesitor Date: Mon, 26 Aug 2024 18:20:05 +0200 Subject: [PATCH 051/122] Fix: Reload the official superfluid dependency. (#156) --- pyproject.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9ecc2a62..3fce31f4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ dependencies = [ "python-magic", "typing_extensions", "aioresponses>=0.7.6", - "superfluid@git+https://github.com/1yam/superfluid.py.git@1yam-add-base", + "superfluid~=0.2.1", "eth_typing==4.3.1", "web3==6.3.0", ] @@ -107,7 +107,7 @@ include = [ profile = "black" [[tool.hatch.envs.all.matrix]] -python = ["3.8", "3.9", "3.10", "3.11"] +python = ["3.9", "3.10", "3.11"] [tool.hatch.envs.testing] features = [ @@ -171,7 +171,7 @@ all = [ ] [tool.mypy] -python_version = 3.8 +python_version = 3.9 mypy_path = "src" exclude = [ "conftest.py" From 5e0c979230068e5321e912bdbd9e447c61deba3a Mon Sep 17 00:00:00 2001 From: philogicae Date: Wed, 28 Aug 2024 16:51:10 +0300 Subject: [PATCH 052/122] Fix _load_account when both string & path private key are provided (#160) * Fix _load_account when both string & path private key are provided * Update src/aleph/sdk/account.py --------- Co-authored-by: Olivier Le 
Thanh Duong --- src/aleph/sdk/account.py | 9 +-------- 1 file changed, 1 insertion(+), 8 deletions(-) diff --git a/src/aleph/sdk/account.py b/src/aleph/sdk/account.py index 6ec08c83..59eef815 100644 --- a/src/aleph/sdk/account.py +++ b/src/aleph/sdk/account.py @@ -30,14 +30,7 @@ def _load_account( private_key_path: Optional[Path] = None, account_type: Type[AccountFromPrivateKey] = ETHAccount, ) -> AccountFromPrivateKey: - """Load private key from a string or a file. - - Only keys that accounts that can be initiated from a - """ - - assert not ( - private_key_str and private_key_path - ), "Private key should be a string or a filepath, not both." + """Load private key from a string or a file. takes the string argument in priority""" if private_key_str: logger.debug("Using account from string") From 0a48732daba3c16f71d610b2e60fb0e10f66e299 Mon Sep 17 00:00:00 2001 From: philogicae Date: Wed, 28 Aug 2024 15:22:53 +0300 Subject: [PATCH 053/122] Fix right type on create_session --- src/aleph/sdk/client/vm_confidential_client.py | 6 +++--- tests/unit/test_vm_confidential_client.py | 14 +++++++++----- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/src/aleph/sdk/client/vm_confidential_client.py b/src/aleph/sdk/client/vm_confidential_client.py index 4a037d45..e027b384 100644 --- a/src/aleph/sdk/client/vm_confidential_client.py +++ b/src/aleph/sdk/client/vm_confidential_client.py @@ -56,7 +56,7 @@ async def get_certificates(self) -> Tuple[Optional[int], str]: return None, str(e) async def create_session( - self, vm_id: ItemHash, certificate_path: Path, policy: int + self, certificate_prefix: str, platform_certificate_path: Path, policy: int ) -> Path: """ Create new confidential session @@ -66,8 +66,8 @@ async def create_session( args = [ "session", "--name", - str(vm_id), - str(certificate_path), + certificate_prefix, + str(platform_certificate_path), str(policy), ] try: diff --git a/tests/unit/test_vm_confidential_client.py 
b/tests/unit/test_vm_confidential_client.py index 832871ff..6c5e01ed 100644 --- a/tests/unit/test_vm_confidential_client.py +++ b/tests/unit/test_vm_confidential_client.py @@ -186,10 +186,12 @@ async def test_confidential_inject_secret_instance(): @pytest.mark.asyncio async def test_create_session_command(): account = ETHAccount(private_key=b"0x" + b"1" * 30) - vm_id = ItemHash("cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe") node_url = "http://localhost" sevctl_path = Path("/usr/bin/sevctl") - certificates_path = Path("/") + certificate_prefix = ( + "cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe/vm" + ) + platform_certificate_path = Path("/") policy = 1 with mock.patch( @@ -202,14 +204,16 @@ async def test_create_session_command(): node_url=node_url, session=aiohttp.ClientSession(), ) - _ = await vm_client.create_session(vm_id, certificates_path, policy) + _ = await vm_client.create_session( + certificate_prefix, platform_certificate_path, policy + ) export_mock.assert_called_once_with( [ str(sevctl_path), "session", "--name", - str(vm_id), - str(certificates_path), + certificate_prefix, + str(platform_certificate_path), str(policy), ], check=True, From 1c16a1ba79d768070b979875e5cbc51871cddec9 Mon Sep 17 00:00:00 2001 From: philogicae Date: Wed, 28 Aug 2024 19:22:46 +0300 Subject: [PATCH 054/122] Upgrade to aleph-superfluid>=2.1.0 (#161) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 3fce31f4..0f62338e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ dependencies = [ "python-magic", "typing_extensions", "aioresponses>=0.7.6", - "superfluid~=0.2.1", + "aleph-superfluid>=0.2.1", "eth_typing==4.3.1", "web3==6.3.0", ] From 1bc6a4fac7f7178a83c82f0bc71e3c82bb099a30 Mon Sep 17 00:00:00 2001 From: philogicae Date: Tue, 3 Sep 2024 13:22:05 +0300 Subject: [PATCH 055/122] Dynamic Chain Settings and Environment Variable Support in SDK 
Configuration" (#162) Fix settings --- .env.example | 5 ++++ .gitignore | 4 +++ src/aleph/sdk/conf.py | 61 +++++++++++++++++++++++++++++++++++++++++-- 3 files changed, 68 insertions(+), 2 deletions(-) create mode 100644 .env.example diff --git a/.env.example b/.env.example new file mode 100644 index 00000000..dbc6b016 --- /dev/null +++ b/.env.example @@ -0,0 +1,5 @@ +# To modify src/aleph/sdk/conf.py, create a .env file and add: +# ALEPH_= +# To modify active & rpc fields in CHAINS, follow this example: +# ALEPH_CHAINS_SEPOLIA_ACTIVE=True +# ALEPH_CHAINS_SEPOLIA_RPC=https://... \ No newline at end of file diff --git a/.gitignore b/.gitignore index c4734889..2896a4e6 100644 --- a/.gitignore +++ b/.gitignore @@ -49,4 +49,8 @@ MANIFEST .venv*/ **/device.key +# environment variables +.env +.env.local + .gitsigners diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index 38afc381..4236370a 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -4,6 +4,7 @@ from typing import Dict, Optional, Union from aleph_message.models import Chain +from aleph_message.models.execution.environment import HypervisorType from pydantic import BaseSettings, Field from aleph.sdk.types import ChainInfo @@ -16,7 +17,7 @@ class Settings(BaseSettings): # do an ugly and insecure write and read from disk to this file. 
PRIVATE_KEY_FILE: Path = Field( default=Path("ethereum.key"), - description="Path to the private key used to sign messages", + description="Path to the private key used to sign messages and transactions", ) PRIVATE_MNEMONIC_FILE: Path = Field( @@ -31,16 +32,51 @@ class Settings(BaseSettings): REMOTE_CRYPTO_HOST: Optional[str] = None REMOTE_CRYPTO_UNIX_SOCKET: Optional[str] = None ADDRESS_TO_USE: Optional[str] = None + HTTP_REQUEST_TIMEOUT = 10.0 + DEFAULT_CHANNEL: str = "ALEPH-CLOUDSOLUTIONS" DEFAULT_RUNTIME_ID: str = ( - "f873715dc2feec3833074bd4b8745363a0e0093746b987b4c8191268883b2463" # Debian 12 official runtime + "63f07193e6ee9d207b7d1fcf8286f9aee34e6f12f101d2ec77c1229f92964696" ) + DEBIAN_11_ROOTFS_ID: str = ( + "887957042bb0e360da3485ed33175882ce72a70d79f1ba599400ff4802b7cee7" + ) + DEBIAN_12_ROOTFS_ID: str = ( + "6e30de68c6cedfa6b45240c2b51e52495ac6fb1bd4b36457b3d5ca307594d595" + ) + UBUNTU_22_ROOTFS_ID: str = ( + "77fef271aa6ff9825efa3186ca2e715d19e7108279b817201c69c34cedc74c27" + ) + DEBIAN_11_QEMU_ROOTFS_ID: str = ( + "f7e68c568906b4ebcd3cd3c4bfdff96c489cd2a9ef73ba2d7503f244dfd578de" + ) + DEBIAN_12_QEMU_ROOTFS_ID: str = ( + "b6ff5c3a8205d1ca4c7c3369300eeafff498b558f71b851aa2114afd0a532717" + ) + UBUNTU_22_QEMU_ROOTFS_ID: str = ( + "4a0f62da42f4478544616519e6f5d58adb1096e069b392b151d47c3609492d0c" + ) + + DEFAULT_CONFIDENTIAL_FIRMWARE: str = ( + "ba5bb13f3abca960b101a759be162b229e2b7e93ecad9d1307e54de887f177ff" + ) + DEFAULT_CONFIDENTIAL_FIRMWARE_HASH: str = ( + "89b76b0e64fe9015084fbffdf8ac98185bafc688bfe7a0b398585c392d03c7ee" + ) + + DEFAULT_ROOTFS_SIZE: int = 20_480 + DEFAULT_INSTANCE_MEMORY: int = 2_048 + DEFAULT_HYPERVISOR: HypervisorType = HypervisorType.qemu + DEFAULT_VM_MEMORY: int = 256 DEFAULT_VM_VCPUS: int = 1 DEFAULT_VM_TIMEOUT: float = 30.0 CODE_USES_SQUASHFS: bool = which("mksquashfs") is not None # True if command exists + VM_URL_PATH = "https://aleph.sh/vm/{hash}" + VM_URL_HOST = "https://{hash_base32}.aleph.sh" + # Web3Provider settings 
TOKEN_DECIMALS = 18 TX_TIMEOUT = 60 * 3 @@ -78,6 +114,17 @@ class Settings(BaseSettings): active=False, ), } + # Add all placeholders to allow easy dynamic setup of CHAINS + CHAINS_SEPOLIA_ACTIVE: Optional[bool] + CHAINS_ETH_ACTIVE: Optional[bool] + CHAINS_AVAX_ACTIVE: Optional[bool] + CHAINS_BASE_ACTIVE: Optional[bool] + CHAINS_BSC_ACTIVE: Optional[bool] + CHAINS_SEPOLIA_RPC: Optional[str] + CHAINS_ETH_RPC: Optional[str] + CHAINS_AVAX_RPC: Optional[str] + CHAINS_BASE_RPC: Optional[str] + CHAINS_BSC_RPC: Optional[str] # Dns resolver DNS_IPFS_DOMAIN = "ipfs.public.aleph.sh" @@ -115,3 +162,13 @@ class Config: settings.PRIVATE_MNEMONIC_FILE = Path( settings.CONFIG_HOME, "private-keys", "substrate.mnemonic" ) + +# Update CHAINS settings and remove placeholders +CHAINS_ENV = [(key[7:], value) for key, value in settings if key.startswith("CHAINS_")] +for fields, value in CHAINS_ENV: + if value: + chain, field = fields.split("_", 1) + chain = chain if chain not in Chain.__members__ else Chain[chain] + field = field.lower() + settings.CHAINS[chain].__dict__[field] = value + settings.__delattr__(f"CHAINS_{fields}") From c62fbb73230d432eab5f7842fc7a35c87d604d5d Mon Sep 17 00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Tue, 3 Sep 2024 14:37:56 +0200 Subject: [PATCH 056/122] Feature: get_message_status and overload of get_message to return status (#163) * Feature: get_message_status and overload of get_message to return status * fixup! 
Feature: get_message_status and overload of get_message to return status * Fix after review --------- Co-authored-by: philogicae --- src/aleph/sdk/client/http.py | 45 +++++++++++++++++++++++++++++++++--- 1 file changed, 42 insertions(+), 3 deletions(-) diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index 7e8755b5..2c953d4e 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -4,11 +4,24 @@ import ssl from io import BytesIO from pathlib import Path -from typing import Any, AsyncIterable, Dict, Iterable, List, Optional, Type, Union +from typing import ( + Any, + AsyncIterable, + Dict, + Iterable, + List, + Optional, + Tuple, + Type, + Union, + overload, +) import aiohttp +from aiohttp.web import HTTPNotFound from aleph_message import parse_message from aleph_message.models import AlephMessage, ItemHash, ItemType +from aleph_message.status import MessageStatus from pydantic import ValidationError from ..conf import settings @@ -343,11 +356,27 @@ async def get_messages( pagination_item=response_json["pagination_item"], ) + @overload + async def get_message( + self, + item_hash: str, + message_type: Optional[Type[GenericMessage]] = None, + ) -> GenericMessage: ... + + @overload async def get_message( self, item_hash: str, message_type: Optional[Type[GenericMessage]] = None, - ) -> GenericMessage: + with_status: bool = False, + ) -> Tuple[GenericMessage, MessageStatus]: ... 
+ + async def get_message( + self, + item_hash: str, + message_type: Optional[Type[GenericMessage]] = None, + with_status: bool = False, + ) -> Union[GenericMessage, Tuple[GenericMessage, MessageStatus]]: async with self.http_session.get(f"/api/v0/messages/{item_hash}") as resp: try: resp.raise_for_status() @@ -368,7 +397,10 @@ async def get_message( f"The message type '{message.type}' " f"does not match the expected type '{expected_type}'" ) - return message + if with_status: + return message, message_raw["status"] + else: + return message async def get_message_error( self, @@ -428,3 +460,10 @@ async def get_program_price(self, item_hash: str) -> PriceResponse: if e.status == 400: raise InvalidHashError(f"Bad request or no such hash {item_hash}") raise e + + async def get_message_status(self, item_hash: str) -> MessageStatus: + """return Status of a message""" + async with self.http_session.get(f"/api/v0/messages/{item_hash}") as resp: + if resp.status == HTTPNotFound.status_code: + raise MessageNotFoundError(f"No such hash {item_hash}") + resp.raise_for_status() From c00d36cfc75bba03637256f66626125cf5d9a826 Mon Sep 17 00:00:00 2001 From: Laurent Peuch Date: Fri, 20 Sep 2024 03:19:13 +0200 Subject: [PATCH 057/122] chore: configure dependabot to auto update dependencies --- .github/dependabot.yml | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..8ceeaf03 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: +- package-ecosystem: "pip" + directory: "/" + schedule: + interval: "weekly" From cf704625e1a03a41b3534f3d68fccaf23f1cc57a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 24 Sep 2024 15:30:53 +0000 Subject: [PATCH 058/122] Chore(deps): Bump ledgereth from 0.9.0 to 0.9.1 Bumps [ledgereth](https://github.com/mikeshultz/ledger-eth-lib) from 
0.9.0 to 0.9.1. - [Release notes](https://github.com/mikeshultz/ledger-eth-lib/releases) - [Commits](https://github.com/mikeshultz/ledger-eth-lib/compare/v0.9.0...v0.9.1) --- updated-dependencies: - dependency-name: ledgereth dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 0f62338e..2cffe116 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,7 +47,7 @@ docs = [ "sphinxcontrib-plantuml", ] ledger = [ - "ledgereth==0.9.0", + "ledgereth==0.9.1", ] mqtt = [ "aiomqtt<=0.1.3", From 2790df9d2a5e25a0a826a538fc214a0f3493b9e2 Mon Sep 17 00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Wed, 2 Oct 2024 12:25:33 +0200 Subject: [PATCH 059/122] Feature: Account Handler (#175) * Feature: Internal account management + fix on _load_account to handle SolAccount * fixup! Feature: Internal account management + fix on _load_account to handle SolAccount * Fix: chains_config wasn't using settings.CONFIG_HOME for locations * Fix: blakc issue * Fix: rename CHAINS_CONFIG_FILE to CONFIG_FILE to avoid getting issue by conf of chain * Fix: base58 and pynacl is now needed for build * Fix: f string without nay placeholders * Fix: black error * Refactor: we now store single account at the time * Fix: ruff issue * fix: debug stuff remove * Fix: Improve code structure in pair-programming with Lyam --------- Co-authored-by: Andres D. 
Molins --- .gitignore | 2 +- pyproject.toml | 2 + src/aleph/sdk/account.py | 52 ++++++++++++++---- src/aleph/sdk/chains/solana.py | 94 +++++++++++++++++++++++++++++++-- src/aleph/sdk/conf.py | 68 +++++++++++++++++++++++- tests/unit/test_chain_solana.py | 60 ++++++++++++++++++++- 6 files changed, 263 insertions(+), 15 deletions(-) diff --git a/.gitignore b/.gitignore index 2896a4e6..f18f4bd6 100644 --- a/.gitignore +++ b/.gitignore @@ -50,7 +50,7 @@ MANIFEST **/device.key # environment variables -.env +.config.json .env.local .gitsigners diff --git a/pyproject.toml b/pyproject.toml index 2cffe116..f533bfe2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,8 @@ dependencies = [ "aleph-superfluid>=0.2.1", "eth_typing==4.3.1", "web3==6.3.0", + "base58==2.1.1", # Needed now as default with _load_account changement + "pynacl==1.5.0" # Needed now as default with _load_account changement ] [project.optional-dependencies] diff --git a/src/aleph/sdk/account.py b/src/aleph/sdk/account.py index 59eef815..8c067283 100644 --- a/src/aleph/sdk/account.py +++ b/src/aleph/sdk/account.py @@ -1,12 +1,15 @@ import asyncio import logging from pathlib import Path -from typing import Optional, Type, TypeVar +from typing import Dict, Optional, Type, TypeVar + +from aleph_message.models import Chain from aleph.sdk.chains.common import get_fallback_private_key from aleph.sdk.chains.ethereum import ETHAccount from aleph.sdk.chains.remote import RemoteAccount -from aleph.sdk.conf import settings +from aleph.sdk.chains.solana import SOLAccount +from aleph.sdk.conf import load_main_configuration, settings from aleph.sdk.types import AccountFromPrivateKey logger = logging.getLogger(__name__) @@ -14,6 +17,16 @@ T = TypeVar("T", bound=AccountFromPrivateKey) +def load_chain_account_type(chain: Chain) -> Type[AccountFromPrivateKey]: + chain_account_map: Dict[Chain, Type[AccountFromPrivateKey]] = { + Chain.ETH: ETHAccount, + Chain.AVAX: ETHAccount, + Chain.SOL: SOLAccount, + Chain.BASE: 
ETHAccount, + } + return chain_account_map.get(chain) or ETHAccount + + def account_from_hex_string(private_key_str: str, account_type: Type[T]) -> T: if private_key_str.startswith("0x"): private_key_str = private_key_str[2:] @@ -28,16 +41,36 @@ def account_from_file(private_key_path: Path, account_type: Type[T]) -> T: def _load_account( private_key_str: Optional[str] = None, private_key_path: Optional[Path] = None, - account_type: Type[AccountFromPrivateKey] = ETHAccount, + account_type: Optional[Type[AccountFromPrivateKey]] = None, ) -> AccountFromPrivateKey: """Load private key from a string or a file. takes the string argument in priority""" + if private_key_str or (private_key_path and private_key_path.is_file()): + if account_type: + if private_key_path and private_key_path.is_file(): + return account_from_file(private_key_path, account_type) + elif private_key_str: + return account_from_hex_string(private_key_str, account_type) + else: + raise ValueError("Any private key specified") + else: + main_configuration = load_main_configuration(settings.CONFIG_FILE) + if main_configuration: + account_type = load_chain_account_type(main_configuration.chain) + logger.debug( + f"Detected {main_configuration.chain} account for path {settings.CONFIG_FILE}" + ) + else: + account_type = ETHAccount # Defaults to ETHAccount + logger.warning( + f"No main configuration data found in {settings.CONFIG_FILE}, defaulting to {account_type.__name__}" + ) + if private_key_path and private_key_path.is_file(): + return account_from_file(private_key_path, account_type) + elif private_key_str: + return account_from_hex_string(private_key_str, account_type) + else: + raise ValueError("Any private key specified") - if private_key_str: - logger.debug("Using account from string") - return account_from_hex_string(private_key_str, account_type) - elif private_key_path and private_key_path.is_file(): - logger.debug("Using account from file") - return account_from_file(private_key_path, 
account_type) elif settings.REMOTE_CRYPTO_HOST: logger.debug("Using remote account") loop = asyncio.get_event_loop() @@ -48,6 +81,7 @@ def _load_account( ) ) else: + account_type = ETHAccount # Defaults to ETHAccount new_private_key = get_fallback_private_key() account = account_type(private_key=new_private_key) logger.info( diff --git a/src/aleph/sdk/chains/solana.py b/src/aleph/sdk/chains/solana.py index ff870a4d..a9352489 100644 --- a/src/aleph/sdk/chains/solana.py +++ b/src/aleph/sdk/chains/solana.py @@ -1,6 +1,6 @@ import json from pathlib import Path -from typing import Dict, Optional, Union +from typing import Dict, List, Optional, Union import base58 from nacl.exceptions import BadSignatureError as NaclBadSignatureError @@ -22,7 +22,7 @@ class SOLAccount(BaseAccount): _private_key: PrivateKey def __init__(self, private_key: bytes): - self.private_key = private_key + self.private_key = parse_private_key(private_key_from_bytes(private_key)) self._signing_key = SigningKey(self.private_key) self._private_key = self._signing_key.to_curve25519_private_key() @@ -79,7 +79,7 @@ def verify_signature( public_key: The public key to use for verification. Can be a base58 encoded string or bytes. message: The message to verify. Can be an utf-8 string or bytes. Raises: - BadSignatureError: If the signature is invalid. + BadSignatureError: If the signature is invalid.! """ if isinstance(signature, str): signature = base58.b58decode(signature) @@ -91,3 +91,91 @@ def verify_signature( VerifyKey(public_key).verify(message, signature) except NaclBadSignatureError as e: raise BadSignatureError from e + + +def private_key_from_bytes( + private_key_bytes: bytes, output_format: str = "base58" +) -> Union[str, List[int], bytes]: + """ + Convert a Solana private key in bytes back to different formats (base58 string, uint8 list, or raw bytes). + + - For base58 string: Encode the bytes into a base58 string. + - For uint8 list: Convert the bytes into a list of integers. 
+ - For raw bytes: Return as-is. + + Args: + private_key_bytes (bytes): The private key in byte format. + output_format (str): The format to return ('base58', 'list', 'bytes'). + + Returns: + The private key in the requested format. + + Raises: + ValueError: If the output_format is not recognized or the private key length is invalid. + """ + if not isinstance(private_key_bytes, bytes): + raise ValueError("Expected the private key in bytes.") + + if len(private_key_bytes) != 32: + raise ValueError("Solana private key must be exactly 32 bytes long.") + + if output_format == "base58": + return base58.b58encode(private_key_bytes).decode("utf-8") + + elif output_format == "list": + return list(private_key_bytes) + + elif output_format == "bytes": + return private_key_bytes + + else: + raise ValueError("Invalid output format. Choose 'base58', 'list', or 'bytes'.") + + +def parse_private_key(private_key: Union[str, List[int], bytes]) -> bytes: + """ + Parse the private key which could be either: + - a base58-encoded string (which may contain both private and public key) + - a list of uint8 integers (which may contain both private and public key) + - a byte array (exactly 32 bytes) + + Returns: + bytes: The private key in byte format (32 bytes). + + Raises: + ValueError: If the private key format is invalid or the length is incorrect. + """ + # If the private key is already in byte format + if isinstance(private_key, bytes): + if len(private_key) != 32: + raise ValueError("The private key in bytes must be exactly 32 bytes long.") + return private_key + + # If the private key is a base58-encoded string + elif isinstance(private_key, str): + try: + decoded_key = base58.b58decode(private_key) + if len(decoded_key) not in [32, 64]: + raise ValueError( + "The base58 decoded private key must be either 32 or 64 bytes long." 
+ ) + return decoded_key[:32] + except Exception as e: + raise ValueError(f"Invalid base58 encoded private key: {e}") + + # If the private key is a list of uint8 integers + elif isinstance(private_key, list): + if all(isinstance(i, int) and 0 <= i <= 255 for i in private_key): + byte_key = bytes(private_key) + if len(byte_key) < 32: + raise ValueError("The uint8 array must contain at least 32 elements.") + return byte_key[:32] # Take the first 32 bytes (private key) + else: + raise ValueError( + "Invalid uint8 array, must contain integers between 0 and 255." + ) + + else: + raise ValueError( + "Unsupported private key format. Must be a base58 string, bytes, or a list of uint8 integers." + ) diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index 4236370a..114652b7 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -1,3 +1,5 @@ +import json +import logging import os from pathlib import Path from shutil import which @@ -5,14 +7,21 @@ from aleph_message.models import Chain from aleph_message.models.execution.environment import HypervisorType -from pydantic import BaseSettings, Field +from pydantic import BaseModel, BaseSettings, Field from aleph.sdk.types import ChainInfo +logger = logging.getLogger(__name__) + class Settings(BaseSettings): CONFIG_HOME: Optional[str] = None + CONFIG_FILE: Path = Field( + default=Path("config.json"), + description="Path to the JSON file containing chain account configurations", + ) + # In case the user does not want to bother with handling private keys himself, # do an ugly and insecure write and read from disk to this file. PRIVATE_KEY_FILE: Path = Field( @@ -139,6 +148,18 @@ class Config: env_file = ".env" +class MainConfiguration(BaseModel): + """ + Intern Chain Management with Account. 
+ """ + + path: Path + chain: Chain + + class Config: + use_enum_values = True + + # Settings singleton settings = Settings() @@ -162,6 +183,19 @@ class Config: settings.PRIVATE_MNEMONIC_FILE = Path( settings.CONFIG_HOME, "private-keys", "substrate.mnemonic" ) +if str(settings.CONFIG_FILE) == "config.json": + settings.CONFIG_FILE = Path(settings.CONFIG_HOME, "config.json") + # If Config file exist and well filled we update the PRIVATE_KEY_FILE default + if settings.CONFIG_FILE.exists(): + try: + with open(settings.CONFIG_FILE, "r", encoding="utf-8") as f: + config_data = json.load(f) + + if "path" in config_data: + settings.PRIVATE_KEY_FILE = Path(config_data["path"]) + except json.JSONDecodeError: + pass + # Update CHAINS settings and remove placeholders CHAINS_ENV = [(key[7:], value) for key, value in settings if key.startswith("CHAINS_")] @@ -172,3 +206,35 @@ class Config: field = field.lower() settings.CHAINS[chain].__dict__[field] = value settings.__delattr__(f"CHAINS_{fields}") + + +def save_main_configuration(file_path: Path, data: MainConfiguration): + """ + Synchronously save a single ChainAccount object as JSON to a file. + """ + with file_path.open("w") as file: + data_serializable = data.dict() + data_serializable["path"] = str(data_serializable["path"]) + json.dump(data_serializable, file, indent=4) + + +def load_main_configuration(file_path: Path) -> Optional[MainConfiguration]: + """ + Synchronously load the private key and chain type from a file. + If the file does not exist or is empty, return None. + """ + if not file_path.exists() or file_path.stat().st_size == 0: + logger.debug(f"File {file_path} does not exist or is empty. 
Returning None.") + return None + + try: + with file_path.open("rb") as file: + content = file.read() + data = json.loads(content.decode("utf-8")) + return MainConfiguration(**data) + except UnicodeDecodeError as e: + logger.error(f"Unable to decode {file_path} as UTF-8: {e}") + except json.JSONDecodeError: + logger.error(f"Invalid JSON format in {file_path}.") + + return None diff --git a/tests/unit/test_chain_solana.py b/tests/unit/test_chain_solana.py index ed2fff78..0fbd717e 100644 --- a/tests/unit/test_chain_solana.py +++ b/tests/unit/test_chain_solana.py @@ -8,7 +8,12 @@ from nacl.signing import VerifyKey from aleph.sdk.chains.common import get_verification_buffer -from aleph.sdk.chains.solana import SOLAccount, get_fallback_account, verify_signature +from aleph.sdk.chains.solana import ( + SOLAccount, + get_fallback_account, + parse_private_key, + verify_signature, +) from aleph.sdk.exceptions import BadSignatureError @@ -136,3 +141,56 @@ async def test_sign_raw(solana_account): assert isinstance(signature, bytes) verify_signature(signature, solana_account.get_address(), buffer) + + +def test_parse_solana_private_key_bytes(): + # Valid 32-byte private key + private_key_bytes = bytes(range(32)) + parsed_key = parse_private_key(private_key_bytes) + assert isinstance(parsed_key, bytes) + assert len(parsed_key) == 32 + assert parsed_key == private_key_bytes + + # Invalid private key (too short) + with pytest.raises( + ValueError, match="The private key in bytes must be exactly 32 bytes long." 
+ ): + parse_private_key(bytes(range(31))) + + +def test_parse_solana_private_key_base58(): + # Valid base58 private key (32 bytes) + base58_key = base58.b58encode(bytes(range(32))).decode("utf-8") + parsed_key = parse_private_key(base58_key) + assert isinstance(parsed_key, bytes) + assert len(parsed_key) == 32 + + # Invalid base58 key (not decodable) + with pytest.raises(ValueError, match="Invalid base58 encoded private key"): + parse_private_key("invalid_base58_key") + + # Invalid base58 key (wrong length) + with pytest.raises( + ValueError, + match="The base58 decoded private key must be either 32 or 64 bytes long.", + ): + parse_private_key(base58.b58encode(bytes(range(31))).decode("utf-8")) + + +def test_parse_solana_private_key_list(): + # Valid list of uint8 integers (64 elements, but we only take the first 32 for private key) + uint8_list = list(range(64)) + parsed_key = parse_private_key(uint8_list) + assert isinstance(parsed_key, bytes) + assert len(parsed_key) == 32 + assert parsed_key == bytes(range(32)) + + # Invalid list (contains non-integers) + with pytest.raises(ValueError, match="Invalid uint8 array"): + parse_private_key([1, 2, "not an int", 4]) # type: ignore # Ignore type check for string + + # Invalid list (less than 32 elements) + with pytest.raises( + ValueError, match="The uint8 array must contain at least 32 elements." 
+ ): + parse_private_key(list(range(31))) From 1f08567367a4b00adb0a53873adf7ea3b47cc0a6 Mon Sep 17 00:00:00 2001 From: Laurent Peuch Date: Wed, 25 Sep 2024 23:18:08 +0200 Subject: [PATCH 060/122] feat: add pyproject-fmt --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index f533bfe2..0ae14ebf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -153,6 +153,7 @@ dependencies = [ "mypy-extensions==1.0.0", "ruff==0.4.8", "isort==5.13.2", + "pyproject-fmt==2.2.1", ] [tool.hatch.envs.linting.scripts] typing = "mypy --config-file=pyproject.toml {args:} ./src/ ./tests/ ./examples/" @@ -160,11 +161,13 @@ style = [ "ruff check {args:.} ./src/ ./tests/ ./examples/", "black --check --diff {args:} ./src/ ./tests/ ./examples/", "isort --check-only --profile black {args:} ./src/ ./tests/ ./examples/", + "pyproject-fmt --check pyproject.toml", ] fmt = [ "black {args:} ./src/ ./tests/ ./examples/", "ruff check --fix {args:.} ./src/ ./tests/ ./examples/", "isort --profile black {args:} ./src/ ./tests/ ./examples/", + "pyproject-fmt pyproject.toml", "style", ] all = [ From a636106ea2d4abb1d21107a8e73dc8404cf1a9f4 Mon Sep 17 00:00:00 2001 From: Laurent Peuch Date: Wed, 25 Sep 2024 23:19:03 +0200 Subject: [PATCH 061/122] fix: run pyproject-fmt --- pyproject.toml | 320 +++++++++++++++++++++++++------------------------ 1 file changed, 161 insertions(+), 159 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 0ae14ebf..409694cf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,137 +1,137 @@ [build-system] -requires = ["hatchling", "hatch-vcs"] build-backend = "hatchling.build" +requires = [ "hatch-vcs", "hatchling" ] + [project] name = "aleph-sdk-python" -dynamic = ["version"] description = "Lightweight Python Client library for the Aleph.im network" readme = "README.md" license = { file = "LICENSE.txt" } authors = [ - { name = "Aleph.im Team", email = "hello@aleph.im" }, + { name = "Aleph.im Team", email = 
"hello@aleph.im" }, ] classifiers = [ - "Programming Language :: Python :: 3", - "Development Status :: 4 - Beta", - "Framework :: aiohttp", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Operating System :: POSIX :: Linux", - "Operating System :: MacOS :: MacOS X", - "Topic :: Software Development :: Libraries", -] + "Development Status :: 4 - Beta", + "Framework :: aiohttp", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: MacOS :: MacOS X", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Topic :: Software Development :: Libraries", +] +dynamic = [ "version" ] dependencies = [ - "aiohttp>=3.8.3", - "aleph-message>=0.4.9", - "coincurve; python_version<\"3.11\"", - "coincurve>=19.0.0; python_version>=\"3.11\"", - "eth_abi>=4.0.0; python_version>=\"3.11\"", - "jwcrypto==1.5.6", - "python-magic", - "typing_extensions", - "aioresponses>=0.7.6", - "aleph-superfluid>=0.2.1", - "eth_typing==4.3.1", - "web3==6.3.0", - "base58==2.1.1", # Needed now as default with _load_account changement - "pynacl==1.5.0" # Needed now as default with _load_account changement -] - -[project.optional-dependencies] -cosmos = [ - "cosmospy", -] -dns = [ - "aiodns", -] -docs = [ - "sphinxcontrib-plantuml", -] -ledger = [ - "ledgereth==0.9.1", -] -mqtt = [ - "aiomqtt<=0.1.3", - "certifi", - "Click", -] -nuls2 = [ - "aleph-nuls2", -] -substrate = [ - "py-sr25519-bindings", - "substrate-interface", -] -solana = [ - "base58", - "pynacl", -] -tezos = [ - "aleph-pytezos==0.1.1", - "pynacl", -] -encryption = [ - "eciespy; python_version<\"3.11\"", - "eciespy>=0.3.13; python_version>=\"3.11\"", + "aiohttp>=3.8.3", + "aioresponses>=0.7.6", + 
"aleph-message>=0.4.9", + "aleph-superfluid>=0.2.1", + "base58==2.1.1", # Needed now as default with _load_account changement + "coincurve; python_version<'3.11'", + "coincurve>=19; python_version>='3.11'", + "eth-abi>=4; python_version>='3.11'", + "eth-typing==4.3.1", + "jwcrypto==1.5.6", + "pynacl==1.5", # Needed now as default with _load_account changement + "python-magic", + "typing-extensions", + "web3==6.3", ] -all = [ - "aleph-sdk-python[cosmos,dns,docs,ledger,mqtt,nuls2,substrate,solana,tezos,encryption]", + +optional-dependencies.all = [ + "aleph-sdk-python[cosmos,dns,docs,ledger,mqtt,nuls2,substrate,solana,tezos,encryption]", +] +optional-dependencies.cosmos = [ + "cosmospy", +] +optional-dependencies.dns = [ + "aiodns", +] +optional-dependencies.docs = [ + "sphinxcontrib-plantuml", +] +optional-dependencies.encryption = [ + "eciespy; python_version<'3.11'", + "eciespy>=0.3.13; python_version>='3.11'", ] +optional-dependencies.ledger = [ + "ledgereth==0.9.1", +] +optional-dependencies.mqtt = [ + "aiomqtt<=0.1.3", + "certifi", + "click", +] +optional-dependencies.nuls2 = [ + "aleph-nuls2", +] +optional-dependencies.solana = [ + "base58", + "pynacl", +] +optional-dependencies.substrate = [ + "py-sr25519-bindings", + "substrate-interface", +] +optional-dependencies.tezos = [ + "aleph-pytezos==0.1.1", + "pynacl", +] +urls.Documentation = "https://aleph.im/" +urls.Homepage = "https://github.com/aleph-im/aleph-sdk-python" [tool.hatch.metadata] allow-direct-references = true -[project.urls] -Documentation = "https://aleph.im/" -Homepage = "https://github.com/aleph-im/aleph-sdk-python" - [tool.hatch.version] source = "vcs" [tool.hatch.build.targets.wheel] packages = [ - "src/aleph", - "pyproject.toml", - "README.md", - "LICENSE.txt", + "src/aleph", + "pyproject.toml", + "README.md", + "LICENSE.txt", ] [tool.hatch.build.targets.sdist] include = [ - "src/aleph", - "pyproject.toml", - "README.md", - "LICENSE.txt", + "src/aleph", + "pyproject.toml", + "README.md", + 
"LICENSE.txt", ] -[tool.isort] -profile = "black" - [[tool.hatch.envs.all.matrix]] -python = ["3.9", "3.10", "3.11"] +python = [ "3.9", "3.10", "3.11" ] [tool.hatch.envs.testing] features = [ - "cosmos", - "dns", - "ledger", - "nuls2", - "substrate", - "solana", - "tezos", - "encryption", + "cosmos", + "dns", + "ledger", + "nuls2", + "substrate", + "solana", + "tezos", + "encryption", ] dependencies = [ - "pytest==8.0.1", - "pytest-cov==4.1.0", - "pytest-mock==3.12.0", - "pytest-asyncio==0.23.5", - "pytest-aiohttp==1.0.5", - "aioresponses==0.7.6", - "fastapi", - "httpx", - "secp256k1", + "pytest==8.0.1", + "pytest-cov==4.1.0", + "pytest-mock==3.12.0", + "pytest-asyncio==0.23.5", + "pytest-aiohttp==1.0.5", + "aioresponses==0.7.6", + "fastapi", + "httpx", + "secp256k1", ] [tool.hatch.envs.testing.scripts] test = "pytest {args:} ./src/ ./tests/ ./examples/" @@ -148,101 +148,103 @@ cov = [ [tool.hatch.envs.linting] detached = true dependencies = [ - "black==24.2.0", - "mypy==1.9.0", - "mypy-extensions==1.0.0", - "ruff==0.4.8", - "isort==5.13.2", - "pyproject-fmt==2.2.1", + "black==24.2.0", + "mypy==1.9.0", + "mypy-extensions==1.0.0", + "ruff==0.4.8", + "isort==5.13.2", + "pyproject-fmt==2.2.1", ] [tool.hatch.envs.linting.scripts] typing = "mypy --config-file=pyproject.toml {args:} ./src/ ./tests/ ./examples/" style = [ - "ruff check {args:.} ./src/ ./tests/ ./examples/", - "black --check --diff {args:} ./src/ ./tests/ ./examples/", - "isort --check-only --profile black {args:} ./src/ ./tests/ ./examples/", - "pyproject-fmt --check pyproject.toml", + "ruff check {args:.} ./src/ ./tests/ ./examples/", + "black --check --diff {args:} ./src/ ./tests/ ./examples/", + "isort --check-only --profile black {args:} ./src/ ./tests/ ./examples/", + "pyproject-fmt --check pyproject.toml", ] fmt = [ - "black {args:} ./src/ ./tests/ ./examples/", - "ruff check --fix {args:.} ./src/ ./tests/ ./examples/", - "isort --profile black {args:} ./src/ ./tests/ ./examples/", - "pyproject-fmt 
pyproject.toml", - "style", + "black {args:} ./src/ ./tests/ ./examples/", + "ruff check --fix {args:.} ./src/ ./tests/ ./examples/", + "isort --profile black {args:} ./src/ ./tests/ ./examples/", + "pyproject-fmt pyproject.toml", + "style", ] all = [ - "style", - "typing", -] - -[tool.mypy] -python_version = 3.9 -mypy_path = "src" -exclude = [ - "conftest.py" + "style", + "typing", ] -show_column_numbers = true -check_untyped_defs = true - -# Import discovery -# Install types for third-party library stubs (e.g. from typeshed repository) -install_types = true -non_interactive = true -# Suppresses error messages about imports that cannot be resolved (no py.typed file, no stub file, etc). -ignore_missing_imports = true -# Don't follow imports -follow_imports = "silent" - -# Miscellaneous strictness flags -# Allows variables to be redefined with an arbitrary type, as long as the redefinition is in the same block and nesting level as the original definition. -allow_redefinition = true +[tool.isort] +profile = "black" [tool.pytest.ini_options] minversion = "6.0" -pythonpath = ["src"] +pythonpath = [ "src" ] addopts = "-vv -m \"not ledger_hardware\"" -norecursedirs = ["*.egg", "dist", "build", ".tox", ".venv", "*/site-packages/*"] -testpaths = ["tests/unit"] -markers = {ledger_hardware = "marks tests as requiring ledger hardware"} +norecursedirs = [ "*.egg", "dist", "build", ".tox", ".venv", "*/site-packages/*" ] +testpaths = [ "tests/unit" ] +markers = { ledger_hardware = "marks tests as requiring ledger hardware" } [tool.coverage.run] branch = true parallel = true source = [ - "src/", + "src/", ] omit = [ - "*/site-packages/*", + "*/site-packages/*", ] [tool.coverage.paths] source = [ - "src/", + "src/", ] omit = [ - "*/site-packages/*", + "*/site-packages/*", ] [tool.coverage.report] show_missing = true skip_empty = true exclude_lines = [ - # Have to re-enable the standard pragma - "pragma: no cover", + # Have to re-enable the standard pragma + "pragma: no cover", - # 
Don't complain about missing debug-only code: - "def __repr__", - "if self\\.debug", + # Don't complain about missing debug-only code: + "def __repr__", + "if self\\.debug", - # Don't complain if tests don't hit defensive assertion code: - "raise AssertionError", - "raise NotImplementedError", + # Don't complain if tests don't hit defensive assertion code: + "raise AssertionError", + "raise NotImplementedError", - # Don't complain if non-runnable code isn't run: - "if 0:", - "if __name__ == .__main__.:", + # Don't complain if non-runnable code isn't run: + "if 0:", + "if __name__ == .__main__.:", + + # Don't complain about ineffective code: + "pass", +] - # Don't complain about ineffective code: - "pass", +[tool.mypy] +python_version = 3.9 +mypy_path = "src" +exclude = [ + "conftest.py", ] +show_column_numbers = true +check_untyped_defs = true + +# Import discovery +# Install types for third-party library stubs (e.g. from typeshed repository) +install_types = true +non_interactive = true +# Suppresses error messages about imports that cannot be resolved (no py.typed file, no stub file, etc). +ignore_missing_imports = true +# Don't follow imports +follow_imports = "silent" + +# Miscellaneous strictness flags +# Allows variables to be redefined with an arbitrary type, as long as the redefinition is in the same block and nesting level as the original definition. 
+allow_redefinition = true From d54e9ac0bb2d814a5f87b1135fd1017108c6ee93 Mon Sep 17 00:00:00 2001 From: philogicae Date: Fri, 11 Oct 2024 20:01:12 +0300 Subject: [PATCH 062/122] Post-SOL fixes (#178) * Missing chain field on auth * Fix Signature of Solana operation for CRN * Add export_private_key func for accounts * Improve _load_account * Add chain arg to _load_account * Increase default HTTP_REQUEST_TIMEOUT * Typing --------- Co-authored-by: Olivier Le Thanh Duong --- src/aleph/sdk/account.py | 108 +++++++++++++++++++----------- src/aleph/sdk/chains/ethereum.py | 5 ++ src/aleph/sdk/chains/solana.py | 6 ++ src/aleph/sdk/client/vm_client.py | 17 +++-- src/aleph/sdk/conf.py | 2 +- src/aleph/sdk/types.py | 4 ++ 6 files changed, 97 insertions(+), 45 deletions(-) diff --git a/src/aleph/sdk/account.py b/src/aleph/sdk/account.py index 8c067283..9bfafcd3 100644 --- a/src/aleph/sdk/account.py +++ b/src/aleph/sdk/account.py @@ -10,67 +10,95 @@ from aleph.sdk.chains.remote import RemoteAccount from aleph.sdk.chains.solana import SOLAccount from aleph.sdk.conf import load_main_configuration, settings +from aleph.sdk.evm_utils import get_chains_with_super_token from aleph.sdk.types import AccountFromPrivateKey logger = logging.getLogger(__name__) T = TypeVar("T", bound=AccountFromPrivateKey) +chain_account_map: Dict[Chain, Type[T]] = { # type: ignore + Chain.ETH: ETHAccount, + Chain.AVAX: ETHAccount, + Chain.BASE: ETHAccount, + Chain.SOL: SOLAccount, +} + def load_chain_account_type(chain: Chain) -> Type[AccountFromPrivateKey]: - chain_account_map: Dict[Chain, Type[AccountFromPrivateKey]] = { - Chain.ETH: ETHAccount, - Chain.AVAX: ETHAccount, - Chain.SOL: SOLAccount, - Chain.BASE: ETHAccount, - } - return chain_account_map.get(chain) or ETHAccount + return chain_account_map.get(chain) or ETHAccount # type: ignore -def account_from_hex_string(private_key_str: str, account_type: Type[T]) -> T: +def account_from_hex_string( + private_key_str: str, + account_type: 
Optional[Type[T]], + chain: Optional[Chain] = None, +) -> AccountFromPrivateKey: if private_key_str.startswith("0x"): private_key_str = private_key_str[2:] - return account_type(bytes.fromhex(private_key_str)) + if not chain: + if not account_type: + account_type = load_chain_account_type(Chain.ETH) # type: ignore + return account_type(bytes.fromhex(private_key_str)) # type: ignore + + account_type = load_chain_account_type(chain) + account = account_type(bytes.fromhex(private_key_str)) + if chain in get_chains_with_super_token(): + account.switch_chain(chain) + return account # type: ignore -def account_from_file(private_key_path: Path, account_type: Type[T]) -> T: + +def account_from_file( + private_key_path: Path, + account_type: Optional[Type[T]], + chain: Optional[Chain] = None, +) -> AccountFromPrivateKey: private_key = private_key_path.read_bytes() - return account_type(private_key) + + if not chain: + if not account_type: + account_type = load_chain_account_type(Chain.ETH) # type: ignore + return account_type(private_key) # type: ignore + + account_type = load_chain_account_type(chain) + account = account_type(private_key) + if chain in get_chains_with_super_token(): + account.switch_chain(chain) + return account def _load_account( private_key_str: Optional[str] = None, private_key_path: Optional[Path] = None, account_type: Optional[Type[AccountFromPrivateKey]] = None, + chain: Optional[Chain] = None, ) -> AccountFromPrivateKey: - """Load private key from a string or a file. 
takes the string argument in priority""" - if private_key_str or (private_key_path and private_key_path.is_file()): - if account_type: - if private_key_path and private_key_path.is_file(): - return account_from_file(private_key_path, account_type) - elif private_key_str: - return account_from_hex_string(private_key_str, account_type) - else: - raise ValueError("Any private key specified") + """Load an account from a private key string or file, or from the configuration file.""" + + # Loads configuration if no account_type is specified + if not account_type: + config = load_main_configuration(settings.CONFIG_FILE) + if config and hasattr(config, "chain"): + account_type = load_chain_account_type(config.chain) + logger.debug( + f"Detected {config.chain} account for path {settings.CONFIG_FILE}" + ) else: - main_configuration = load_main_configuration(settings.CONFIG_FILE) - if main_configuration: - account_type = load_chain_account_type(main_configuration.chain) - logger.debug( - f"Detected {main_configuration.chain} account for path {settings.CONFIG_FILE}" - ) - else: - account_type = ETHAccount # Defaults to ETHAccount - logger.warning( - f"No main configuration data found in {settings.CONFIG_FILE}, defaulting to {account_type.__name__}" - ) - if private_key_path and private_key_path.is_file(): - return account_from_file(private_key_path, account_type) - elif private_key_str: - return account_from_hex_string(private_key_str, account_type) - else: - raise ValueError("Any private key specified") + account_type = account_type = load_chain_account_type( + Chain.ETH + ) # Defaults to ETHAccount + logger.warning( + f"No main configuration data found in {settings.CONFIG_FILE}, defaulting to {account_type and account_type.__name__}" + ) + # Loads private key from a string + if private_key_str: + return account_from_hex_string(private_key_str, account_type, chain) + # Loads private key from a file + elif private_key_path and private_key_path.is_file(): + return 
account_from_file(private_key_path, account_type, chain) + # For ledger keys elif settings.REMOTE_CRYPTO_HOST: logger.debug("Using remote account") loop = asyncio.get_event_loop() @@ -80,10 +108,12 @@ def _load_account( unix_socket=settings.REMOTE_CRYPTO_UNIX_SOCKET, ) ) + # Fallback: config.path if set, else generate a new private key else: - account_type = ETHAccount # Defaults to ETHAccount new_private_key = get_fallback_private_key() - account = account_type(private_key=new_private_key) + account = account_from_hex_string( + bytes.hex(new_private_key), account_type, chain + ) logger.info( f"Generated fallback private key with address {account.get_address()}" ) diff --git a/src/aleph/sdk/chains/ethereum.py b/src/aleph/sdk/chains/ethereum.py index 32f459b7..ab93df56 100644 --- a/src/aleph/sdk/chains/ethereum.py +++ b/src/aleph/sdk/chains/ethereum.py @@ -1,4 +1,5 @@ import asyncio +import base64 from decimal import Decimal from pathlib import Path from typing import Awaitable, Optional, Union @@ -61,6 +62,10 @@ def from_mnemonic(mnemonic: str, chain: Optional[Chain] = None) -> "ETHAccount": private_key=Account.from_mnemonic(mnemonic=mnemonic).key, chain=chain ) + def export_private_key(self) -> str: + """Export the private key using standard format.""" + return f"0x{base64.b16encode(self.private_key).decode().lower()}" + def get_address(self) -> str: return self._account.address diff --git a/src/aleph/sdk/chains/solana.py b/src/aleph/sdk/chains/solana.py index a9352489..920ca8a0 100644 --- a/src/aleph/sdk/chains/solana.py +++ b/src/aleph/sdk/chains/solana.py @@ -43,6 +43,12 @@ async def sign_raw(self, buffer: bytes) -> bytes: sig = self._signing_key.sign(buffer) return sig.signature + def export_private_key(self) -> str: + """Export the private key using Phantom format.""" + return base58.b58encode( + self.private_key + self._signing_key.verify_key.encode() + ).decode() + def get_address(self) -> str: return encode(self._signing_key.verify_key) diff --git 
a/src/aleph/sdk/client/vm_client.py b/src/aleph/sdk/client/vm_client.py index 18d280cc..83b00dc9 100644 --- a/src/aleph/sdk/client/vm_client.py +++ b/src/aleph/sdk/client/vm_client.py @@ -5,10 +5,11 @@ from urllib.parse import urlparse import aiohttp -from aleph_message.models import ItemHash +from aleph_message.models import Chain, ItemHash from eth_account.messages import encode_defunct from jwcrypto import jwk +from aleph.sdk.chains.solana import SOLAccount from aleph.sdk.types import Account from aleph.sdk.utils import ( create_vm_control_payload, @@ -36,11 +37,13 @@ def __init__( self.account = account self.ephemeral_key = jwk.JWK.generate(kty="EC", crv="P-256") self.node_url = node_url.rstrip("/") - self.pubkey_payload = self._generate_pubkey_payload() + self.pubkey_payload = self._generate_pubkey_payload( + Chain.SOL if isinstance(account, SOLAccount) else Chain.ETH + ) self.pubkey_signature_header = "" self.session = session or aiohttp.ClientSession() - def _generate_pubkey_payload(self) -> Dict[str, Any]: + def _generate_pubkey_payload(self, chain: Chain = Chain.ETH) -> Dict[str, Any]: return { "pubkey": json.loads(self.ephemeral_key.export_public()), "alg": "ECDSA", @@ -50,12 +53,16 @@ def _generate_pubkey_payload(self) -> Dict[str, Any]: datetime.datetime.utcnow() + datetime.timedelta(days=1) ).isoformat() + "Z", + "chain": chain.value, } async def _generate_pubkey_signature_header(self) -> str: pubkey_payload = json.dumps(self.pubkey_payload).encode("utf-8").hex() - signable_message = encode_defunct(hexstr=pubkey_payload) - buffer_to_sign = signable_message.body + if isinstance(self.account, SOLAccount): + buffer_to_sign = bytes(pubkey_payload, encoding="utf-8") + else: + signable_message = encode_defunct(hexstr=pubkey_payload) + buffer_to_sign = signable_message.body signed_message = await self.account.sign_raw(buffer_to_sign) pubkey_signature = to_0x_hex(signed_message) diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index 
114652b7..5fe4cd4b 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -41,7 +41,7 @@ class Settings(BaseSettings): REMOTE_CRYPTO_HOST: Optional[str] = None REMOTE_CRYPTO_UNIX_SOCKET: Optional[str] = None ADDRESS_TO_USE: Optional[str] = None - HTTP_REQUEST_TIMEOUT = 10.0 + HTTP_REQUEST_TIMEOUT = 15.0 DEFAULT_CHANNEL: str = "ALEPH-CLOUDSOLUTIONS" DEFAULT_RUNTIME_ID: str = ( diff --git a/src/aleph/sdk/types.py b/src/aleph/sdk/types.py index 081a3465..dab90379 100644 --- a/src/aleph/sdk/types.py +++ b/src/aleph/sdk/types.py @@ -39,6 +39,10 @@ def __init__(self, private_key: bytes): ... async def sign_raw(self, buffer: bytes) -> bytes: ... + def export_private_key(self) -> str: ... + + def switch_chain(self, chain: Optional[str] = None) -> None: ... + GenericMessage = TypeVar("GenericMessage", bound=AlephMessage) From c24a3cf0e8f1aeabef4d5e96907be1991e65b141 Mon Sep 17 00:00:00 2001 From: nesitor Date: Mon, 4 Nov 2024 17:44:20 +0100 Subject: [PATCH 063/122] Implement new EVM chains (#182) * Feature: Implemented new EVM chains. * Fix: Added chain argument on initialization. * Fix: Remove venv folder to keep on track. * Fix: Added chain auto-loading if it's not defined. * Fix: Added chain auto-loading by default configuration if the user don't request it explicitly. * Fix: Solve issue with str chain value * Fix: Solve typing issue passing the chain argument. * Fix: Disable temporarily the chain field change to test it deeply. * Fix: Update to already released aleph_message dependency. * Fix: Removed build action for macos-12 as it's deprecated on GitHub actions. --------- Co-authored-by: Andres D. 
Molins --- .github/workflows/build-wheels.yml | 2 +- .gitignore | 1 + pyproject.toml | 2 +- src/aleph/sdk/account.py | 51 ++++++++++++++++-------- src/aleph/sdk/chains/evm.py | 48 +++++++++++++++++++++++ src/aleph/sdk/conf.py | 62 ++++++++++++++++++++++++++++-- src/aleph/sdk/evm_utils.py | 8 +++- src/aleph/sdk/types.py | 6 +-- 8 files changed, 153 insertions(+), 27 deletions(-) create mode 100644 src/aleph/sdk/chains/evm.py diff --git a/.github/workflows/build-wheels.yml b/.github/workflows/build-wheels.yml index 4e32a239..61074b57 100644 --- a/.github/workflows/build-wheels.yml +++ b/.github/workflows/build-wheels.yml @@ -13,7 +13,7 @@ jobs: strategy: fail-fast: false matrix: - os: [macos-12, macos-13, macos-14, ubuntu-22.04, ubuntu-24.04] + os: [macos-13, macos-14, ubuntu-22.04, ubuntu-24.04] runs-on: ${{ matrix.os }} steps: diff --git a/.gitignore b/.gitignore index f18f4bd6..5ab655ca 100644 --- a/.gitignore +++ b/.gitignore @@ -47,6 +47,7 @@ MANIFEST # Per-project virtualenvs .venv*/ +venv/* **/device.key # environment variables diff --git a/pyproject.toml b/pyproject.toml index 409694cf..d6644265 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ dynamic = [ "version" ] dependencies = [ "aiohttp>=3.8.3", "aioresponses>=0.7.6", - "aleph-message>=0.4.9", + "aleph-message>=0.5", "aleph-superfluid>=0.2.1", "base58==2.1.1", # Needed now as default with _load_account changement "coincurve; python_version<'3.11'", diff --git a/src/aleph/sdk/account.py b/src/aleph/sdk/account.py index 9bfafcd3..0bf54201 100644 --- a/src/aleph/sdk/account.py +++ b/src/aleph/sdk/account.py @@ -7,8 +7,10 @@ from aleph.sdk.chains.common import get_fallback_private_key from aleph.sdk.chains.ethereum import ETHAccount +from aleph.sdk.chains.evm import EVMAccount from aleph.sdk.chains.remote import RemoteAccount from aleph.sdk.chains.solana import SOLAccount +from aleph.sdk.chains.substrate import DOTAccount from aleph.sdk.conf import load_main_configuration, settings from 
aleph.sdk.evm_utils import get_chains_with_super_token from aleph.sdk.types import AccountFromPrivateKey @@ -18,10 +20,24 @@ T = TypeVar("T", bound=AccountFromPrivateKey) chain_account_map: Dict[Chain, Type[T]] = { # type: ignore - Chain.ETH: ETHAccount, + Chain.ARBITRUM: EVMAccount, Chain.AVAX: ETHAccount, Chain.BASE: ETHAccount, + Chain.BLAST: EVMAccount, + Chain.BOB: EVMAccount, + Chain.CYBER: EVMAccount, + Chain.DOT: DOTAccount, + Chain.ETH: ETHAccount, + Chain.FRAXTAL: EVMAccount, + Chain.LINEA: EVMAccount, + Chain.LISK: EVMAccount, + Chain.METIS: EVMAccount, + Chain.MODE: EVMAccount, + Chain.OPTIMISM: EVMAccount, + Chain.POL: EVMAccount, Chain.SOL: SOLAccount, + Chain.WORLDCHAIN: EVMAccount, + Chain.ZORA: EVMAccount, } @@ -43,7 +59,7 @@ def account_from_hex_string( return account_type(bytes.fromhex(private_key_str)) # type: ignore account_type = load_chain_account_type(chain) - account = account_type(bytes.fromhex(private_key_str)) + account = account_type(bytes.fromhex(private_key_str), chain) if chain in get_chains_with_super_token(): account.switch_chain(chain) return account # type: ignore @@ -62,7 +78,7 @@ def account_from_file( return account_type(private_key) # type: ignore account_type = load_chain_account_type(chain) - account = account_type(private_key) + account = account_type(private_key, chain) if chain in get_chains_with_super_token(): account.switch_chain(chain) return account @@ -76,28 +92,29 @@ def _load_account( ) -> AccountFromPrivateKey: """Load an account from a private key string or file, or from the configuration file.""" - # Loads configuration if no account_type is specified - if not account_type: - config = load_main_configuration(settings.CONFIG_FILE) + config = load_main_configuration(settings.CONFIG_FILE) + chain_to_use = settings.DEFAULT_CHAIN + + if not chain: if config and hasattr(config, "chain"): - account_type = load_chain_account_type(config.chain) + chain_to_use = config.chain logger.debug( f"Detected {config.chain} 
account for path {settings.CONFIG_FILE}" ) - else: - account_type = account_type = load_chain_account_type( - Chain.ETH - ) # Defaults to ETHAccount - logger.warning( - f"No main configuration data found in {settings.CONFIG_FILE}, defaulting to {account_type and account_type.__name__}" - ) + + # Loads configuration if no account_type is specified + if not account_type: + account_type = load_chain_account_type(chain_to_use) + logger.warning( + f"No main configuration data found in {settings.CONFIG_FILE}, defaulting to {account_type and account_type.__name__}" + ) # Loads private key from a string if private_key_str: - return account_from_hex_string(private_key_str, account_type, chain) + return account_from_hex_string(private_key_str, account_type, chain_to_use) # Loads private key from a file elif private_key_path and private_key_path.is_file(): - return account_from_file(private_key_path, account_type, chain) + return account_from_file(private_key_path, account_type, chain_to_use) # For ledger keys elif settings.REMOTE_CRYPTO_HOST: logger.debug("Using remote account") @@ -112,7 +129,7 @@ def _load_account( else: new_private_key = get_fallback_private_key() account = account_from_hex_string( - bytes.hex(new_private_key), account_type, chain + bytes.hex(new_private_key), account_type, chain_to_use ) logger.info( f"Generated fallback private key with address {account.get_address()}" diff --git a/src/aleph/sdk/chains/evm.py b/src/aleph/sdk/chains/evm.py new file mode 100644 index 00000000..5bf66ef1 --- /dev/null +++ b/src/aleph/sdk/chains/evm.py @@ -0,0 +1,48 @@ +from decimal import Decimal +from pathlib import Path +from typing import Awaitable, Optional + +from aleph_message.models import Chain +from eth_account import Account # type: ignore + +from .common import get_fallback_private_key +from .ethereum import ETHAccount + + +class EVMAccount(ETHAccount): + def __init__(self, private_key: bytes, chain: Optional[Chain] = None): + super().__init__(private_key, chain) 
+ # Decide if we have to send also the specified chain value or always use ETH + # if chain: + # self.CHAIN = chain + + @staticmethod + def from_mnemonic(mnemonic: str, chain: Optional[Chain] = None) -> "EVMAccount": + Account.enable_unaudited_hdwallet_features() + return EVMAccount( + private_key=Account.from_mnemonic(mnemonic=mnemonic).key, chain=chain + ) + + def get_token_balance(self) -> Decimal: + raise ValueError(f"Token not implemented for this chain {self.CHAIN}") + + def get_super_token_balance(self) -> Decimal: + raise ValueError(f"Super token not implemented for this chain {self.CHAIN}") + + def create_flow(self, receiver: str, flow: Decimal) -> Awaitable[str]: + raise ValueError(f"Flow creation not implemented for this chain {self.CHAIN}") + + def get_flow(self, receiver: str): + raise ValueError(f"Get flow not implemented for this chain {self.CHAIN}") + + def update_flow(self, receiver: str, flow: Decimal) -> Awaitable[str]: + raise ValueError(f"Flow update not implemented for this chain {self.CHAIN}") + + def delete_flow(self, receiver: str) -> Awaitable[str]: + raise ValueError(f"Flow deletion not implemented for this chain {self.CHAIN}") + + +def get_fallback_account( + path: Optional[Path] = None, chain: Optional[Chain] = None +) -> ETHAccount: + return ETHAccount(private_key=get_fallback_private_key(path=path), chain=chain) diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index 5fe4cd4b..846d82ab 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -99,10 +99,9 @@ class Settings(BaseSettings): active=False, ), # MAINNETS - Chain.ETH: ChainInfo( - chain_id=1, - rpc="https://eth-mainnet.public.blastapi.io", - token="0x27702a26126e0B3702af63Ee09aC4d1A084EF628", + Chain.ARBITRUM: ChainInfo( + chain_id=42161, + rpc="https://arbitrum-one.publicnode.com", ), Chain.AVAX: ChainInfo( chain_id=43114, @@ -116,12 +115,65 @@ class Settings(BaseSettings): token="0xc0Fbc4967259786C743361a5885ef49380473dCF", 
super_token="0xc0Fbc4967259786C743361a5885ef49380473dCF", ), + Chain.BLAST: ChainInfo( + chain_id=81457, + rpc="https://blastl2-mainnet.public.blastapi.io", + ), + Chain.BOB: ChainInfo( + chain_id=60808, + rpc="https://bob-mainnet.public.blastapi.io", + ), Chain.BSC: ChainInfo( chain_id=56, rpc="https://binance.llamarpc.com", token="0x82D2f8E02Afb160Dd5A480a617692e62de9038C4", active=False, ), + Chain.CYBER: ChainInfo( + chain_id=7560, + rpc="https://rpc.cyber.co", + ), + Chain.ETH: ChainInfo( + chain_id=1, + rpc="https://eth-mainnet.public.blastapi.io", + token="0x27702a26126e0B3702af63Ee09aC4d1A084EF628", + ), + Chain.FRAXTAL: ChainInfo( + chain_id=252, + rpc="https://rpc.frax.com", + ), + Chain.LINEA: ChainInfo( + chain_id=59144, + rpc="https://linea-rpc.publicnode.com", + ), + Chain.LISK: ChainInfo( + chain_id=1135, + rpc="https://rpc.api.lisk.com", + ), + Chain.METIS: ChainInfo( + chain_id=1088, + rpc="https://metis.drpc.org", + ), + Chain.MODE: ChainInfo( + chain_id=34443, + rpc="https://mode.drpc.org", + ), + Chain.OPTIMISM: ChainInfo( + chain_id=10, + rpc="https://optimism-rpc.publicnode.com", + ), + Chain.POL: ChainInfo( + chain_id=137, + rpc="https://polygon.gateway.tenderly.co", + ), + Chain.WORLDCHAIN: ChainInfo( + chain_id=480, + rpc="https://worldchain-mainnet.gateway.tenderly.co", + ), + Chain.ZORA: ChainInfo( + chain_id=7777777, + rpc="https://rpc.zora.energy/", + ), } # Add all placeholders to allow easy dynamic setup of CHAINS CHAINS_SEPOLIA_ACTIVE: Optional[bool] @@ -135,6 +187,8 @@ class Settings(BaseSettings): CHAINS_BASE_RPC: Optional[str] CHAINS_BSC_RPC: Optional[str] + DEFAULT_CHAIN: Chain = Chain.ETH + # Dns resolver DNS_IPFS_DOMAIN = "ipfs.public.aleph.sh" DNS_PROGRAM_DOMAIN = "program.public.aleph.sh" diff --git a/src/aleph/sdk/evm_utils.py b/src/aleph/sdk/evm_utils.py index c7166cec..4d2026ef 100644 --- a/src/aleph/sdk/evm_utils.py +++ b/src/aleph/sdk/evm_utils.py @@ -79,10 +79,16 @@ def get_super_token_address( return None -def 
get_chains_with_holding() -> List[Union[Chain, str]]: +def get_compatible_chains() -> List[Union[Chain, str]]: return [chain for chain, info in settings.CHAINS.items() if info.active] +def get_chains_with_holding() -> List[Union[Chain, str]]: + return [ + chain for chain, info in settings.CHAINS.items() if info.active and info.token + ] + + def get_chains_with_super_token() -> List[Union[Chain, str]]: return [ chain diff --git a/src/aleph/sdk/types.py b/src/aleph/sdk/types.py index dab90379..8f70dbd8 100644 --- a/src/aleph/sdk/types.py +++ b/src/aleph/sdk/types.py @@ -6,7 +6,7 @@ __all__ = ("StorageEnum", "Account", "AccountFromPrivateKey", "GenericMessage") -from aleph_message.models import AlephMessage +from aleph_message.models import AlephMessage, Chain class StorageEnum(str, Enum): @@ -35,7 +35,7 @@ def get_public_key(self) -> str: ... class AccountFromPrivateKey(Account, Protocol): """Only accounts that are initialized from a private key string are supported.""" - def __init__(self, private_key: bytes): ... + def __init__(self, private_key: bytes, chain: Chain): ... async def sign_raw(self, buffer: bytes) -> bytes: ... 
@@ -77,6 +77,6 @@ class ChainInfo(BaseModel): chain_id: int rpc: str - token: str + token: Optional[str] = None super_token: Optional[str] = None active: bool = True From a1c73139a59233899b55f5d7aa452b0c51257001 Mon Sep 17 00:00:00 2001 From: nesitor Date: Fri, 15 Nov 2024 17:37:03 +0700 Subject: [PATCH 064/122] Fix wrong account config alert (#184) --- src/aleph/sdk/account.py | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/src/aleph/sdk/account.py b/src/aleph/sdk/account.py index 0bf54201..09ecb6a5 100644 --- a/src/aleph/sdk/account.py +++ b/src/aleph/sdk/account.py @@ -93,28 +93,35 @@ def _load_account( """Load an account from a private key string or file, or from the configuration file.""" config = load_main_configuration(settings.CONFIG_FILE) - chain_to_use = settings.DEFAULT_CHAIN + default_chain = settings.DEFAULT_CHAIN if not chain: if config and hasattr(config, "chain"): - chain_to_use = config.chain + chain = config.chain logger.debug( f"Detected {config.chain} account for path {settings.CONFIG_FILE}" ) + else: + chain = default_chain + logger.warning( + f"No main configuration found on path {settings.CONFIG_FILE}, defaulting to {chain}" + ) + else: + chain = default_chain # Loads configuration if no account_type is specified if not account_type: - account_type = load_chain_account_type(chain_to_use) - logger.warning( - f"No main configuration data found in {settings.CONFIG_FILE}, defaulting to {account_type and account_type.__name__}" + account_type = load_chain_account_type(chain) + logger.debug( + f"No account type specified defaulting to {account_type and account_type.__name__}" ) # Loads private key from a string if private_key_str: - return account_from_hex_string(private_key_str, account_type, chain_to_use) + return account_from_hex_string(private_key_str, account_type, chain) # Loads private key from a file elif private_key_path and private_key_path.is_file(): - return account_from_file(private_key_path, 
account_type, chain_to_use) + return account_from_file(private_key_path, account_type, chain) # For ledger keys elif settings.REMOTE_CRYPTO_HOST: logger.debug("Using remote account") @@ -129,7 +136,7 @@ def _load_account( else: new_private_key = get_fallback_private_key() account = account_from_hex_string( - bytes.hex(new_private_key), account_type, chain_to_use + bytes.hex(new_private_key), account_type, chain ) logger.info( f"Generated fallback private key with address {account.get_address()}" From dfd81afb2c31ed4cb4d2d8008b68fd9865184524 Mon Sep 17 00:00:00 2001 From: philogicae Date: Tue, 26 Nov 2024 14:33:12 +0200 Subject: [PATCH 065/122] Fix wrong chain account loading + refactoring (#189) --- src/aleph/sdk/account.py | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/src/aleph/sdk/account.py b/src/aleph/sdk/account.py index 09ecb6a5..872ee3c4 100644 --- a/src/aleph/sdk/account.py +++ b/src/aleph/sdk/account.py @@ -54,12 +54,14 @@ def account_from_hex_string( private_key_str = private_key_str[2:] if not chain: - if not account_type: - account_type = load_chain_account_type(Chain.ETH) # type: ignore - return account_type(bytes.fromhex(private_key_str)) # type: ignore + chain = settings.DEFAULT_CHAIN + if not account_type: + account_type = load_chain_account_type(chain) # type: ignore + account = account_type( + bytes.fromhex(private_key_str), + **({"chain": chain} if type(account_type) in [ETHAccount, EVMAccount] else {}), + ) # type: ignore - account_type = load_chain_account_type(chain) - account = account_type(bytes.fromhex(private_key_str), chain) if chain in get_chains_with_super_token(): account.switch_chain(chain) return account # type: ignore @@ -73,12 +75,14 @@ def account_from_file( private_key = private_key_path.read_bytes() if not chain: - if not account_type: - account_type = load_chain_account_type(Chain.ETH) # type: ignore - return account_type(private_key) # type: ignore + chain = 
settings.DEFAULT_CHAIN + if not account_type: + account_type = load_chain_account_type(chain) # type: ignore + account = account_type( + private_key, + **({"chain": chain} if type(account_type) in [ETHAccount, EVMAccount] else {}), + ) # type: ignore - account_type = load_chain_account_type(chain) - account = account_type(private_key, chain) if chain in get_chains_with_super_token(): account.switch_chain(chain) return account @@ -106,8 +110,6 @@ def _load_account( logger.warning( f"No main configuration found on path {settings.CONFIG_FILE}, defaulting to {chain}" ) - else: - chain = default_chain # Loads configuration if no account_type is specified if not account_type: From 4242aa3e341f8e18e8401f991e5df9253cbd3c50 Mon Sep 17 00:00:00 2001 From: philogicae Date: Tue, 26 Nov 2024 14:34:14 +0200 Subject: [PATCH 066/122] Add missing env var placeholders for new EVM chains (#188) --- src/aleph/sdk/conf.py | 47 ++++++++++++++++++++++++++++++++++--------- 1 file changed, 37 insertions(+), 10 deletions(-) diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index 846d82ab..f81b60df 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -176,16 +176,43 @@ class Settings(BaseSettings): ), } # Add all placeholders to allow easy dynamic setup of CHAINS - CHAINS_SEPOLIA_ACTIVE: Optional[bool] - CHAINS_ETH_ACTIVE: Optional[bool] - CHAINS_AVAX_ACTIVE: Optional[bool] - CHAINS_BASE_ACTIVE: Optional[bool] - CHAINS_BSC_ACTIVE: Optional[bool] - CHAINS_SEPOLIA_RPC: Optional[str] - CHAINS_ETH_RPC: Optional[str] - CHAINS_AVAX_RPC: Optional[str] - CHAINS_BASE_RPC: Optional[str] - CHAINS_BSC_RPC: Optional[str] + CHAINS_SEPOLIA_ACTIVE: Optional[bool] = None + CHAINS_ETH_ACTIVE: Optional[bool] = None + CHAINS_AVAX_ACTIVE: Optional[bool] = None + CHAINS_BASE_ACTIVE: Optional[bool] = None + CHAINS_BSC_ACTIVE: Optional[bool] = None + CHAINS_ARBITRUM_ACTIVE: Optional[bool] = None + CHAINS_BLAST_ACTIVE: Optional[bool] = None + CHAINS_BOB_ACTIVE: Optional[bool] = None + 
CHAINS_CYBER_ACTIVE: Optional[bool] = None + CHAINS_FRAXTAL_ACTIVE: Optional[bool] = None + CHAINS_LINEA_ACTIVE: Optional[bool] = None + CHAINS_LISK_ACTIVE: Optional[bool] = None + CHAINS_METIS_ACTIVE: Optional[bool] = None + CHAINS_MODE_ACTIVE: Optional[bool] = None + CHAINS_OPTIMISM_ACTIVE: Optional[bool] = None + CHAINS_POL_ACTIVE: Optional[bool] = None + CHAINS_WORLDCHAIN_ACTIVE: Optional[bool] = None + CHAINS_ZORA_ACTIVE: Optional[bool] = None + + CHAINS_SEPOLIA_RPC: Optional[str] = None + CHAINS_ETH_RPC: Optional[str] = None + CHAINS_AVAX_RPC: Optional[str] = None + CHAINS_BASE_RPC: Optional[str] = None + CHAINS_BSC_RPC: Optional[str] = None + CHAINS_ARBITRUM_RPC: Optional[str] = None + CHAINS_BLAST_RPC: Optional[str] = None + CHAINS_BOB_RPC: Optional[str] = None + CHAINS_CYBER_RPC: Optional[str] = None + CHAINS_FRAXTAL_RPC: Optional[str] = None + CHAINS_LINEA_RPC: Optional[str] = None + CHAINS_LISK_RPC: Optional[str] = None + CHAINS_METIS_RPC: Optional[str] = None + CHAINS_MODE_RPC: Optional[str] = None + CHAINS_OPTIMISM_RPC: Optional[str] = None + CHAINS_POL_RPC: Optional[str] = None + CHAINS_WORLDCHAIN_RPC: Optional[str] = None + CHAINS_ZORA_RPC: Optional[str] = None DEFAULT_CHAIN: Chain = Chain.ETH From abdb7607ec34b3a7332567bb7b75cd4c1dc0435d Mon Sep 17 00:00:00 2001 From: "Alie.E" Date: Tue, 26 Nov 2024 13:35:22 +0100 Subject: [PATCH 067/122] Fix get_message_status #186 (#187) --- src/aleph/sdk/client/http.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index 2c953d4e..7a2c7cca 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -467,3 +467,5 @@ async def get_message_status(self, item_hash: str) -> MessageStatus: if resp.status == HTTPNotFound.status_code: raise MessageNotFoundError(f"No such hash {item_hash}") resp.raise_for_status() + result = await resp.json() + return MessageStatus(result["status"]) From fe1823c1aacbc1b1571cde54bf0f363c3687ba3b Mon Sep 17 
00:00:00 2001 From: nesitor Date: Mon, 9 Dec 2024 18:29:07 +0100 Subject: [PATCH 068/122] Implement GPU support (#191) * Problem: If a user wants to deploy a VM using the GPU support, he cannot do it from the SDK because it's not updated with latest aleph_message protocol. Solution: Update aleph_message protocol with last changes. * Fix: Put released version of `aleph-message` package. --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index d6644265..cfddfdb9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ dynamic = [ "version" ] dependencies = [ "aiohttp>=3.8.3", "aioresponses>=0.7.6", - "aleph-message>=0.5", + "aleph-message>=0.6", "aleph-superfluid>=0.2.1", "base58==2.1.1", # Needed now as default with _load_account changement "coincurve; python_version<'3.11'", From 63fec9a59abe1f2659ed1fd00da16997810afc24 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 03:40:34 +0000 Subject: [PATCH 069/122] Chore(deps): Bump ledgereth from 0.9.1 to 0.10.0 Bumps [ledgereth](https://github.com/mikeshultz/ledger-eth-lib) from 0.9.1 to 0.10.0. - [Release notes](https://github.com/mikeshultz/ledger-eth-lib/releases) - [Commits](https://github.com/mikeshultz/ledger-eth-lib/compare/v0.9.1...v0.10.0) --- updated-dependencies: - dependency-name: ledgereth dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index cfddfdb9..ca59ef79 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,7 +61,7 @@ optional-dependencies.encryption = [ "eciespy>=0.3.13; python_version>='3.11'", ] optional-dependencies.ledger = [ - "ledgereth==0.9.1", + "ledgereth==0.10", ] optional-dependencies.mqtt = [ "aiomqtt<=0.1.3", From 1e2eebe46f249a5990fe3203752bdbc578c52a22 Mon Sep 17 00:00:00 2001 From: Olivier Le Thanh Duong Date: Fri, 13 Dec 2024 10:55:33 +0100 Subject: [PATCH 070/122] Add VmClient.operate (#194) Variation on perform_operation that allow a raw response. Move from the aleph-client code introduced in https://github.com/aleph-im/aleph-client/pull/304 --- src/aleph/sdk/client/vm_client.py | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/src/aleph/sdk/client/vm_client.py b/src/aleph/sdk/client/vm_client.py index 83b00dc9..f41c6ac1 100644 --- a/src/aleph/sdk/client/vm_client.py +++ b/src/aleph/sdk/client/vm_client.py @@ -5,6 +5,7 @@ from urllib.parse import urlparse import aiohttp +from aiohttp.client import _RequestContextManager from aleph_message.models import Chain, ItemHash from eth_account.messages import encode_defunct from jwcrypto import jwk @@ -127,6 +128,32 @@ async def perform_operation( logger.error(f"HTTP error during operation {operation}: {str(e)}") return None, str(e) + def operate( + self, vm_id: ItemHash, operation: str, method: str = "POST" + ) -> _RequestContextManager: + """Request a CRN an operation for a VM (eg reboot, logs) + + This operation is authenticated via the user wallet. + Use as an async context manager. 
+ e.g `async with client.operate(vm_id=item_hash, operation="logs", method="GET") as response:` + """ + + async def authenticated_request(): + if not self.pubkey_signature_header: + self.pubkey_signature_header = ( + await self._generate_pubkey_signature_header() + ) + + url, header = await self._generate_header( + vm_id=vm_id, operation=operation, method=method + ) + resp = await self.session._request( + method=method, str_or_url=url, headers=header + ) + return resp + + return _RequestContextManager(authenticated_request()) + async def get_logs(self, vm_id: ItemHash) -> AsyncGenerator[str, None]: if not self.pubkey_signature_header: self.pubkey_signature_header = ( From 859cbd086ad26e1fc57b907252599a0df32d276f Mon Sep 17 00:00:00 2001 From: Reza Rahemtola <49811529+RezaRahemtola@users.noreply.github.com> Date: Mon, 9 Dec 2024 06:12:03 +0100 Subject: [PATCH 071/122] fix(create_instance): Default memory in docstring --- src/aleph/sdk/client/abstract.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/aleph/sdk/client/abstract.py b/src/aleph/sdk/client/abstract.py index 5f1bd942..025aae6a 100644 --- a/src/aleph/sdk/client/abstract.py +++ b/src/aleph/sdk/client/abstract.py @@ -430,7 +430,7 @@ async def create_instance( :param channel: Channel to use (Default: "TEST") :param address: Address to use (Default: account.get_address()) :param sync: If true, waits for the message to be processed by the API server - :param memory: Memory in MB for the VM to be allocated (Default: 128) + :param memory: Memory in MB for the VM to be allocated (Default: 2048) :param vcpus: Number of vCPUs to allocate (Default: 1) :param timeout_seconds: Timeout in seconds (Default: 30.0) :param allow_amend: Whether the deployed VM image may be changed (Default: False) From 964020c1909c268dfb596ba4fc01494ea1f38aa7 Mon Sep 17 00:00:00 2001 From: philogicae Date: Thu, 19 Dec 2024 11:09:35 +0200 Subject: [PATCH 072/122] Fix for Terms & Conditions (#193) * Add IPFS Gateway to 
conf * Move safe_getattr from CLI to SDK * Add get_stored_content --- src/aleph/sdk/client/http.py | 38 ++++++++++++++++++++++++++++++++++-- src/aleph/sdk/conf.py | 1 + src/aleph/sdk/types.py | 7 +++++++ src/aleph/sdk/utils.py | 8 ++++++++ 4 files changed, 52 insertions(+), 2 deletions(-) diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index 7a2c7cca..4b42f08a 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -20,7 +20,7 @@ import aiohttp from aiohttp.web import HTTPNotFound from aleph_message import parse_message -from aleph_message.models import AlephMessage, ItemHash, ItemType +from aleph_message.models import AlephMessage, ItemHash, ItemType, MessageType from aleph_message.status import MessageStatus from pydantic import ValidationError @@ -33,13 +33,14 @@ ) from ..query.filters import MessageFilter, PostFilter from ..query.responses import MessagesResponse, Post, PostsResponse, PriceResponse -from ..types import GenericMessage +from ..types import GenericMessage, StoredContent from ..utils import ( Writable, check_unix_socket_valid, copy_async_readable_to_buffer, extended_json_encoder, get_message_type_value, + safe_getattr, ) from .abstract import AlephClient @@ -469,3 +470,36 @@ async def get_message_status(self, item_hash: str) -> MessageStatus: resp.raise_for_status() result = await resp.json() return MessageStatus(result["status"]) + + async def get_stored_content( + self, + item_hash: str, + ) -> StoredContent: + """return the underlying content for a store message""" + + result, resp = None, None + try: + message: AlephMessage + message, status = await self.get_message( + item_hash=ItemHash(item_hash), with_status=True + ) + if status != MessageStatus.PROCESSED: + resp = f"Invalid message status: {status}" + elif message.type != MessageType.store: + resp = f"Invalid message type: {message.type}" + elif not message.content.item_hash: + resp = f"Invalid CID: {message.content.item_hash}" + else: + 
filename = safe_getattr(message.content, "metadata.name") + hash = message.content.item_hash + url = ( + f"{self.api_server}/api/v0/storage/raw/" + if len(hash) == 64 + else settings.IPFS_GATEWAY + ) + hash + result = StoredContent(filename=filename, hash=hash, url=url) + except MessageNotFoundError: + resp = f"Message not found: {item_hash}" + except ForgottenMessageError: + resp = f"Message forgotten: {item_hash}" + return result if result else StoredContent(error=resp) diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index f81b60df..4dc7c9e7 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -85,6 +85,7 @@ class Settings(BaseSettings): VM_URL_PATH = "https://aleph.sh/vm/{hash}" VM_URL_HOST = "https://{hash_base32}.aleph.sh" + IPFS_GATEWAY = "https://ipfs.aleph.cloud/ipfs/" # Web3Provider settings TOKEN_DECIMALS = 18 diff --git a/src/aleph/sdk/types.py b/src/aleph/sdk/types.py index 8f70dbd8..c698da5d 100644 --- a/src/aleph/sdk/types.py +++ b/src/aleph/sdk/types.py @@ -80,3 +80,10 @@ class ChainInfo(BaseModel): token: Optional[str] = None super_token: Optional[str] = None active: bool = True + + +class StoredContent(BaseModel): + filename: Optional[str] + hash: Optional[str] + url: Optional[str] + error: Optional[str] diff --git a/src/aleph/sdk/utils.py b/src/aleph/sdk/utils.py index 116c7b42..c3fc154a 100644 --- a/src/aleph/sdk/utils.py +++ b/src/aleph/sdk/utils.py @@ -393,3 +393,11 @@ def make_packet_header( header[20:52] = h.digest() return header + + +def safe_getattr(obj, attr, default=None): + for part in attr.split("."): + obj = getattr(obj, part, default) + if obj is default: + break + return obj From 7e832e2f5c0ec27b98885d51c61ca2a7c9975222 Mon Sep 17 00:00:00 2001 From: Laurent Peuch Date: Wed, 14 Aug 2024 15:57:41 +0200 Subject: [PATCH 073/122] feat: upgrade to our latest version of pytezos --- pyproject.toml | 2 +- tests/unit/test_chain_tezos.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git 
a/pyproject.toml b/pyproject.toml index ca59ef79..3fd02d17 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,7 +80,7 @@ optional-dependencies.substrate = [ "substrate-interface", ] optional-dependencies.tezos = [ - "aleph-pytezos==0.1.1", + "aleph-pytezos==3.13.4", "pynacl", ] urls.Documentation = "https://aleph.im/" diff --git a/tests/unit/test_chain_tezos.py b/tests/unit/test_chain_tezos.py index 0beaffc9..96e52ca3 100644 --- a/tests/unit/test_chain_tezos.py +++ b/tests/unit/test_chain_tezos.py @@ -31,7 +31,7 @@ async def test_tezos_account(tezos_account: TezosAccount): message = Message("TEZOS", tezos_account.get_address(), "SomeType", "ItemHash") signed = await tezos_account.sign_message(asdict(message)) assert signed["signature"] - assert len(signed["signature"]) == 188 + assert len(signed["signature"]) == 187 address = tezos_account.get_address() assert address is not None @@ -40,7 +40,7 @@ async def test_tezos_account(tezos_account: TezosAccount): pubkey = tezos_account.get_public_key() assert isinstance(pubkey, str) - assert len(pubkey) == 55 + assert len(pubkey) == 54 @pytest.mark.asyncio From 7b04ee5e702d015b944c395816ffd174d17a337e Mon Sep 17 00:00:00 2001 From: Laurent Peuch Date: Wed, 14 Aug 2024 16:50:04 +0200 Subject: [PATCH 074/122] ci: use python 3.12 as the reference version for tests --- .github/workflows/build-wheels.yml | 5 ++--- .github/workflows/pytest.yml | 10 +++------- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/.github/workflows/build-wheels.yml b/.github/workflows/build-wheels.yml index 61074b57..97d03fc1 100644 --- a/.github/workflows/build-wheels.yml +++ b/.github/workflows/build-wheels.yml @@ -27,7 +27,7 @@ jobs: if: startsWith(matrix.os, 'macos') uses: actions/setup-python@v2 with: - python-version: 3.11 + python-version: 3.12 - name: Cache dependencies uses: actions/cache@v4 @@ -64,9 +64,8 @@ jobs: run: | /tmp/venv/bin/python3 -m pip install dist/aleph_sdk_python-*.whl - - name: Install `setuptools` on 
systems where it is missing by default + - name: Install/upgrade `setuptools` run: /tmp/venv/bin/python3 -m pip install --upgrade setuptools - if: matrix.os == 'ubuntu-24.04' - name: Import and use the package run: | diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index b4fecc57..18322137 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -16,10 +16,9 @@ jobs: strategy: fail-fast: false matrix: - python-version: [ "3.9", "3.10", "3.11" ] - # An issue with secp256k1 prevents Python 3.12 from working - # See https://github.com/baking-bad/pytezos/issues/370 - runs-on: ubuntu-latest + python-version: [ "3.9", "3.10", "3.11", "3.12" ] + os: [ubuntu-22.04, ubuntu-24.04] + runs-on: ${{ matrix.os }} steps: - uses: actions/checkout@v4 @@ -37,13 +36,10 @@ jobs: /tmp/venv/bin/pip freeze /tmp/venv/bin/hatch run testing:pip freeze /tmp/venv/bin/hatch run testing:test - if: matrix.python-version != '3.11' - run: /tmp/venv/bin/hatch run testing:cov - if: matrix.python-version == '3.11' - uses: codecov/codecov-action@v4.0.1 - if: matrix.python-version == '3.11' with: token: ${{ secrets.CODECOV_TOKEN }} slug: aleph-im/aleph-sdk-python From 5fd68b4698a75267eb1b4c705b1685b637de741f Mon Sep 17 00:00:00 2001 From: Laurent Peuch Date: Thu, 24 Oct 2024 22:03:11 +0200 Subject: [PATCH 075/122] feat: run pytest on ubuntu 22.04 and 24.04 --- .github/workflows/pytest.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 18322137..f1af47c5 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -26,7 +26,9 @@ jobs: with: python-version: ${{ matrix.python-version }} - - run: sudo apt-get install -y python3-pip libsecp256k1-dev + - run: | + sudo apt-get update + sudo apt-get install -y python3-pip libsodium-dev - run: | python3 -m venv /tmp/venv From e4ce90ef2ce120737c4c49c6b5fec63eec048974 Mon Sep 17 00:00:00 2001 From: philogicae 
Date: Tue, 18 Feb 2025 19:05:00 +0200 Subject: [PATCH 076/122] Changes for new pricing system (#199) - Move/improve flow code parts from CLI to SDK - Add utils functions - Add `make_instance_content` and `make_program_content` - Refactor `create_instance` and `create_program` - Add `get_estimated_price` - Fixes for mypy/ruff/pytest - Minor improvements - Remove firecracker rootfs hashes for instances --- src/aleph/sdk/chains/ethereum.py | 26 ++- src/aleph/sdk/chains/evm.py | 9 + src/aleph/sdk/client/abstract.py | 80 ++++--- src/aleph/sdk/client/authenticated_http.py | 237 +++++++++------------ src/aleph/sdk/client/http.py | 77 ++++++- src/aleph/sdk/conf.py | 20 +- src/aleph/sdk/connectors/superfluid.py | 65 +++++- src/aleph/sdk/evm_utils.py | 23 +- src/aleph/sdk/exceptions.py | 11 +- src/aleph/sdk/types.py | 13 ++ src/aleph/sdk/utils.py | 212 +++++++++++++++++- tests/unit/aleph_vm_authentication.py | 2 +- tests/unit/test_asynchronous.py | 10 +- tests/unit/test_price.py | 12 +- tests/unit/test_superfluid.py | 13 ++ tests/unit/test_utils.py | 10 +- 16 files changed, 586 insertions(+), 234 deletions(-) diff --git a/src/aleph/sdk/chains/ethereum.py b/src/aleph/sdk/chains/ethereum.py index ab93df56..c185d174 100644 --- a/src/aleph/sdk/chains/ethereum.py +++ b/src/aleph/sdk/chains/ethereum.py @@ -15,6 +15,7 @@ from web3.types import TxParams, TxReceipt from aleph.sdk.exceptions import InsufficientFundsError +from aleph.sdk.types import TokenType from ..conf import settings from ..connectors.superfluid import Superfluid @@ -22,12 +23,13 @@ BALANCEOF_ABI, MIN_ETH_BALANCE, MIN_ETH_BALANCE_WEI, + FlowUpdate, + from_wei_token, get_chain_id, get_chains_with_super_token, get_rpc, get_super_token_address, get_token_address, - to_human_readable_token, ) from ..exceptions import BadSignatureError from ..utils import bytes_from_hex @@ -106,8 +108,9 @@ def can_transact(self, block=True) -> bool: valid = balance > MIN_ETH_BALANCE_WEI if self.chain else False if not valid and block: 
raise InsufficientFundsError( + token_type=TokenType.GAS, required_funds=MIN_ETH_BALANCE, - available_funds=to_human_readable_token(balance), + available_funds=float(from_wei_token(balance)), ) return valid @@ -162,6 +165,12 @@ def get_super_token_balance(self) -> Decimal: return Decimal(contract.functions.balanceOf(self.get_address()).call()) return Decimal(0) + def can_start_flow(self, flow: Decimal) -> bool: + """Check if the account has enough funds to start a Superfluid flow of the given size.""" + if not self.superfluid_connector: + raise ValueError("Superfluid connector is required to check a flow") + return self.superfluid_connector.can_start_flow(flow) + def create_flow(self, receiver: str, flow: Decimal) -> Awaitable[str]: """Creat a Superfluid flow between this account and the receiver address.""" if not self.superfluid_connector: @@ -188,6 +197,19 @@ def delete_flow(self, receiver: str) -> Awaitable[str]: raise ValueError("Superfluid connector is required to delete a flow") return self.superfluid_connector.delete_flow(receiver=receiver) + def manage_flow( + self, + receiver: str, + flow: Decimal, + update_type: FlowUpdate, + ) -> Awaitable[Optional[str]]: + """Manage the Superfluid flow between this account and the receiver address.""" + if not self.superfluid_connector: + raise ValueError("Superfluid connector is required to manage a flow") + return self.superfluid_connector.manage_flow( + receiver=receiver, flow=flow, update_type=update_type + ) + def get_fallback_account( path: Optional[Path] = None, chain: Optional[Chain] = None diff --git a/src/aleph/sdk/chains/evm.py b/src/aleph/sdk/chains/evm.py index 5bf66ef1..a5eeed84 100644 --- a/src/aleph/sdk/chains/evm.py +++ b/src/aleph/sdk/chains/evm.py @@ -5,6 +5,7 @@ from aleph_message.models import Chain from eth_account import Account # type: ignore +from ..evm_utils import FlowUpdate from .common import get_fallback_private_key from .ethereum import ETHAccount @@ -29,6 +30,9 @@ def 
get_token_balance(self) -> Decimal: def get_super_token_balance(self) -> Decimal: raise ValueError(f"Super token not implemented for this chain {self.CHAIN}") + def can_start_flow(self, flow: Decimal) -> bool: + raise ValueError(f"Flow checking not implemented for this chain {self.CHAIN}") + def create_flow(self, receiver: str, flow: Decimal) -> Awaitable[str]: raise ValueError(f"Flow creation not implemented for this chain {self.CHAIN}") @@ -41,6 +45,11 @@ def update_flow(self, receiver: str, flow: Decimal) -> Awaitable[str]: def delete_flow(self, receiver: str) -> Awaitable[str]: raise ValueError(f"Flow deletion not implemented for this chain {self.CHAIN}") + def manage_flow( + self, receiver: str, flow: Decimal, update_type: FlowUpdate + ) -> Awaitable[Optional[str]]: + raise ValueError(f"Flow management not implemented for this chain {self.CHAIN}") + def get_fallback_account( path: Optional[Path] = None, chain: Optional[Chain] = None diff --git a/src/aleph/sdk/client/abstract.py b/src/aleph/sdk/client/abstract.py index 025aae6a..7f9fed8e 100644 --- a/src/aleph/sdk/client/abstract.py +++ b/src/aleph/sdk/client/abstract.py @@ -20,9 +20,9 @@ from aleph_message.models import ( AlephMessage, + ExecutableContent, ItemHash, ItemType, - MessagesResponse, MessageType, Payment, PostMessage, @@ -41,7 +41,7 @@ from aleph.sdk.utils import extended_json_encoder from ..query.filters import MessageFilter, PostFilter -from ..query.responses import PostsResponse, PriceResponse +from ..query.responses import MessagesResponse, PostsResponse, PriceResponse from ..types import GenericMessage, StorageEnum from ..utils import Writable, compute_sha256 @@ -110,7 +110,7 @@ async def get_posts_iterator( ) page += 1 for post in resp.posts: - yield post + yield post # type: ignore @abstractmethod async def download_file(self, file_hash: str) -> bytes: @@ -242,6 +242,18 @@ def watch_messages( """ raise NotImplementedError("Did you mean to import `AlephHttpClient`?") + @abstractmethod + def 
get_estimated_price( + self, + content: ExecutableContent, + ) -> Coroutine[Any, Any, PriceResponse]: + """ + Get Instance/Program content estimated price + + :param content: Instance or Program content + """ + raise NotImplementedError("Did you mean to import `AlephHttpClient`?") + @abstractmethod def get_program_price( self, @@ -265,7 +277,7 @@ async def create_post( post_type: str, ref: Optional[str] = None, address: Optional[str] = None, - channel: Optional[str] = None, + channel: Optional[str] = settings.DEFAULT_CHANNEL, inline: bool = True, storage_engine: StorageEnum = StorageEnum.storage, sync: bool = False, @@ -290,9 +302,9 @@ async def create_post( async def create_aggregate( self, key: str, - content: Mapping[str, Any], + content: dict[str, Any], address: Optional[str] = None, - channel: Optional[str] = None, + channel: Optional[str] = settings.DEFAULT_CHANNEL, inline: bool = True, sync: bool = False, ) -> Tuple[AlephMessage, MessageStatus]: @@ -302,7 +314,7 @@ async def create_aggregate( :param key: Key to use to store the content :param content: Content to store :param address: Address to use to sign the message - :param channel: Channel to use (Default: "TEST") + :param channel: Channel to use (Default: "ALEPH-CLOUDSOLUTIONS") :param inline: Whether to write content inside the message (Default: True) :param sync: If true, waits for the message to be processed by the API server (Default: False) """ @@ -321,7 +333,7 @@ async def create_store( ref: Optional[str] = None, storage_engine: StorageEnum = StorageEnum.storage, extra_fields: Optional[dict] = None, - channel: Optional[str] = None, + channel: Optional[str] = settings.DEFAULT_CHANNEL, sync: bool = False, ) -> Tuple[AlephMessage, MessageStatus]: """ @@ -350,22 +362,22 @@ async def create_program( program_ref: str, entrypoint: str, runtime: str, - environment_variables: Optional[Mapping[str, str]] = None, - storage_engine: StorageEnum = StorageEnum.storage, - channel: Optional[str] = None, + 
metadata: Optional[dict[str, Any]] = None, address: Optional[str] = None, - sync: bool = False, - memory: Optional[int] = None, vcpus: Optional[int] = None, + memory: Optional[int] = None, timeout_seconds: Optional[float] = None, - persistent: bool = False, - allow_amend: bool = False, internet: bool = True, + allow_amend: bool = False, aleph_api: bool = True, encoding: Encoding = Encoding.zip, + persistent: bool = False, volumes: Optional[List[Mapping]] = None, - subscriptions: Optional[List[Mapping]] = None, - metadata: Optional[Mapping[str, Any]] = None, + environment_variables: Optional[dict[str, str]] = None, + subscriptions: Optional[List[dict]] = None, + sync: bool = False, + channel: Optional[str] = settings.DEFAULT_CHANNEL, + storage_engine: StorageEnum = StorageEnum.storage, ) -> Tuple[AlephMessage, MessageStatus]: """ Post a (create) PROGRAM message. @@ -373,22 +385,22 @@ async def create_program( :param program_ref: Reference to the program to run :param entrypoint: Entrypoint to run :param runtime: Runtime to use - :param environment_variables: Environment variables to pass to the program - :param storage_engine: Storage engine to use (Default: "storage") - :param channel: Channel to use (Default: "TEST") + :param metadata: Metadata to attach to the message :param address: Address to use (Default: account.get_address()) - :param sync: If true, waits for the message to be processed by the API server - :param memory: Memory in MB for the VM to be allocated (Default: 128) :param vcpus: Number of vCPUs to allocate (Default: 1) + :param memory: Memory in MB for the VM to be allocated (Default: 128) :param timeout_seconds: Timeout in seconds (Default: 30.0) - :param persistent: Whether the program should be persistent or not (Default: False) - :param allow_amend: Whether the deployed VM image may be changed (Default: False) :param internet: Whether the VM should have internet connectivity. 
(Default: True) + :param allow_amend: Whether the deployed VM image may be changed (Default: False) :param aleph_api: Whether the VM needs access to Aleph messages API (Default: True) :param encoding: Encoding to use (Default: Encoding.zip) + :param persistent: Whether the program should be persistent or not (Default: False) :param volumes: Volumes to mount + :param environment_variables: Environment variables to pass to the program :param subscriptions: Patterns of aleph.im messages to forward to the program's event receiver - :param metadata: Metadata to attach to the message + :param sync: If true, waits for the message to be processed by the API server + :param channel: Channel to use (Default: "ALEPH-CLOUDSOLUTIONS") + :param storage_engine: Storage engine to use (Default: "storage") """ raise NotImplementedError( "Did you mean to import `AuthenticatedAlephHttpClient`?" @@ -400,9 +412,9 @@ async def create_instance( rootfs: str, rootfs_size: int, payment: Optional[Payment] = None, - environment_variables: Optional[Mapping[str, str]] = None, + environment_variables: Optional[dict[str, str]] = None, storage_engine: StorageEnum = StorageEnum.storage, - channel: Optional[str] = None, + channel: Optional[str] = settings.DEFAULT_CHANNEL, address: Optional[str] = None, sync: bool = False, memory: Optional[int] = None, @@ -416,7 +428,7 @@ async def create_instance( volumes: Optional[List[Mapping]] = None, volume_persistence: str = "host", ssh_keys: Optional[List[str]] = None, - metadata: Optional[Mapping[str, Any]] = None, + metadata: Optional[dict[str, Any]] = None, requirements: Optional[HostRequirements] = None, ) -> Tuple[AlephMessage, MessageStatus]: """ @@ -427,7 +439,7 @@ async def create_instance( :param payment: Payment method used to pay for the instance :param environment_variables: Environment variables to pass to the program :param storage_engine: Storage engine to use (Default: "storage") - :param channel: Channel to use (Default: "TEST") + :param 
channel: Channel to use (Default: "ALEPH-CLOUDSOLUTIONS") :param address: Address to use (Default: account.get_address()) :param sync: If true, waits for the message to be processed by the API server :param memory: Memory in MB for the VM to be allocated (Default: 2048) @@ -455,7 +467,7 @@ async def forget( hashes: List[ItemHash], reason: Optional[str], storage_engine: StorageEnum = StorageEnum.storage, - channel: Optional[str] = None, + channel: Optional[str] = settings.DEFAULT_CHANNEL, address: Optional[str] = None, sync: bool = False, ) -> Tuple[AlephMessage, MessageStatus]: @@ -468,7 +480,7 @@ async def forget( :param hashes: Hashes of the messages to forget :param reason: Reason for forgetting the messages :param storage_engine: Storage engine to use (Default: "storage") - :param channel: Channel to use (Default: "TEST") + :param channel: Channel to use (Default: "ALEPH-CLOUDSOLUTIONS") :param address: Address to use (Default: account.get_address()) :param sync: If true, waits for the message to be processed by the API server (Default: False) """ @@ -490,7 +502,7 @@ async def generate_signed_message( :param message_type: Type of the message (PostMessage, ...) 
:param content: User-defined content of the message - :param channel: Channel to use (Default: "TEST") + :param channel: Channel to use (Default: "ALEPH-CLOUDSOLUTIONS") :param allow_inlining: Whether to allow inlining the content of the message (Default: True) :param storage_engine: Storage engine to use (Default: "storage") """ @@ -537,7 +549,7 @@ async def submit( self, content: Dict[str, Any], message_type: MessageType, - channel: Optional[str] = None, + channel: Optional[str] = settings.DEFAULT_CHANNEL, storage_engine: StorageEnum = StorageEnum.storage, allow_inlining: bool = True, sync: bool = False, @@ -549,7 +561,7 @@ async def submit( :param content: Content of the message :param message_type: Type of the message - :param channel: Channel to use (Default: "TEST") + :param channel: Channel to use (Default: "ALEPH-CLOUDSOLUTIONS") :param storage_engine: Storage engine to use (Default: "storage") :param allow_inlining: Whether to allow inlining the content of the message (Default: True) :param sync: If true, waits for the message to be processed by the API server (Default: False) diff --git a/src/aleph/sdk/client/authenticated_http.py b/src/aleph/sdk/client/authenticated_http.py index f84b97ca..9bb9a1e7 100644 --- a/src/aleph/sdk/client/authenticated_http.py +++ b/src/aleph/sdk/client/authenticated_http.py @@ -5,45 +5,37 @@ import time from io import BytesIO from pathlib import Path -from typing import Any, Dict, List, Mapping, NoReturn, Optional, Tuple, Union +from typing import Any, Dict, Mapping, NoReturn, Optional, Tuple, Union import aiohttp from aleph_message.models import ( AggregateContent, AggregateMessage, AlephMessage, - Chain, ForgetContent, ForgetMessage, - InstanceContent, InstanceMessage, ItemHash, + ItemType, MessageType, PostContent, PostMessage, - ProgramContent, ProgramMessage, StoreContent, StoreMessage, ) -from aleph_message.models.execution.base import Encoding, Payment, PaymentType +from aleph_message.models.execution.base import 
Encoding, Payment from aleph_message.models.execution.environment import ( - FunctionEnvironment, HostRequirements, HypervisorType, - InstanceEnvironment, - MachineResources, TrustedExecutionEnvironment, ) -from aleph_message.models.execution.instance import RootfsVolume -from aleph_message.models.execution.program import CodeContent, FunctionRuntime -from aleph_message.models.execution.volume import MachineVolume, ParentVolume from aleph_message.status import MessageStatus from ..conf import settings from ..exceptions import BroadcastError, InsufficientFundsError, InvalidMessageError -from ..types import Account, StorageEnum -from ..utils import extended_json_encoder, parse_volume +from ..types import Account, StorageEnum, TokenType +from ..utils import extended_json_encoder, make_instance_content, make_program_content from .abstract import AuthenticatedAlephClient from .http import AlephHttpClient @@ -285,7 +277,7 @@ async def create_post( post_type: str, ref: Optional[str] = None, address: Optional[str] = None, - channel: Optional[str] = None, + channel: Optional[str] = settings.DEFAULT_CHANNEL, inline: bool = True, storage_engine: StorageEnum = StorageEnum.storage, sync: bool = False, @@ -308,14 +300,14 @@ async def create_post( storage_engine=storage_engine, sync=sync, ) - return message, status + return message, status # type: ignore async def create_aggregate( self, key: str, - content: Mapping[str, Any], + content: dict[str, Any], address: Optional[str] = None, - channel: Optional[str] = None, + channel: Optional[str] = settings.DEFAULT_CHANNEL, inline: bool = True, sync: bool = False, ) -> Tuple[AggregateMessage, MessageStatus]: @@ -335,7 +327,7 @@ async def create_aggregate( allow_inlining=inline, sync=sync, ) - return message, status + return message, status # type: ignore async def create_store( self, @@ -347,7 +339,7 @@ async def create_store( ref: Optional[str] = None, storage_engine: StorageEnum = StorageEnum.storage, extra_fields: Optional[dict] = 
None, - channel: Optional[str] = None, + channel: Optional[str] = settings.DEFAULT_CHANNEL, sync: bool = False, ) -> Tuple[StoreMessage, MessageStatus]: address = address or settings.ADDRESS_TO_USE or self.account.get_address() @@ -400,7 +392,7 @@ async def create_store( if extra_fields is not None: values.update(extra_fields) - content = StoreContent(**values) + content = StoreContent.parse_obj(values) message, status, _ = await self.submit( content=content.dict(exclude_none=True), @@ -409,109 +401,89 @@ async def create_store( allow_inlining=True, sync=sync, ) - return message, status + return message, status # type: ignore async def create_program( self, program_ref: str, entrypoint: str, runtime: str, - environment_variables: Optional[Mapping[str, str]] = None, - storage_engine: StorageEnum = StorageEnum.storage, - channel: Optional[str] = None, + metadata: Optional[dict[str, Any]] = None, address: Optional[str] = None, - sync: bool = False, - memory: Optional[int] = None, vcpus: Optional[int] = None, + memory: Optional[int] = None, timeout_seconds: Optional[float] = None, - persistent: bool = False, - allow_amend: bool = False, internet: bool = True, + allow_amend: bool = False, aleph_api: bool = True, encoding: Encoding = Encoding.zip, - volumes: Optional[List[Mapping]] = None, - subscriptions: Optional[List[Mapping]] = None, - metadata: Optional[Mapping[str, Any]] = None, + persistent: bool = False, + volumes: Optional[list[Mapping]] = None, + environment_variables: Optional[dict[str, str]] = None, + subscriptions: Optional[list[dict]] = None, + sync: bool = False, + channel: Optional[str] = settings.DEFAULT_CHANNEL, + storage_engine: StorageEnum = StorageEnum.storage, ) -> Tuple[ProgramMessage, MessageStatus]: address = address or settings.ADDRESS_TO_USE or self.account.get_address() - volumes = volumes if volumes is not None else [] - memory = memory or settings.DEFAULT_VM_MEMORY - vcpus = vcpus or settings.DEFAULT_VM_VCPUS - timeout_seconds = 
timeout_seconds or settings.DEFAULT_VM_TIMEOUT - - # TODO: Check that program_ref, runtime and data_ref exist - - # Register the different ways to trigger a VM - if subscriptions: - # Trigger on HTTP calls and on aleph.im message subscriptions. - triggers = { - "http": True, - "persistent": persistent, - "message": subscriptions, - } - else: - # Trigger on HTTP calls. - triggers = {"http": True, "persistent": persistent} - - volumes: List[MachineVolume] = [parse_volume(volume) for volume in volumes] - - content = ProgramContent( - type="vm-function", + content = make_program_content( + program_ref=program_ref, + entrypoint=entrypoint, + runtime=runtime, + metadata=metadata, address=address, + vcpus=vcpus, + memory=memory, + timeout_seconds=timeout_seconds, + internet=internet, + aleph_api=aleph_api, allow_amend=allow_amend, - code=CodeContent( - encoding=encoding, - entrypoint=entrypoint, - ref=program_ref, - use_latest=True, - ), - on=triggers, - environment=FunctionEnvironment( - reproducible=False, - internet=internet, - aleph_api=aleph_api, - ), - variables=environment_variables, - resources=MachineResources( - vcpus=vcpus, - memory=memory, - seconds=timeout_seconds, - ), - runtime=FunctionRuntime( - ref=runtime, - use_latest=True, - comment=( - "Official aleph.im runtime" - if runtime == settings.DEFAULT_RUNTIME_ID - else "" - ), - ), - volumes=[parse_volume(volume) for volume in volumes], - time=time.time(), - metadata=metadata, + encoding=encoding, + persistent=persistent, + volumes=volumes, + environment_variables=environment_variables, + subscriptions=subscriptions, ) - # Ensure that the version of aleph-message used supports the field. 
- assert content.on.persistent == persistent - message, status, _ = await self.submit( content=content.dict(exclude_none=True), message_type=MessageType.program, channel=channel, storage_engine=storage_engine, sync=sync, + raise_on_rejected=False, ) - return message, status + if status in (MessageStatus.PROCESSED, MessageStatus.PENDING): + return message, status # type: ignore + + # get the reason for rejection + rejected_message = await self.get_message_error(message.item_hash) + assert rejected_message, "No rejected message found" + error_code = rejected_message["error_code"] + if error_code == 5: + # not enough balance + details = rejected_message["details"] + errors = details["errors"] + error = errors[0] + account_balance = float(error["account_balance"]) + required_balance = float(error["required_balance"]) + raise InsufficientFundsError( + token_type=TokenType.ALEPH, + required_funds=required_balance, + available_funds=account_balance, + ) + else: + raise ValueError(f"Unknown error code {error_code}: {rejected_message}") async def create_instance( self, rootfs: str, rootfs_size: int, payment: Optional[Payment] = None, - environment_variables: Optional[Mapping[str, str]] = None, + environment_variables: Optional[dict[str, str]] = None, storage_engine: StorageEnum = StorageEnum.storage, - channel: Optional[str] = None, + channel: Optional[str] = settings.DEFAULT_CHANNEL, address: Optional[str] = None, sync: bool = False, memory: Optional[int] = None, @@ -522,55 +494,34 @@ async def create_instance( aleph_api: bool = True, hypervisor: Optional[HypervisorType] = None, trusted_execution: Optional[TrustedExecutionEnvironment] = None, - volumes: Optional[List[Mapping]] = None, + volumes: Optional[list[Mapping]] = None, volume_persistence: str = "host", - ssh_keys: Optional[List[str]] = None, - metadata: Optional[Mapping[str, Any]] = None, + ssh_keys: Optional[list[str]] = None, + metadata: Optional[dict[str, Any]] = None, requirements: Optional[HostRequirements] = 
None, ) -> Tuple[InstanceMessage, MessageStatus]: address = address or settings.ADDRESS_TO_USE or self.account.get_address() - volumes = volumes if volumes is not None else [] - memory = memory or settings.DEFAULT_VM_MEMORY - vcpus = vcpus or settings.DEFAULT_VM_VCPUS - timeout_seconds = timeout_seconds or settings.DEFAULT_VM_TIMEOUT - - payment = payment or Payment(chain=Chain.ETH, type=PaymentType.hold) - - # Default to the QEMU hypervisor for instances. - selected_hypervisor: HypervisorType = hypervisor or HypervisorType.qemu - - content = InstanceContent( + content = make_instance_content( + rootfs=rootfs, + rootfs_size=rootfs_size, + payment=payment, + environment_variables=environment_variables, address=address, + memory=memory, + vcpus=vcpus, + timeout_seconds=timeout_seconds, allow_amend=allow_amend, - environment=InstanceEnvironment( - internet=internet, - aleph_api=aleph_api, - hypervisor=selected_hypervisor, - trusted_execution=trusted_execution, - ), - variables=environment_variables, - resources=MachineResources( - vcpus=vcpus, - memory=memory, - seconds=timeout_seconds, - ), - rootfs=RootfsVolume( - parent=ParentVolume( - ref=rootfs, - use_latest=True, - ), - size_mib=rootfs_size, - persistence="host", - use_latest=True, - ), - volumes=[parse_volume(volume) for volume in volumes], - requirements=requirements, - time=time.time(), - authorized_keys=ssh_keys, + internet=internet, + aleph_api=aleph_api, + hypervisor=hypervisor, + trusted_execution=trusted_execution, + volumes=volumes, + ssh_keys=ssh_keys, metadata=metadata, - payment=payment, + requirements=requirements, ) + message, status, response = await self.submit( content=content.dict(exclude_none=True), message_type=MessageType.instance, @@ -580,7 +531,7 @@ async def create_instance( raise_on_rejected=False, ) if status in (MessageStatus.PROCESSED, MessageStatus.PENDING): - return message, status + return message, status # type: ignore # get the reason for rejection rejected_message = await 
self.get_message_error(message.item_hash) @@ -594,17 +545,19 @@ async def create_instance( account_balance = float(error["account_balance"]) required_balance = float(error["required_balance"]) raise InsufficientFundsError( - required_funds=required_balance, available_funds=account_balance + token_type=TokenType.ALEPH, + required_funds=required_balance, + available_funds=account_balance, ) else: raise ValueError(f"Unknown error code {error_code}: {rejected_message}") async def forget( self, - hashes: List[ItemHash], + hashes: list[ItemHash], reason: Optional[str], storage_engine: StorageEnum = StorageEnum.storage, - channel: Optional[str] = None, + channel: Optional[str] = settings.DEFAULT_CHANNEL, address: Optional[str] = None, sync: bool = False, ) -> Tuple[ForgetMessage, MessageStatus]: @@ -625,13 +578,13 @@ async def forget( allow_inlining=True, sync=sync, ) - return message, status + return message, status # type: ignore async def submit( self, content: Dict[str, Any], message_type: MessageType, - channel: Optional[str] = None, + channel: Optional[str] = settings.DEFAULT_CHANNEL, storage_engine: StorageEnum = StorageEnum.storage, allow_inlining: bool = True, sync: bool = False, @@ -653,7 +606,7 @@ async def _storage_push_file_with_message( self, file_content: bytes, store_content: StoreContent, - channel: Optional[str] = None, + channel: Optional[str] = settings.DEFAULT_CHANNEL, sync: bool = False, ) -> Tuple[StoreMessage, MessageStatus]: """Push a file to the storage service.""" @@ -685,7 +638,7 @@ async def _storage_push_file_with_message( message_status = ( MessageStatus.PENDING if resp.status == 202 else MessageStatus.PROCESSED ) - return message, message_status + return message, message_status # type: ignore async def _upload_file_native( self, @@ -694,7 +647,7 @@ async def _upload_file_native( guess_mime_type: bool = False, ref: Optional[str] = None, extra_fields: Optional[dict] = None, - channel: Optional[str] = None, + channel: Optional[str] = 
settings.DEFAULT_CHANNEL, sync: bool = False, ) -> Tuple[StoreMessage, MessageStatus]: file_hash = hashlib.sha256(file_content).hexdigest() @@ -706,9 +659,9 @@ async def _upload_file_native( store_content = StoreContent( address=address, ref=ref, - item_type=StorageEnum.storage, - item_hash=file_hash, - mime_type=mime_type, + item_type=ItemType.storage, + item_hash=ItemHash(file_hash), + mime_type=mime_type, # type: ignore time=time.time(), **extra_fields, ) diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index 4b42f08a..f4e8b898 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -2,6 +2,7 @@ import logging import os.path import ssl +import time from io import BytesIO from pathlib import Path from typing import ( @@ -20,7 +21,15 @@ import aiohttp from aiohttp.web import HTTPNotFound from aleph_message import parse_message -from aleph_message.models import AlephMessage, ItemHash, ItemType, MessageType +from aleph_message.models import ( + AlephMessage, + Chain, + ExecutableContent, + ItemHash, + ItemType, + MessageType, + ProgramContent, +) from aleph_message.status import MessageStatus from pydantic import ValidationError @@ -37,6 +46,7 @@ from ..utils import ( Writable, check_unix_socket_valid, + compute_sha256, copy_async_readable_to_buffer, extended_json_encoder, get_message_type_value, @@ -358,7 +368,7 @@ async def get_messages( ) @overload - async def get_message( + async def get_message( # type: ignore self, item_hash: str, message_type: Optional[Type[GenericMessage]] = None, @@ -383,7 +393,7 @@ async def get_message( resp.raise_for_status() except aiohttp.ClientResponseError as e: if e.status == 404: - raise MessageNotFoundError(f"No such hash {item_hash}") + raise MessageNotFoundError(f"No such hash {item_hash}") from e raise e message_raw = await resp.json() if message_raw["status"] == "forgotten": @@ -399,9 +409,9 @@ async def get_message( f"does not match the expected type '{expected_type}'" ) if 
with_status: - return message, message_raw["status"] + return message, message_raw["status"] # type: ignore else: - return message + return message # type: ignore async def get_message_error( self, @@ -448,6 +458,47 @@ async def watch_messages( elif msg.type == aiohttp.WSMsgType.ERROR: break + async def get_estimated_price( + self, + content: ExecutableContent, + ) -> PriceResponse: + cleaned_content = content.dict(exclude_none=True) + item_content: str = json.dumps( + cleaned_content, + separators=(",", ":"), + default=extended_json_encoder, + ) + message = parse_message( + dict( + sender=content.address, + chain=Chain.ETH, + type=( + MessageType.program + if isinstance(content, ProgramContent) + else MessageType.instance + ), + content=cleaned_content, + item_content=item_content, + time=time.time(), + channel=settings.DEFAULT_CHANNEL, + item_type=ItemType.inline, + item_hash=compute_sha256(item_content), + ) + ) + + async with self.http_session.post( + "/api/v0/price/estimate", json=dict(message=message) + ) as resp: + try: + resp.raise_for_status() + response_json = await resp.json() + return PriceResponse( + required_tokens=response_json["required_tokens"], + payment_type=response_json["payment_type"], + ) + except aiohttp.ClientResponseError as e: + raise e + async def get_program_price(self, item_hash: str) -> PriceResponse: async with self.http_session.get(f"/api/v0/price/{item_hash}") as resp: try: @@ -491,15 +542,21 @@ async def get_stored_content( resp = f"Invalid CID: {message.content.item_hash}" else: filename = safe_getattr(message.content, "metadata.name") - hash = message.content.item_hash + item_hash = message.content.item_hash url = ( f"{self.api_server}/api/v0/storage/raw/" - if len(hash) == 64 + if len(item_hash) == 64 else settings.IPFS_GATEWAY - ) + hash - result = StoredContent(filename=filename, hash=hash, url=url) + ) + item_hash + result = StoredContent( + filename=filename, hash=item_hash, url=url, error=None + ) except 
MessageNotFoundError: resp = f"Message not found: {item_hash}" except ForgottenMessageError: resp = f"Message forgotten: {item_hash}" - return result if result else StoredContent(error=resp) + return ( + result + if result + else StoredContent(error=resp, filename=None, hash=None, url=None) + ) diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index 4dc7c9e7..c925a05e 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -44,27 +44,22 @@ class Settings(BaseSettings): HTTP_REQUEST_TIMEOUT = 15.0 DEFAULT_CHANNEL: str = "ALEPH-CLOUDSOLUTIONS" + + # Firecracker runtime for programs DEFAULT_RUNTIME_ID: str = ( "63f07193e6ee9d207b7d1fcf8286f9aee34e6f12f101d2ec77c1229f92964696" ) - DEBIAN_11_ROOTFS_ID: str = ( - "887957042bb0e360da3485ed33175882ce72a70d79f1ba599400ff4802b7cee7" - ) - DEBIAN_12_ROOTFS_ID: str = ( - "6e30de68c6cedfa6b45240c2b51e52495ac6fb1bd4b36457b3d5ca307594d595" - ) - UBUNTU_22_ROOTFS_ID: str = ( - "77fef271aa6ff9825efa3186ca2e715d19e7108279b817201c69c34cedc74c27" - ) - DEBIAN_11_QEMU_ROOTFS_ID: str = ( - "f7e68c568906b4ebcd3cd3c4bfdff96c489cd2a9ef73ba2d7503f244dfd578de" - ) + + # Qemu rootfs for instances DEBIAN_12_QEMU_ROOTFS_ID: str = ( "b6ff5c3a8205d1ca4c7c3369300eeafff498b558f71b851aa2114afd0a532717" ) UBUNTU_22_QEMU_ROOTFS_ID: str = ( "4a0f62da42f4478544616519e6f5d58adb1096e069b392b151d47c3609492d0c" ) + UBUNTU_24_QEMU_ROOTFS_ID: str = ( + "5330dcefe1857bcd97b7b7f24d1420a7d46232d53f27be280c8a7071d88bd84e" + ) DEFAULT_CONFIDENTIAL_FIRMWARE: str = ( "ba5bb13f3abca960b101a759be162b229e2b7e93ecad9d1307e54de887f177ff" @@ -86,6 +81,7 @@ class Settings(BaseSettings): VM_URL_PATH = "https://aleph.sh/vm/{hash}" VM_URL_HOST = "https://{hash_base32}.aleph.sh" IPFS_GATEWAY = "https://ipfs.aleph.cloud/ipfs/" + CRN_URL_FOR_PROGRAMS = "https://dchq.staging.aleph.sh/" # Web3Provider settings TOKEN_DECIMALS = 18 diff --git a/src/aleph/sdk/connectors/superfluid.py b/src/aleph/sdk/connectors/superfluid.py index 4b7274f8..76bbf907 100644 --- 
a/src/aleph/sdk/connectors/superfluid.py +++ b/src/aleph/sdk/connectors/superfluid.py @@ -1,14 +1,19 @@ from __future__ import annotations from decimal import Decimal -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Optional from eth_utils import to_normalized_address from superfluid import CFA_V1, Operation, Web3FlowInfo +from aleph.sdk.evm_utils import ( + FlowUpdate, + from_wei_token, + get_super_token_address, + to_wei_token, +) from aleph.sdk.exceptions import InsufficientFundsError - -from ..evm_utils import get_super_token_address, to_human_readable_token, to_wei_token +from aleph.sdk.types import TokenType if TYPE_CHECKING: from aleph.sdk.chains.ethereum import ETHAccount @@ -44,6 +49,7 @@ async def _execute_operation_with_account(self, operation: Operation) -> str: return await self.account._sign_and_send_transaction(populated_transaction) def can_start_flow(self, flow: Decimal, block=True) -> bool: + """Check if the account has enough funds to start a Superfluid flow of the given size.""" valid = False if self.account.can_transact(block=block): balance = self.account.get_super_token_balance() @@ -51,8 +57,9 @@ def can_start_flow(self, flow: Decimal, block=True) -> bool: valid = balance > MIN_FLOW_4H if not valid and block: raise InsufficientFundsError( - required_funds=float(MIN_FLOW_4H), - available_funds=to_human_readable_token(balance), + token_type=TokenType.ALEPH, + required_funds=float(from_wei_token(MIN_FLOW_4H)), + available_funds=float(from_wei_token(balance)), ) return valid @@ -96,3 +103,51 @@ async def update_flow(self, receiver: str, flow: Decimal) -> str: flow_rate=int(to_wei_token(flow)), ), ) + + async def manage_flow( + self, + receiver: str, + flow: Decimal, + update_type: FlowUpdate, + ) -> Optional[str]: + """ + Update the flow of a Superfluid stream between a sender and receiver. + This function either increases or decreases the flow rate between the sender and receiver, + based on the update_type. 
If no flow exists and the update type is augmentation, it creates a new flow + with the specified rate. If the update type is reduction and the reduction amount brings the flow to zero + or below, the flow is deleted. + + :param receiver: Address of the receiver in hexadecimal format. + :param flow: The flow rate to be added or removed (in ether). + :param update_type: The type of update to perform (augmentation or reduction). + :return: The transaction hash of the executed operation (create, update, or delete flow). + """ + + # Retrieve current flow info + flow_info: Web3FlowInfo = await self.account.get_flow(receiver) + + current_flow_rate_wei: Decimal = Decimal(flow_info["flowRate"] or 0) + flow_rate_wei: int = int(to_wei_token(flow)) + + if update_type == FlowUpdate.INCREASE: + if current_flow_rate_wei > 0: + # Update existing flow by increasing the rate + new_flow_rate_wei = current_flow_rate_wei + flow_rate_wei + new_flow_rate_ether = from_wei_token(new_flow_rate_wei) + return await self.account.update_flow(receiver, new_flow_rate_ether) + else: + # Create a new flow if none exists + return await self.account.create_flow(receiver, flow) + else: + if current_flow_rate_wei > 0: + # Reduce the existing flow + new_flow_rate_wei = current_flow_rate_wei - flow_rate_wei + # Ensure to not leave infinitesimal flows + # Often, there were 1-10 wei remaining in the flow rate, which prevented the flow from being deleted + if new_flow_rate_wei > 99: + new_flow_rate_ether = from_wei_token(new_flow_rate_wei) + return await self.account.update_flow(receiver, new_flow_rate_ether) + else: + # Delete the flow if the new flow rate is zero or negative + return await self.account.delete_flow(receiver) + return None diff --git a/src/aleph/sdk/evm_utils.py b/src/aleph/sdk/evm_utils.py index 4d2026ef..a425d580 100644 --- a/src/aleph/sdk/evm_utils.py +++ b/src/aleph/sdk/evm_utils.py @@ -1,4 +1,5 @@ -from decimal import Decimal +from decimal import ROUND_CEILING, Context, Decimal +from 
enum import Enum from typing import List, Optional, Union from aleph_message.models import Chain @@ -21,12 +22,26 @@ }]""" -def to_human_readable_token(amount: Decimal) -> float: - return float(amount / (Decimal(10) ** Decimal(settings.TOKEN_DECIMALS))) +class FlowUpdate(str, Enum): + REDUCE = "reduce" + INCREASE = "increase" + + +def ether_rounding(amount: Decimal) -> Decimal: + """Rounds the given value to 18 decimals.""" + return amount.quantize( + Decimal(1) / Decimal(10**18), rounding=ROUND_CEILING, context=Context(prec=36) + ) + + +def from_wei_token(amount: Decimal) -> Decimal: + """Converts the given wei value to ether.""" + return ether_rounding(amount / Decimal(10) ** Decimal(settings.TOKEN_DECIMALS)) def to_wei_token(amount: Decimal) -> Decimal: - return amount * Decimal(10) ** Decimal(settings.TOKEN_DECIMALS) + """Converts the given ether value to wei.""" + return Decimal(int(amount * Decimal(10) ** Decimal(settings.TOKEN_DECIMALS))) def get_chain_id(chain: Union[Chain, str, None]) -> Optional[int]: diff --git a/src/aleph/sdk/exceptions.py b/src/aleph/sdk/exceptions.py index a538a31c..05ed755f 100644 --- a/src/aleph/sdk/exceptions.py +++ b/src/aleph/sdk/exceptions.py @@ -1,5 +1,8 @@ from abc import ABC +from .types import TokenType +from .utils import displayable_amount + class QueryError(ABC, ValueError): """The result of an API query is inconsistent.""" @@ -69,14 +72,18 @@ class ForgottenMessageError(QueryError): class InsufficientFundsError(Exception): """Raised when the account does not have enough funds to perform an action""" + token_type: TokenType required_funds: float available_funds: float - def __init__(self, required_funds: float, available_funds: float): + def __init__( + self, token_type: TokenType, required_funds: float, available_funds: float + ): + self.token_type = token_type self.required_funds = required_funds self.available_funds = available_funds super().__init__( - f"Insufficient funds: required {required_funds}, available 
{available_funds}" + f"Insufficient funds ({self.token_type.value}): required {displayable_amount(self.required_funds, decimals=8)}, available {displayable_amount(self.available_funds, decimals=8)}" ) diff --git a/src/aleph/sdk/types.py b/src/aleph/sdk/types.py index c698da5d..05fa9815 100644 --- a/src/aleph/sdk/types.py +++ b/src/aleph/sdk/types.py @@ -83,7 +83,20 @@ class ChainInfo(BaseModel): class StoredContent(BaseModel): + """ + A stored content. + """ + filename: Optional[str] hash: Optional[str] url: Optional[str] error: Optional[str] + + +class TokenType(str, Enum): + """ + A token type. + """ + + GAS = "GAS" + ALEPH = "ALEPH" diff --git a/src/aleph/sdk/utils.py b/src/aleph/sdk/utils.py index c3fc154a..5cbc1e8c 100644 --- a/src/aleph/sdk/utils.py +++ b/src/aleph/sdk/utils.py @@ -8,6 +8,7 @@ import os import subprocess from datetime import date, datetime, time +from decimal import Context, Decimal, InvalidOperation from enum import Enum from pathlib import Path from shutil import make_archive @@ -15,7 +16,6 @@ Any, Dict, Iterable, - List, Mapping, Optional, Protocol, @@ -28,9 +28,38 @@ from uuid import UUID from zipfile import BadZipFile, ZipFile -from aleph_message.models import ItemHash, MessageType -from aleph_message.models.execution.program import Encoding -from aleph_message.models.execution.volume import MachineVolume +from aleph_message.models import ( + Chain, + InstanceContent, + ItemHash, + MachineType, + MessageType, + ProgramContent, +) +from aleph_message.models.execution.base import Payment, PaymentType +from aleph_message.models.execution.environment import ( + FunctionEnvironment, + FunctionTriggers, + HostRequirements, + HypervisorType, + InstanceEnvironment, + MachineResources, + Subscription, + TrustedExecutionEnvironment, +) +from aleph_message.models.execution.instance import RootfsVolume +from aleph_message.models.execution.program import ( + CodeContent, + Encoding, + FunctionRuntime, +) +from aleph_message.models.execution.volume 
import ( + MachineVolume, + ParentVolume, + PersistentVolumeSizeMib, + VolumePersistence, +) +from aleph_message.utils import Mebibytes from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes from jwcrypto.jwa import JWA @@ -177,19 +206,17 @@ def extended_json_encoder(obj: Any) -> Any: def parse_volume(volume_dict: Union[Mapping, MachineVolume]) -> MachineVolume: - # Python 3.9 does not support `isinstance(volume_dict, MachineVolume)`, - # so we need to iterate over all types. if any( isinstance(volume_dict, volume_type) for volume_type in get_args(MachineVolume) ): - return volume_dict + return volume_dict # type: ignore + for volume_type in get_args(MachineVolume): try: return volume_type.parse_obj(volume_dict) except ValueError: - continue - else: - raise ValueError(f"Could not parse volume: {volume_dict}") + pass + raise ValueError(f"Could not parse volume: {volume_dict}") def compute_sha256(s: str) -> str: @@ -234,7 +261,7 @@ def sign_vm_control_payload(payload: Dict[str, str], ephemeral_key) -> str: async def run_in_subprocess( - command: List[str], check: bool = True, stdin_input: Optional[bytes] = None + command: list[str], check: bool = True, stdin_input: Optional[bytes] = None ) -> bytes: """Run the specified command in a subprocess, returns the stdout of the process.""" logger.debug(f"command: {' '.join(command)}") @@ -401,3 +428,166 @@ def safe_getattr(obj, attr, default=None): if obj is default: break return obj + + +def displayable_amount( + amount: Union[str, int, float, Decimal], decimals: int = 18 +) -> str: + """Returns the amount as a string without unnecessary decimals.""" + + str_amount = "" + try: + dec_amount = Decimal(amount) + if decimals: + dec_amount = dec_amount.quantize( + Decimal(1) / Decimal(10**decimals), context=Context(prec=36) + ) + str_amount = str(format(dec_amount.normalize(), "f")) + except ValueError: + logger.error(f"Invalid amount to display: 
{amount}") + exit(1) + except InvalidOperation: + logger.error(f"Invalid operation on amount to display: {amount}") + exit(1) + return str_amount + + +def make_instance_content( + rootfs: str, + rootfs_size: int, + payment: Optional[Payment] = None, + environment_variables: Optional[dict[str, str]] = None, + address: Optional[str] = None, + memory: Optional[int] = None, + vcpus: Optional[int] = None, + timeout_seconds: Optional[float] = None, + allow_amend: bool = False, + internet: bool = True, + aleph_api: bool = True, + hypervisor: Optional[HypervisorType] = None, + trusted_execution: Optional[TrustedExecutionEnvironment] = None, + volumes: Optional[list[Mapping]] = None, + ssh_keys: Optional[list[str]] = None, + metadata: Optional[dict[str, Any]] = None, + requirements: Optional[HostRequirements] = None, +) -> InstanceContent: + """ + Create InstanceContent object given the provided fields. + """ + + address = address or "0x0000000000000000000000000000000000000000" + payment = payment or Payment(chain=Chain.ETH, type=PaymentType.hold, receiver=None) + selected_hypervisor: HypervisorType = hypervisor or HypervisorType.qemu + vcpus = vcpus or settings.DEFAULT_VM_VCPUS + memory = memory or settings.DEFAULT_VM_MEMORY + timeout_seconds = timeout_seconds or settings.DEFAULT_VM_TIMEOUT + volumes = volumes if volumes is not None else [] + + return InstanceContent( + address=address, + allow_amend=allow_amend, + environment=InstanceEnvironment( + internet=internet, + aleph_api=aleph_api, + hypervisor=selected_hypervisor, + trusted_execution=trusted_execution, + ), + variables=environment_variables, + resources=MachineResources( + vcpus=vcpus, + memory=Mebibytes(memory), + seconds=int(timeout_seconds), + ), + rootfs=RootfsVolume( + parent=ParentVolume( + ref=ItemHash(rootfs), + use_latest=True, + ), + size_mib=PersistentVolumeSizeMib(rootfs_size), + persistence=VolumePersistence.host, + ), + volumes=[parse_volume(volume) for volume in volumes], + 
requirements=requirements, + time=datetime.now().timestamp(), + authorized_keys=ssh_keys, + metadata=metadata, + payment=payment, + ) + + +def make_program_content( + program_ref: str, + entrypoint: str, + runtime: str, + metadata: Optional[dict[str, Any]] = None, + address: Optional[str] = None, + vcpus: Optional[int] = None, + memory: Optional[int] = None, + timeout_seconds: Optional[float] = None, + internet: bool = False, + aleph_api: bool = True, + allow_amend: bool = False, + encoding: Encoding = Encoding.zip, + persistent: bool = False, + volumes: Optional[list[Mapping]] = None, + environment_variables: Optional[dict[str, str]] = None, + subscriptions: Optional[list[dict]] = None, + payment: Optional[Payment] = None, +) -> ProgramContent: + """ + Create ProgramContent object given the provided fields. + """ + + address = address or "0x0000000000000000000000000000000000000000" + payment = payment or Payment(chain=Chain.ETH, type=PaymentType.hold, receiver=None) + vcpus = vcpus or settings.DEFAULT_VM_VCPUS + memory = memory or settings.DEFAULT_VM_MEMORY + timeout_seconds = timeout_seconds or settings.DEFAULT_VM_TIMEOUT + volumes = volumes if volumes is not None else [] + subscriptions = ( + [Subscription(**sub) for sub in subscriptions] + if subscriptions is not None + else None + ) + + return ProgramContent( + type=MachineType.vm_function, + address=address, + allow_amend=allow_amend, + code=CodeContent( + encoding=encoding, + entrypoint=entrypoint, + ref=ItemHash(program_ref), + use_latest=True, + ), + on=FunctionTriggers( + http=True, + persistent=persistent, + message=subscriptions, + ), + environment=FunctionEnvironment( + reproducible=False, + internet=internet, + aleph_api=aleph_api, + ), + variables=environment_variables, + resources=MachineResources( + vcpus=vcpus, + memory=Mebibytes(memory), + seconds=int(timeout_seconds), + ), + runtime=FunctionRuntime( + ref=ItemHash(runtime), + use_latest=True, + comment=( + "Official aleph.im runtime" + if 
runtime == settings.DEFAULT_RUNTIME_ID + else "" + ), + ), + volumes=[parse_volume(volume) for volume in volumes], + time=datetime.now().timestamp(), + metadata=metadata, + authorized_keys=[], + payment=payment, + ) diff --git a/tests/unit/aleph_vm_authentication.py b/tests/unit/aleph_vm_authentication.py index 491da51a..6083a119 100644 --- a/tests/unit/aleph_vm_authentication.py +++ b/tests/unit/aleph_vm_authentication.py @@ -263,7 +263,7 @@ async def authenticate_websocket_message( signed_operation = SignedOperation.parse_obj(message["X-SignedOperation"]) if signed_operation.content.domain != domain_name: logger.debug( - f"Invalid domain '{signed_pubkey.content.domain}' != '{domain_name}'" + f"Invalid domain '{signed_operation.content.domain}' != '{domain_name}'" ) raise web.HTTPUnauthorized(reason="Invalid domain") return verify_signed_operation(signed_operation, signed_pubkey) diff --git a/tests/unit/test_asynchronous.py b/tests/unit/test_asynchronous.py index b044e170..e2647590 100644 --- a/tests/unit/test_asynchronous.py +++ b/tests/unit/test_asynchronous.py @@ -7,6 +7,7 @@ Chain, ForgetMessage, InstanceMessage, + ItemHash, MessageType, Payment, PaymentType, @@ -184,12 +185,16 @@ async def test_create_confidential_instance(mock_session_with_post_success): ), hypervisor=HypervisorType.qemu, trusted_execution=TrustedExecutionEnvironment( - firmware="cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe", + firmware=ItemHash( + "cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe" + ), policy=0b1, ), requirements=HostRequirements( node=NodeRequirements( - node_hash="cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe", + node_hash=ItemHash( + "cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe" + ), ) ), ) @@ -285,5 +290,6 @@ async def test_create_instance_insufficient_funds_error( payment=Payment( chain=Chain.ETH, type=PaymentType.hold, + receiver=None, ), ) diff --git a/tests/unit/test_price.py 
b/tests/unit/test_price.py index bed9304a..fe9e3468 100644 --- a/tests/unit/test_price.py +++ b/tests/unit/test_price.py @@ -11,14 +11,14 @@ async def test_get_program_price_valid(): Test that the get_program_price method returns the correct PriceResponse when given a valid item hash. """ - expected_response = { - "required_tokens": 3.0555555555555556e-06, - "payment_type": "superfluid", - } - mock_session = make_mock_get_session(expected_response) + expected = PriceResponse( + required_tokens=3.0555555555555556e-06, + payment_type="superfluid", + ) + mock_session = make_mock_get_session(expected.dict()) async with mock_session: response = await mock_session.get_program_price("cacacacacacaca") - assert response == PriceResponse(**expected_response) + assert response == expected @pytest.mark.asyncio diff --git a/tests/unit/test_superfluid.py b/tests/unit/test_superfluid.py index c2f853bd..74bcc38e 100644 --- a/tests/unit/test_superfluid.py +++ b/tests/unit/test_superfluid.py @@ -7,6 +7,7 @@ from eth_utils import to_checksum_address from aleph.sdk.chains.ethereum import ETHAccount +from aleph.sdk.evm_utils import FlowUpdate def generate_fake_eth_address(): @@ -24,6 +25,7 @@ def mock_superfluid(): mock_superfluid.create_flow = AsyncMock(return_value="0xTransactionHash") mock_superfluid.delete_flow = AsyncMock(return_value="0xTransactionHash") mock_superfluid.update_flow = AsyncMock(return_value="0xTransactionHash") + mock_superfluid.manage_flow = AsyncMock(return_value="0xTransactionHash") # Mock get_flow to return a mock Web3FlowInfo mock_flow_info = {"timestamp": 0, "flowRate": 0, "deposit": 0, "owedDeposit": 0} @@ -98,3 +100,14 @@ async def test_get_flow(eth_account, mock_superfluid): assert flow_info["flowRate"] == 0 assert flow_info["deposit"] == 0 assert flow_info["owedDeposit"] == 0 + + +@pytest.mark.asyncio +async def test_manage_flow(eth_account, mock_superfluid): + receiver = generate_fake_eth_address() + flow = Decimal("0.005") + + tx_hash = await 
eth_account.manage_flow(receiver, flow, FlowUpdate.INCREASE) + + assert tx_hash == "0xTransactionHash" + mock_superfluid.manage_flow.assert_awaited_once() diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index bfca23a5..c560455d 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -1,5 +1,6 @@ import base64 import datetime +from unittest.mock import MagicMock import pytest as pytest from aleph_message.models import ( @@ -158,6 +159,7 @@ def test_parse_immutable_volume(): def test_parse_ephemeral_volume(): volume_dict = { "comment": "Dummy hash", + "mount": "/opt/data", "ephemeral": True, "size_mib": 1, } @@ -169,6 +171,8 @@ def test_parse_ephemeral_volume(): def test_parse_persistent_volume(): volume_dict = { + "comment": "Dummy hash", + "mount": "/opt/data", "parent": { "ref": "QmX8K1c22WmQBAww5ShWQqwMiFif7XFrJD6iFBj7skQZXW", "use_latest": True, @@ -184,9 +188,9 @@ def test_parse_persistent_volume(): assert isinstance(volume, PersistentVolume) -def test_calculate_firmware_hash(mocker): - mock_path = mocker.Mock( - read_bytes=mocker.Mock(return_value=b"abc"), +def test_calculate_firmware_hash(): + mock_path = MagicMock( + read_bytes=MagicMock(return_value=b"abc"), ) assert ( From 7b86ff6039703a5cc0b05f3c3f4926d0db529ada Mon Sep 17 00:00:00 2001 From: philogicae Date: Mon, 24 Feb 2025 14:31:35 +0200 Subject: [PATCH 077/122] Fix programs for SOL payment chain (#201) --- src/aleph/sdk/client/abstract.py | 2 ++ src/aleph/sdk/client/authenticated_http.py | 2 ++ 2 files changed, 4 insertions(+) diff --git a/src/aleph/sdk/client/abstract.py b/src/aleph/sdk/client/abstract.py index 7f9fed8e..2816aa3d 100644 --- a/src/aleph/sdk/client/abstract.py +++ b/src/aleph/sdk/client/abstract.py @@ -364,6 +364,7 @@ async def create_program( runtime: str, metadata: Optional[dict[str, Any]] = None, address: Optional[str] = None, + payment: Optional[Payment] = None, vcpus: Optional[int] = None, memory: Optional[int] = None, timeout_seconds: 
Optional[float] = None, @@ -387,6 +388,7 @@ async def create_program( :param runtime: Runtime to use :param metadata: Metadata to attach to the message :param address: Address to use (Default: account.get_address()) + :param payment: Payment method used to pay for the program (Default: None) :param vcpus: Number of vCPUs to allocate (Default: 1) :param memory: Memory in MB for the VM to be allocated (Default: 128) :param timeout_seconds: Timeout in seconds (Default: 30.0) diff --git a/src/aleph/sdk/client/authenticated_http.py b/src/aleph/sdk/client/authenticated_http.py index 9bb9a1e7..1d0d69d7 100644 --- a/src/aleph/sdk/client/authenticated_http.py +++ b/src/aleph/sdk/client/authenticated_http.py @@ -410,6 +410,7 @@ async def create_program( runtime: str, metadata: Optional[dict[str, Any]] = None, address: Optional[str] = None, + payment: Optional[Payment] = None, vcpus: Optional[int] = None, memory: Optional[int] = None, timeout_seconds: Optional[float] = None, @@ -433,6 +434,7 @@ async def create_program( runtime=runtime, metadata=metadata, address=address, + payment=payment, vcpus=vcpus, memory=memory, timeout_seconds=timeout_seconds, From 43bdc76a2d8787f0e26b0508552ef0e3583ad5a6 Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Thu, 27 Feb 2025 17:10:38 +0100 Subject: [PATCH 078/122] Fix: Remove references to slow server api1.aleph.im Server api1.aleph.im is very slow and outdated (Core i7 from 2018, up to 40 seconds to respond to `/metrics` in the monitoring). We suspect that this causes issues in the monitoring and performance of the network. This branch removes all references to api1 and replaces them with api3 where relevant. 
--- tests/integration/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration/config.py b/tests/integration/config.py index 3e613c18..bd78ef1a 100644 --- a/tests/integration/config.py +++ b/tests/integration/config.py @@ -1,3 +1,3 @@ -TARGET_NODE = "https://api1.aleph.im" +TARGET_NODE = "https://api3.aleph.im" REFERENCE_NODE = "https://api2.aleph.im" TEST_CHANNEL = "INTEGRATION_TESTS" From ef40aa76f2cc232470c9c2ea372425820b058d93 Mon Sep 17 00:00:00 2001 From: Bram Date: Tue, 11 Mar 2025 10:32:40 +0100 Subject: [PATCH 079/122] feat: aleph-pytezos has been renamed to pytezos-crypto (#206) --- pyproject.toml | 2 +- src/aleph/sdk/chains/tezos.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3fd02d17..42156baf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -80,8 +80,8 @@ optional-dependencies.substrate = [ "substrate-interface", ] optional-dependencies.tezos = [ - "aleph-pytezos==3.13.4", "pynacl", + "pytezos-crypto==3.13.4.1", ] urls.Documentation = "https://aleph.im/" urls.Homepage = "https://github.com/aleph-im/aleph-sdk-python" diff --git a/src/aleph/sdk/chains/tezos.py b/src/aleph/sdk/chains/tezos.py index cffa3e78..c4ee08ab 100644 --- a/src/aleph/sdk/chains/tezos.py +++ b/src/aleph/sdk/chains/tezos.py @@ -2,9 +2,9 @@ from pathlib import Path from typing import Dict, Optional, Union -from aleph_pytezos.crypto.key import Key from nacl.public import SealedBox from nacl.signing import SigningKey +from pytezos_crypto.key import Key from .common import BaseAccount, get_fallback_private_key, get_verification_buffer From 08927357be7b9f19a6c34aa4b45ef0a2c763f9b7 Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Tue, 11 Mar 2025 10:36:44 +0100 Subject: [PATCH 080/122] Fix: Redundant dependency in pyproject --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 42156baf..fb0792f9 100644 --- a/pyproject.toml +++ 
b/pyproject.toml @@ -80,7 +80,6 @@ optional-dependencies.substrate = [ "substrate-interface", ] optional-dependencies.tezos = [ - "pynacl", "pytezos-crypto==3.13.4.1", ] urls.Documentation = "https://aleph.im/" From cb5ce9fd789b421f57d0c42ef0fb497511c57d6f Mon Sep 17 00:00:00 2001 From: Hugo Herter Date: Wed, 5 Mar 2025 12:05:34 +0100 Subject: [PATCH 081/122] Fix: Tests were not running on macOS --- .github/workflows/pytest.yml | 22 ++++++++++++++-------- 1 file changed, 14 insertions(+), 8 deletions(-) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index f1af47c5..75bc8193 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -6,9 +6,6 @@ on: branches: - main schedule: - # Run every night at 04:00 (GitHub Actions timezone) - # in order to catch when unfrozen dependency updates - # break the use of the library. - cron: '4 0 * * *' jobs: @@ -16,8 +13,8 @@ jobs: strategy: fail-fast: false matrix: - python-version: [ "3.9", "3.10", "3.11", "3.12" ] - os: [ubuntu-22.04, ubuntu-24.04] + python-version: ["3.9", "3.10", "3.11", "3.12"] + os: [ubuntu-22.04, ubuntu-24.04, macos-14, macos-15] runs-on: ${{ matrix.os }} steps: @@ -26,15 +23,24 @@ jobs: with: python-version: ${{ matrix.python-version }} - - run: | + - name: "apt-get install" + run: | sudo apt-get update - sudo apt-get install -y python3-pip libsodium-dev + sudo apt-get install -y python3-pip libsodium-dev libgmp-dev + if: runner.os == 'Linux' - run: | + brew install libsodium + echo "DYLD_LIBRARY_PATH=$(brew --prefix libsodium)/lib" >> $GITHUB_ENV + if: runner.os == 'macOS' + + - name: "Install Hatch" + run: | python3 -m venv /tmp/venv /tmp/venv/bin/python -m pip install --upgrade pip hatch coverage - - run: | + - name: "Run Tests" + run: | /tmp/venv/bin/pip freeze /tmp/venv/bin/hatch run testing:pip freeze /tmp/venv/bin/hatch run testing:test From e40033f9461cb3c021e28aa8a39faa0826fd05c9 Mon Sep 17 00:00:00 2001 From: nesitor Date: Tue, 8 Apr 2025 16:50:09 
+0200 Subject: [PATCH 082/122] Feature: Implement Sonic Blockchain. (#210) --- pyproject.toml | 6 +++--- src/aleph/sdk/account.py | 1 + src/aleph/sdk/conf.py | 6 ++++++ 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index fb0792f9..edb37f7f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,15 +30,15 @@ dynamic = [ "version" ] dependencies = [ "aiohttp>=3.8.3", "aioresponses>=0.7.6", - "aleph-message>=0.6", + "aleph-message @ git+https://github.com/aleph-im/aleph-message.git@andres-feature-integrate_sonic_blockchain", "aleph-superfluid>=0.2.1", - "base58==2.1.1", # Needed now as default with _load_account changement + "base58==2.1.1", # Needed now as default with _load_account changement "coincurve; python_version<'3.11'", "coincurve>=19; python_version>='3.11'", "eth-abi>=4; python_version>='3.11'", "eth-typing==4.3.1", "jwcrypto==1.5.6", - "pynacl==1.5", # Needed now as default with _load_account changement + "pynacl==1.5", # Needed now as default with _load_account changement "python-magic", "typing-extensions", "web3==6.3", diff --git a/src/aleph/sdk/account.py b/src/aleph/sdk/account.py index 872ee3c4..15dd79d1 100644 --- a/src/aleph/sdk/account.py +++ b/src/aleph/sdk/account.py @@ -36,6 +36,7 @@ Chain.OPTIMISM: EVMAccount, Chain.POL: EVMAccount, Chain.SOL: SOLAccount, + Chain.SONIC: EVMAccount, Chain.WORLDCHAIN: EVMAccount, Chain.ZORA: EVMAccount, } diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index c925a05e..6a1fcf46 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -163,6 +163,10 @@ class Settings(BaseSettings): chain_id=137, rpc="https://polygon.gateway.tenderly.co", ), + Chain.SONIC: ChainInfo( + chain_id=146, + rpc="https://rpc.soniclabs.com", + ), Chain.WORLDCHAIN: ChainInfo( chain_id=480, rpc="https://worldchain-mainnet.gateway.tenderly.co", @@ -189,6 +193,7 @@ class Settings(BaseSettings): CHAINS_MODE_ACTIVE: Optional[bool] = None CHAINS_OPTIMISM_ACTIVE: Optional[bool] 
= None CHAINS_POL_ACTIVE: Optional[bool] = None + CHAINS_SONIC_ACTIVE: Optional[bool] = None CHAINS_WORLDCHAIN_ACTIVE: Optional[bool] = None CHAINS_ZORA_ACTIVE: Optional[bool] = None @@ -208,6 +213,7 @@ class Settings(BaseSettings): CHAINS_MODE_RPC: Optional[str] = None CHAINS_OPTIMISM_RPC: Optional[str] = None CHAINS_POL_RPC: Optional[str] = None + CHAINS_SONIC_RPC: Optional[str] = None CHAINS_WORLDCHAIN_RPC: Optional[str] = None CHAINS_ZORA_RPC: Optional[str] = None From 0c848b59f9696375fbe100771ef082f3e621bf13 Mon Sep 17 00:00:00 2001 From: Antony JIN <91880456+Antonyjin@users.noreply.github.com> Date: Thu, 10 Apr 2025 21:40:22 +0200 Subject: [PATCH 083/122] Upgrade pydantic version (#179) * Migrate to Pydantic v2, update model validation and fix async issues - Migrated to Pydantic v2: - Replaced deprecated `parse_obj()` and `parse_raw()` with `model_validate()` and `model_validate_json()`. - Replaced `.dict()` with `.model_dump()` for serializing models to dictionaries. - Updated `validator` to `field_validator` and `root_validator` to `model_validator` to comply with Pydantic v2 syntax changes. - Fixed asyncio issues: - Added `await` for asynchronous methods like `raise_for_status()` in `RemoteAccount` and other HTTP operations to avoid `RuntimeWarning`. - Updated config handling: - Used `ClassVar` for constants in `Settings` and other configuration classes. - Replaced `Config` with `ConfigDict` in Pydantic models to follow v2 conventions. - Added default values for missing fields in chain configurations (`CHAINS_SEPOLIA_ACTIVE`, etc.). - Adjusted signature handling: - Updated the signing logic to prepend `0x` in the `BaseAccount` signature generation to ensure correct Ethereum address formatting. - Minor fixes: - Resolved issue with extra fields not being allowed by default by specifying `extra="allow"` or `extra="forbid"` where necessary. - Fixed tests to account for changes in model validation and serialization behavior. 
- Added `pydantic-settings` as a new dependency for configuration management. * fix: lint tests were failing - Updated all instances of **extra_fields to ensure proper handling of Optional dictionaries using `(extra_fields or {})` pattern. - Added proper return statements in `AlephHttpClient.get_message_status` to return parsed JSON data as a `MessageStatus` object. - Updated `Settings` class in `conf.py` to correct DNS resolvers type and simplify the `model_config` definition. - Refactored `parse_volume` to ensure correct handling of Mapping types and MachineVolume types, avoiding TypeErrors. - Improved field validation and model validation in `SignedPubKeyHeader` by using correct Pydantic v2 validation decorators and ensuring compatibility with the new model behavior. - Applied formatting and consistency fixes for `model_dump` usage and indentation improvements in test files. * feat: add pyproject-fmt * fix: run pyproject-fmt * Post-SOL fixes (#178) * Missing chain field on auth * Fix Signature of Solana operation for CRN * Add export_private_key func for accounts * Improve _load_account * Add chain arg to _load_account * Increase default HTTP_REQUEST_TIMEOUT * Typing --------- Co-authored-by: Olivier Le Thanh Duong * Migrate to Pydantic v2, update model validation and fix async issues - Migrated to Pydantic v2: - Replaced deprecated `parse_obj()` and `parse_raw()` with `model_validate()` and `model_validate_json()`. - Replaced `.dict()` with `.model_dump()` for serializing models to dictionaries. - Updated `validator` to `field_validator` and `root_validator` to `model_validator` to comply with Pydantic v2 syntax changes. - Fixed asyncio issues: - Added `await` for asynchronous methods like `raise_for_status()` in `RemoteAccount` and other HTTP operations to avoid `RuntimeWarning`. - Updated config handling: - Used `ClassVar` for constants in `Settings` and other configuration classes. 
- Replaced `Config` with `ConfigDict` in Pydantic models to follow v2 conventions. - Added default values for missing fields in chain configurations (`CHAINS_SEPOLIA_ACTIVE`, etc.). - Adjusted signature handling: - Updated the signing logic to prepend `0x` in the `BaseAccount` signature generation to ensure correct Ethereum address formatting. - Minor fixes: - Resolved issue with extra fields not being allowed by default by specifying `extra="allow"` or `extra="forbid"` where necessary. - Fixed tests to account for changes in model validation and serialization behavior. - Added `pydantic-settings` as a new dependency for configuration management. * fix: add explicit float type for HTTP_REQUEST_TIMEOUT to comply with Pydantic v2 requirements Pydantic v2 requires explicit type annotations for fields, so added `float` to ensure proper validation of HTTP_REQUEST_TIMEOUT. * Fix: Linting tests did not pass: * Fix: Project don't use the good version of aleph-message There were changes made on aleph-message on the main branch about pydantic version. Using the version by the url and then change it later after the release. 
* fix: Wrong aleph-message version * Fix: list[str] rise an error in ubuntu 20.04 Using List from typing instead to assure the compatibility between python3.8 and above * style: isort * fix: Hugo comments * Add pydantic for better mypy tests + Fixes * fix: Changing version of aleph-message * style: Missing type for URL * style: Missing type for URL * fix: Changing version of aleph-message and fix mypy Changing the version from the branch to the main of aleph-message mypy rose some errors about missing name argument, so setting the as None because they are optional * fix: Changing version of aleph-message * fix: Changing version of pytezos * Changes for new pricing system (#199) - Move/improve flow code parts from CLI to SDK - Add utils functions - Add `make_instance_content` and `make_program_content` - Refactor `create_instance` and `create_program` - Add `get_estimated_price` - Fixes for mypy/ruff/pytest - Minor improvements - Remove firecracker rootfs hashes for instances * Migrate to Pydantic v2, update model validation and fix async issues - Migrated to Pydantic v2: - Replaced deprecated `parse_obj()` and `parse_raw()` with `model_validate()` and `model_validate_json()`. - Replaced `.dict()` with `.model_dump()` for serializing models to dictionaries. - Updated `validator` to `field_validator` and `root_validator` to `model_validator` to comply with Pydantic v2 syntax changes. - Fixed asyncio issues: - Added `await` for asynchronous methods like `raise_for_status()` in `RemoteAccount` and other HTTP operations to avoid `RuntimeWarning`. - Updated config handling: - Used `ClassVar` for constants in `Settings` and other configuration classes. - Replaced `Config` with `ConfigDict` in Pydantic models to follow v2 conventions. - Added default values for missing fields in chain configurations (`CHAINS_SEPOLIA_ACTIVE`, etc.). 
- Adjusted signature handling: - Updated the signing logic to prepend `0x` in the `BaseAccount` signature generation to ensure correct Ethereum address formatting. - Minor fixes: - Resolved issue with extra fields not being allowed by default by specifying `extra="allow"` or `extra="forbid"` where necessary. - Fixed tests to account for changes in model validation and serialization behavior. - Added `pydantic-settings` as a new dependency for configuration management. * fix: lint tests were failing - Updated all instances of **extra_fields to ensure proper handling of Optional dictionaries using `(extra_fields or {})` pattern. - Added proper return statements in `AlephHttpClient.get_message_status` to return parsed JSON data as a `MessageStatus` object. - Updated `Settings` class in `conf.py` to correct DNS resolvers type and simplify the `model_config` definition. - Refactored `parse_volume` to ensure correct handling of Mapping types and MachineVolume types, avoiding TypeErrors. - Improved field validation and model validation in `SignedPubKeyHeader` by using correct Pydantic v2 validation decorators and ensuring compatibility with the new model behavior. - Applied formatting and consistency fixes for `model_dump` usage and indentation improvements in test files. * Migrate to Pydantic v2, update model validation and fix async issues - Migrated to Pydantic v2: - Replaced deprecated `parse_obj()` and `parse_raw()` with `model_validate()` and `model_validate_json()`. - Replaced `.dict()` with `.model_dump()` for serializing models to dictionaries. - Updated `validator` to `field_validator` and `root_validator` to `model_validator` to comply with Pydantic v2 syntax changes. - Fixed asyncio issues: - Added `await` for asynchronous methods like `raise_for_status()` in `RemoteAccount` and other HTTP operations to avoid `RuntimeWarning`. - Updated config handling: - Used `ClassVar` for constants in `Settings` and other configuration classes. 
- Replaced `Config` with `ConfigDict` in Pydantic models to follow v2 conventions. - Added default values for missing fields in chain configurations (`CHAINS_SEPOLIA_ACTIVE`, etc.). - Adjusted signature handling: - Updated the signing logic to prepend `0x` in the `BaseAccount` signature generation to ensure correct Ethereum address formatting. - Minor fixes: - Resolved issue with extra fields not being allowed by default by specifying `extra="allow"` or `extra="forbid"` where necessary. - Fixed tests to account for changes in model validation and serialization behavior. - Added `pydantic-settings` as a new dependency for configuration management. * Fix: Linting tests did not pass: * fix: Wrong aleph-message version * fix: Hugo comments * Add pydantic for better mypy tests + Fixes * fix: Changing version of aleph-message * style: Missing type for URL * fix: Changing version of aleph-message and fix mypy Changing the version from the branch to the main of aleph-message mypy rose some errors about missing name argument, so setting the as None because they are optional * fix: Changing version of aleph-message * Fix: Missing pydantic_core and wrong version of tezos * Fix: Access to PersistentVolumeSizeMib is incompatible after migrating to Pydantic2 Using model_validate to access it * Fix: Wrong name given to the variable * Style: isort * Fix: PersistentVolumeSizeMib no longer exist This class has been deleted from aleph_message and the size is now inside the PersistentVolume class * Fix: Update last `aleph-message` version and use again `PersistentVolumeSizeMib` class * fix: invalid signature cause by `0x` + signature.hex() * fix: add '0x' to the signature if not here (error happenings only on unit test) * Refactor: Apply the `.hex()` quick fix on the ETHAccount class instead on the base one as other chains can be affected. 
* fix: pydantic model should use `.model_dump()` instead of `dict()` * fix: add dummy signature for unauthenticated price estimates When estimating prices without authentication, there's no valid signature available. This fix uses a dummy signature so that message validation passes in these cases. --------- Co-authored-by: Laurent Peuch Co-authored-by: philogicae Co-authored-by: Olivier Le Thanh Duong Co-authored-by: philogicae <38438271+philogicae@users.noreply.github.com> Co-authored-by: Andres D. Molins Co-authored-by: 1yam Co-authored-by: Andres D. Molins --- pyproject.toml | 8 +-- src/aleph/sdk/chains/common.py | 1 + src/aleph/sdk/chains/ethereum.py | 18 ++++++- src/aleph/sdk/chains/remote.py | 4 +- src/aleph/sdk/client/authenticated_http.py | 20 ++++---- src/aleph/sdk/client/http.py | 37 +++++++------- .../sdk/client/vm_confidential_client.py | 2 +- src/aleph/sdk/conf.py | 43 ++++++++-------- src/aleph/sdk/domain.py | 4 +- src/aleph/sdk/query/responses.py | 10 ++-- src/aleph/sdk/types.py | 10 ++-- src/aleph/sdk/utils.py | 6 +-- src/aleph/sdk/vm/cache.py | 2 +- tests/unit/aleph_vm_authentication.py | 49 +++++++++---------- tests/unit/conftest.py | 8 +-- tests/unit/test_price.py | 2 +- tests/unit/test_remote_account.py | 2 +- tests/unit/test_utils.py | 18 +++---- tests/unit/test_vm_client.py | 4 +- 19 files changed, 134 insertions(+), 114 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index edb37f7f..691596fd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,15 +30,17 @@ dynamic = [ "version" ] dependencies = [ "aiohttp>=3.8.3", "aioresponses>=0.7.6", - "aleph-message @ git+https://github.com/aleph-im/aleph-message.git@andres-feature-integrate_sonic_blockchain", + "aleph-message>=1", "aleph-superfluid>=0.2.1", - "base58==2.1.1", # Needed now as default with _load_account changement + "base58==2.1.1", # Needed now as default with _load_account changement "coincurve; python_version<'3.11'", "coincurve>=19; python_version>='3.11'", "eth-abi>=4; 
python_version>='3.11'", "eth-typing==4.3.1", "jwcrypto==1.5.6", - "pynacl==1.5", # Needed now as default with _load_account changement + "pydantic>=2,<3", + "pydantic-settings>=2", + "pynacl==1.5", # Needed now as default with _load_account changement "python-magic", "typing-extensions", "web3==6.3", diff --git a/src/aleph/sdk/chains/common.py b/src/aleph/sdk/chains/common.py index 0a90183c..d2714d62 100644 --- a/src/aleph/sdk/chains/common.py +++ b/src/aleph/sdk/chains/common.py @@ -73,6 +73,7 @@ async def sign_message(self, message: Dict) -> Dict: message = self._setup_sender(message) signature = await self.sign_raw(get_verification_buffer(message)) message["signature"] = signature.hex() + return message @abstractmethod diff --git a/src/aleph/sdk/chains/ethereum.py b/src/aleph/sdk/chains/ethereum.py index c185d174..863e2bbf 100644 --- a/src/aleph/sdk/chains/ethereum.py +++ b/src/aleph/sdk/chains/ethereum.py @@ -2,7 +2,7 @@ import base64 from decimal import Decimal from pathlib import Path -from typing import Awaitable, Optional, Union +from typing import Awaitable, Dict, Optional, Union from aleph_message.models import Chain from eth_account import Account # type: ignore @@ -80,6 +80,22 @@ async def sign_raw(self, buffer: bytes) -> bytes: sig = self._account.sign_message(msghash) return sig["signature"] + async def sign_message(self, message: Dict) -> Dict: + """ + Returns a signed message from an aleph.im message. 
+ Args: + message: Message to sign + Returns: + Dict: Signed message + """ + signed_message = await super().sign_message(message) + + # Apply that fix as seems that sometimes the .hex() method doesn't add the 0x str at the beginning + if not str(signed_message["signature"]).startswith("0x"): + signed_message["signature"] = "0x" + signed_message["signature"] + + return signed_message + def connect_chain(self, chain: Optional[Chain] = None): self.chain = chain if self.chain: diff --git a/src/aleph/sdk/chains/remote.py b/src/aleph/sdk/chains/remote.py index 931b68f3..917cf39b 100644 --- a/src/aleph/sdk/chains/remote.py +++ b/src/aleph/sdk/chains/remote.py @@ -52,7 +52,7 @@ async def from_crypto_host( session = aiohttp.ClientSession(connector=connector) async with session.get(f"{host}/properties") as response: - response.raise_for_status() + await response.raise_for_status() data = await response.json() properties = AccountProperties(**data) @@ -75,7 +75,7 @@ def private_key(self): async def sign_message(self, message: Dict) -> Dict: """Sign a message inplace.""" async with self._session.post(f"{self._host}/sign", json=message) as response: - response.raise_for_status() + await response.raise_for_status() return await response.json() async def sign_raw(self, buffer: bytes) -> bytes: diff --git a/src/aleph/sdk/client/authenticated_http.py b/src/aleph/sdk/client/authenticated_http.py index 1d0d69d7..2975e112 100644 --- a/src/aleph/sdk/client/authenticated_http.py +++ b/src/aleph/sdk/client/authenticated_http.py @@ -251,7 +251,7 @@ async def _broadcast( url = "/api/v0/messages" logger.debug(f"Posting message on {url}") - message_dict = message.dict(include=self.BROADCAST_MESSAGE_FIELDS) + message_dict = message.model_dump(include=self.BROADCAST_MESSAGE_FIELDS) async with self.http_session.post( url, json={ @@ -293,7 +293,7 @@ async def create_post( ) message, status, _ = await self.submit( - content=content.dict(exclude_none=True), + 
content=content.model_dump(exclude_none=True), message_type=MessageType.post, channel=channel, allow_inlining=inline, @@ -321,7 +321,7 @@ async def create_aggregate( ) message, status, _ = await self.submit( - content=content_.dict(exclude_none=True), + content=content_.model_dump(exclude_none=True), message_type=MessageType.aggregate, channel=channel, allow_inlining=inline, @@ -395,7 +395,7 @@ async def create_store( content = StoreContent.parse_obj(values) message, status, _ = await self.submit( - content=content.dict(exclude_none=True), + content=content.model_dump(exclude_none=True), message_type=MessageType.store, channel=channel, allow_inlining=True, @@ -449,7 +449,7 @@ async def create_program( ) message, status, _ = await self.submit( - content=content.dict(exclude_none=True), + content=content.model_dump(exclude_none=True), message_type=MessageType.program, channel=channel, storage_engine=storage_engine, @@ -525,7 +525,7 @@ async def create_instance( ) message, status, response = await self.submit( - content=content.dict(exclude_none=True), + content=content.model_dump(exclude_none=True), message_type=MessageType.instance, channel=channel, storage_engine=storage_engine, @@ -573,7 +573,7 @@ async def forget( ) message, status, _ = await self.submit( - content=content.dict(exclude_none=True), + content=content.model_dump(exclude_none=True), message_type=MessageType.forget, channel=channel, storage_engine=storage_engine, @@ -617,11 +617,11 @@ async def _storage_push_file_with_message( # Prepare the STORE message message = await self.generate_signed_message( message_type=MessageType.store, - content=store_content.dict(exclude_none=True), + content=store_content.model_dump(exclude_none=True), channel=channel, ) metadata = { - "message": message.dict(exclude_none=True), + "message": message.model_dump(exclude_none=True), "sync": sync, } data.add_field( @@ -665,7 +665,7 @@ async def _upload_file_native( item_hash=ItemHash(file_hash), mime_type=mime_type, # type: 
ignore time=time.time(), - **extra_fields, + **(extra_fields or {}), ) message, _ = await self._storage_push_file_with_message( file_content=file_content, diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index f4e8b898..3d42d490 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -191,7 +191,7 @@ async def get_posts( posts: List[Post] = [] for post_raw in posts_raw: try: - posts.append(Post.parse_obj(post_raw)) + posts.append(Post.model_validate(post_raw)) except ValidationError as e: if not ignore_invalid_messages: raise e @@ -462,30 +462,31 @@ async def get_estimated_price( self, content: ExecutableContent, ) -> PriceResponse: - cleaned_content = content.dict(exclude_none=True) + cleaned_content = content.model_dump(exclude_none=True) item_content: str = json.dumps( cleaned_content, separators=(",", ":"), default=extended_json_encoder, ) - message = parse_message( - dict( - sender=content.address, - chain=Chain.ETH, - type=( - MessageType.program - if isinstance(content, ProgramContent) - else MessageType.instance - ), - content=cleaned_content, - item_content=item_content, - time=time.time(), - channel=settings.DEFAULT_CHANNEL, - item_type=ItemType.inline, - item_hash=compute_sha256(item_content), - ) + message_dict = dict( + sender=content.address, + chain=Chain.ETH, + type=( + MessageType.program + if isinstance(content, ProgramContent) + else MessageType.instance + ), + content=cleaned_content, + item_content=item_content, + time=time.time(), + channel=settings.DEFAULT_CHANNEL, + item_type=ItemType.inline, + item_hash=compute_sha256(item_content), + signature="0x" + "0" * 130, # Add a dummy signature to pass validation ) + message = parse_message(message_dict) + async with self.http_session.post( "/api/v0/price/estimate", json=dict(message=message) ) as resp: diff --git a/src/aleph/sdk/client/vm_confidential_client.py b/src/aleph/sdk/client/vm_confidential_client.py index e027b384..0d9d6e18 100644 --- 
a/src/aleph/sdk/client/vm_confidential_client.py +++ b/src/aleph/sdk/client/vm_confidential_client.py @@ -105,7 +105,7 @@ async def measurement(self, vm_id: ItemHash) -> SEVMeasurement: status, text = await self.perform_operation( vm_id, "confidential/measurement", method="GET" ) - sev_measurement = SEVMeasurement.parse_raw(text) + sev_measurement = SEVMeasurement.model_validate_json(text) return sev_measurement async def validate_measure( diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index 6a1fcf46..b289cc2b 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -3,11 +3,12 @@ import os from pathlib import Path from shutil import which -from typing import Dict, Optional, Union +from typing import ClassVar, Dict, List, Optional, Union from aleph_message.models import Chain from aleph_message.models.execution.environment import HypervisorType -from pydantic import BaseModel, BaseSettings, Field +from pydantic import BaseModel, Field +from pydantic_settings import BaseSettings, SettingsConfigDict from aleph.sdk.types import ChainInfo @@ -41,7 +42,7 @@ class Settings(BaseSettings): REMOTE_CRYPTO_HOST: Optional[str] = None REMOTE_CRYPTO_UNIX_SOCKET: Optional[str] = None ADDRESS_TO_USE: Optional[str] = None - HTTP_REQUEST_TIMEOUT = 15.0 + HTTP_REQUEST_TIMEOUT: ClassVar[float] = 15.0 DEFAULT_CHANNEL: str = "ALEPH-CLOUDSOLUTIONS" @@ -78,14 +79,14 @@ class Settings(BaseSettings): CODE_USES_SQUASHFS: bool = which("mksquashfs") is not None # True if command exists - VM_URL_PATH = "https://aleph.sh/vm/{hash}" - VM_URL_HOST = "https://{hash_base32}.aleph.sh" - IPFS_GATEWAY = "https://ipfs.aleph.cloud/ipfs/" - CRN_URL_FOR_PROGRAMS = "https://dchq.staging.aleph.sh/" + VM_URL_PATH: ClassVar[str] = "https://aleph.sh/vm/{hash}" + VM_URL_HOST: ClassVar[str] = "https://{hash_base32}.aleph.sh" + IPFS_GATEWAY: ClassVar[str] = "https://ipfs.aleph.cloud/ipfs/" + CRN_URL_FOR_PROGRAMS: ClassVar[str] = "https://dchq.staging.aleph.sh/" # Web3Provider settings - 
TOKEN_DECIMALS = 18 - TX_TIMEOUT = 60 * 3 + TOKEN_DECIMALS: ClassVar[int] = 18 + TX_TIMEOUT: ClassVar[int] = 60 * 3 CHAINS: Dict[Union[Chain, str], ChainInfo] = { # TESTNETS "SEPOLIA": ChainInfo( @@ -220,16 +221,15 @@ class Settings(BaseSettings): DEFAULT_CHAIN: Chain = Chain.ETH # Dns resolver - DNS_IPFS_DOMAIN = "ipfs.public.aleph.sh" - DNS_PROGRAM_DOMAIN = "program.public.aleph.sh" - DNS_INSTANCE_DOMAIN = "instance.public.aleph.sh" - DNS_STATIC_DOMAIN = "static.public.aleph.sh" - DNS_RESOLVERS = ["9.9.9.9", "1.1.1.1"] - - class Config: - env_prefix = "ALEPH_" - case_sensitive = False - env_file = ".env" + DNS_IPFS_DOMAIN: ClassVar[str] = "ipfs.public.aleph.sh" + DNS_PROGRAM_DOMAIN: ClassVar[str] = "program.public.aleph.sh" + DNS_INSTANCE_DOMAIN: ClassVar[str] = "instance.public.aleph.sh" + DNS_STATIC_DOMAIN: ClassVar[str] = "static.public.aleph.sh" + DNS_RESOLVERS: ClassVar[List[str]] = ["9.9.9.9", "1.1.1.1"] + + model_config = SettingsConfigDict( + env_prefix="ALEPH_", case_sensitive=False, env_file=".env" + ) class MainConfiguration(BaseModel): @@ -240,8 +240,7 @@ class MainConfiguration(BaseModel): path: Path chain: Chain - class Config: - use_enum_values = True + model_config = SettingsConfigDict(use_enum_values=True) # Settings singleton @@ -297,7 +296,7 @@ def save_main_configuration(file_path: Path, data: MainConfiguration): Synchronously save a single ChainAccount object as JSON to a file. 
""" with file_path.open("w") as file: - data_serializable = data.dict() + data_serializable = data.model_dump() data_serializable["path"] = str(data_serializable["path"]) json.dump(data_serializable, file, indent=4) diff --git a/src/aleph/sdk/domain.py b/src/aleph/sdk/domain.py index a8f3fd82..525e6cef 100644 --- a/src/aleph/sdk/domain.py +++ b/src/aleph/sdk/domain.py @@ -52,11 +52,11 @@ def raise_error(self, status: Dict[str, bool]): def hostname_from_url(url: Union[HttpUrl, str]) -> Hostname: """Extract FQDN from url""" - parsed = urlparse(url) + parsed = urlparse(str(url)) if all([parsed.scheme, parsed.netloc]) is True: url = parsed.netloc - return Hostname(url) + return Hostname(str(url)) async def get_target_type(fqdn: Hostname) -> Optional[TargetType]: diff --git a/src/aleph/sdk/query/responses.py b/src/aleph/sdk/query/responses.py index 4b598f12..277a1bea 100644 --- a/src/aleph/sdk/query/responses.py +++ b/src/aleph/sdk/query/responses.py @@ -9,7 +9,7 @@ ItemType, MessageConfirmation, ) -from pydantic import BaseModel, Field +from pydantic import BaseModel, ConfigDict, Field class Post(BaseModel): @@ -48,9 +48,9 @@ class Post(BaseModel): ref: Optional[Union[str, Any]] = Field( description="Other message referenced by this one" ) + address: Optional[str] = Field(description="Address of the sender") - class Config: - allow_extra = False + model_config = ConfigDict(extra="forbid") class PaginationResponse(BaseModel): @@ -64,14 +64,14 @@ class PostsResponse(PaginationResponse): """Response from an aleph.im node API on the path /api/v0/posts.json""" posts: List[Post] - pagination_item = "posts" + pagination_item: str = "posts" class MessagesResponse(PaginationResponse): """Response from an aleph.im node API on the path /api/v0/messages.json""" messages: List[AlephMessage] - pagination_item = "messages" + pagination_item: str = "messages" class PriceResponse(BaseModel): diff --git a/src/aleph/sdk/types.py b/src/aleph/sdk/types.py index 05fa9815..cf23f19d 100644 
--- a/src/aleph/sdk/types.py +++ b/src/aleph/sdk/types.py @@ -2,7 +2,7 @@ from enum import Enum from typing import Dict, Optional, Protocol, TypeVar -from pydantic import BaseModel +from pydantic import BaseModel, Field __all__ = ("StorageEnum", "Account", "AccountFromPrivateKey", "GenericMessage") @@ -87,10 +87,10 @@ class StoredContent(BaseModel): A stored content. """ - filename: Optional[str] - hash: Optional[str] - url: Optional[str] - error: Optional[str] + filename: Optional[str] = Field(default=None) + hash: Optional[str] = Field(default=None) + url: Optional[str] = Field(default=None) + error: Optional[str] = Field(default=None) class TokenType(str, Enum): diff --git a/src/aleph/sdk/utils.py b/src/aleph/sdk/utils.py index 5cbc1e8c..31b2be8d 100644 --- a/src/aleph/sdk/utils.py +++ b/src/aleph/sdk/utils.py @@ -28,6 +28,7 @@ from uuid import UUID from zipfile import BadZipFile, ZipFile +import pydantic_core from aleph_message.models import ( Chain, InstanceContent, @@ -63,7 +64,6 @@ from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes from jwcrypto.jwa import JWA -from pydantic.json import pydantic_encoder from aleph.sdk.conf import settings from aleph.sdk.types import GenericMessage, SEVInfo, SEVMeasurement @@ -202,7 +202,7 @@ def extended_json_encoder(obj: Any) -> Any: elif isinstance(obj, time): return obj.hour * 3600 + obj.minute * 60 + obj.second + obj.microsecond / 1e6 else: - return pydantic_encoder(obj) + return pydantic_core.to_jsonable_python(obj) def parse_volume(volume_dict: Union[Mapping, MachineVolume]) -> MachineVolume: @@ -213,7 +213,7 @@ def parse_volume(volume_dict: Union[Mapping, MachineVolume]) -> MachineVolume: for volume_type in get_args(MachineVolume): try: - return volume_type.parse_obj(volume_dict) + return volume_type.model_validate(volume_dict) except ValueError: pass raise ValueError(f"Could not parse volume: {volume_dict}") diff --git 
a/src/aleph/sdk/vm/cache.py b/src/aleph/sdk/vm/cache.py index ff5ca7c8..a7ac6acc 100644 --- a/src/aleph/sdk/vm/cache.py +++ b/src/aleph/sdk/vm/cache.py @@ -70,7 +70,7 @@ def __init__( ) self.cache = {} - self.api_host = connector_url if connector_url else settings.API_HOST + self.api_host = str(connector_url) if connector_url else settings.API_HOST async def get(self, key: str) -> Optional[bytes]: sanitized_key = sanitize_cache_key(key) diff --git a/tests/unit/aleph_vm_authentication.py b/tests/unit/aleph_vm_authentication.py index 6083a119..c1710c16 100644 --- a/tests/unit/aleph_vm_authentication.py +++ b/tests/unit/aleph_vm_authentication.py @@ -1,4 +1,6 @@ # Keep datetime import as is as it allow patching in test +from __future__ import annotations + import datetime import functools import json @@ -13,7 +15,7 @@ from eth_account.messages import encode_defunct from jwcrypto import jwk from jwcrypto.jwa import JWA -from pydantic import BaseModel, ValidationError, root_validator, validator +from pydantic import BaseModel, ValidationError, field_validator, model_validator from aleph.sdk.utils import bytes_from_hex @@ -63,23 +65,21 @@ class SignedPubKeyHeader(BaseModel): signature: bytes payload: bytes - @validator("signature") + @field_validator("signature") def signature_must_be_hex(cls, value: bytes) -> bytes: """Convert the signature from hexadecimal to bytes""" - return bytes_from_hex(value.decode()) - @validator("payload") + @field_validator("payload") def payload_must_be_hex(cls, value: bytes) -> bytes: """Convert the payload from hexadecimal to bytes""" - return bytes_from_hex(value.decode()) - @root_validator(pre=False, skip_on_failure=True) - def check_expiry(cls, values) -> Dict[str, bytes]: + @model_validator(mode="after") # type: ignore + def check_expiry(cls, values: SignedPubKeyHeader) -> SignedPubKeyHeader: """Check that the token has not expired""" - payload: bytes = values["payload"] - content = SignedPubKeyPayload.parse_raw(payload) + payload: 
bytes = values.payload + content = SignedPubKeyPayload.model_validate_json(payload) if not is_token_still_valid(content.expires): msg = "Token expired" @@ -87,12 +87,11 @@ def check_expiry(cls, values) -> Dict[str, bytes]: return values - @root_validator(pre=False, skip_on_failure=True) - def check_signature(cls, values: Dict[str, bytes]) -> Dict[str, bytes]: - """Check that the signature is valid""" - signature: bytes = values["signature"] - payload: bytes = values["payload"] - content = SignedPubKeyPayload.parse_raw(payload) + @model_validator(mode="after") # type: ignore + def check_signature(cls, values: SignedPubKeyHeader) -> SignedPubKeyHeader: + signature: bytes = values.signature + payload: bytes = values.payload + content = SignedPubKeyPayload.model_validate_json(payload) if not verify_wallet_signature(signature, payload.hex(), content.address): msg = "Invalid signature" @@ -103,7 +102,7 @@ def check_signature(cls, values: Dict[str, bytes]) -> Dict[str, bytes]: @property def content(self) -> SignedPubKeyPayload: """Return the content of the header""" - return SignedPubKeyPayload.parse_raw(self.payload) + return SignedPubKeyPayload.model_validate_json(self.payload) class SignedOperationPayload(BaseModel): @@ -113,7 +112,7 @@ class SignedOperationPayload(BaseModel): path: str # body_sha256: str # disabled since there is no body - @validator("time") + @field_validator("time") def time_is_current(cls, v: datetime.datetime) -> datetime.datetime: """Check that the time is current and the payload is not a replay attack.""" max_past = datetime.datetime.now(tz=datetime.timezone.utc) - datetime.timedelta( @@ -135,7 +134,7 @@ class SignedOperation(BaseModel): signature: bytes payload: bytes - @validator("signature") + @field_validator("signature") def signature_must_be_hex(cls, value: str) -> bytes: """Convert the signature from hexadecimal to bytes""" @@ -147,17 +146,17 @@ def signature_must_be_hex(cls, value: str) -> bytes: logger.warning(value) raise error - 
@validator("payload") + @field_validator("payload") def payload_must_be_hex(cls, v) -> bytes: """Convert the payload from hexadecimal to bytes""" v = bytes.fromhex(v.decode()) - _ = SignedOperationPayload.parse_raw(v) + _ = SignedOperationPayload.model_validate_json(v) return v @property def content(self) -> SignedOperationPayload: """Return the content of the header""" - return SignedOperationPayload.parse_raw(self.payload) + return SignedOperationPayload.model_validate_json(self.payload) def get_signed_pubkey(request: web.Request) -> SignedPubKeyHeader: @@ -168,7 +167,7 @@ def get_signed_pubkey(request: web.Request) -> SignedPubKeyHeader: raise web.HTTPBadRequest(reason="Missing X-SignedPubKey header") try: - return SignedPubKeyHeader.parse_raw(signed_pubkey_header) + return SignedPubKeyHeader.model_validate_json(signed_pubkey_header) except KeyError as error: logger.debug(f"Missing X-SignedPubKey header: {error}") @@ -199,7 +198,7 @@ def get_signed_operation(request: web.Request) -> SignedOperation: """Get the signed operation public key that is signed by the ephemeral key from the request headers.""" try: signed_operation = request.headers["X-SignedOperation"] - return SignedOperation.parse_raw(signed_operation) + return SignedOperation.model_validate_json(signed_operation) except KeyError as error: raise web.HTTPBadRequest(reason="Missing X-SignedOperation header") from error except json.JSONDecodeError as error: @@ -259,8 +258,8 @@ async def authenticate_websocket_message( message, domain_name: Optional[str] = DOMAIN_NAME ) -> str: """Authenticate a websocket message since JS cannot configure headers on WebSockets.""" - signed_pubkey = SignedPubKeyHeader.parse_obj(message["X-SignedPubKey"]) - signed_operation = SignedOperation.parse_obj(message["X-SignedOperation"]) + signed_pubkey = SignedPubKeyHeader.model_validate(message["X-SignedPubKey"]) + signed_operation = SignedOperation.model_validate(message["X-SignedOperation"]) if signed_operation.content.domain 
!= domain_name: logger.debug( f"Invalid domain '{signed_operation.content.domain}' != '{domain_name}'" diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index c1c56fcd..385d2836 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -71,7 +71,7 @@ def rejected_message(): @pytest.fixture def aleph_messages() -> List[AlephMessage]: return [ - AggregateMessage.parse_obj( + AggregateMessage.model_validate( { "item_hash": "5b26d949fe05e38f535ef990a89da0473f9d700077cced228f2d36e73fca1fd6", "type": "AGGREGATE", @@ -95,7 +95,7 @@ def aleph_messages() -> List[AlephMessage]: "confirmed": False, } ), - PostMessage.parse_obj( + PostMessage.model_validate( { "item_hash": "70f3798fdc68ce0ee03715a5547ee24e2c3e259bf02e3f5d1e4bf5a6f6a5e99f", "type": "POST", @@ -135,7 +135,9 @@ def json_post() -> dict: def raw_messages_response(aleph_messages) -> Callable[[int], Dict[str, Any]]: return lambda page: { "messages": ( - [message.dict() for message in aleph_messages] if int(page) == 1 else [] + [message.model_dump() for message in aleph_messages] + if int(page) == 1 + else [] ), "pagination_item": "messages", "pagination_page": int(page), diff --git a/tests/unit/test_price.py b/tests/unit/test_price.py index fe9e3468..e60680f8 100644 --- a/tests/unit/test_price.py +++ b/tests/unit/test_price.py @@ -15,7 +15,7 @@ async def test_get_program_price_valid(): required_tokens=3.0555555555555556e-06, payment_type="superfluid", ) - mock_session = make_mock_get_session(expected.dict()) + mock_session = make_mock_get_session(expected.model_dump()) async with mock_session: response = await mock_session.get_program_price("cacacacacacaca") assert response == expected diff --git a/tests/unit/test_remote_account.py b/tests/unit/test_remote_account.py index cb4a2af5..3abe979e 100644 --- a/tests/unit/test_remote_account.py +++ b/tests/unit/test_remote_account.py @@ -22,7 +22,7 @@ async def test_remote_storage(): curve="secp256k1", address=local_account.get_address(), 
public_key=local_account.get_public_key(), - ).dict() + ).model_dump() ) remote_account = await RemoteAccount.from_crypto_host( diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index c560455d..4ceb5a3f 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -13,7 +13,6 @@ ProgramMessage, StoreMessage, ) -from aleph_message.models.execution.environment import MachineResources from aleph_message.models.execution.volume import ( EphemeralVolume, ImmutableVolume, @@ -116,15 +115,16 @@ def test_enum_as_str(): ( MessageType.aggregate, { + "address": "0x1", "content": { - "Hello": MachineResources( - vcpus=1, - memory=1024, - seconds=1, - ) + "Hello": { + "vcpus": 1, + "memory": 1024, + "seconds": 1, + "published_ports": None, + }, }, "key": "test", - "address": "0x1", "time": 1.0, }, ), @@ -141,7 +141,7 @@ async def test_prepare_aleph_message( channel="TEST", ) - assert message.content.dict() == content + assert message.content.model_dump() == content def test_parse_immutable_volume(): @@ -219,7 +219,7 @@ def test_compute_confidential_measure(): assert base64.b64encode(tik) == b"npOTEc4mtRGfXfB+G6EBdw==" expected_hash = "d06471f485c0a61aba5a431ec136b947be56907acf6ed96afb11788ae4525aeb" nonce = base64.b64decode("URQNqJAqh/2ep4drjx/XvA==") - sev_info = SEVInfo.parse_obj( + sev_info = SEVInfo.model_validate( { "enabled": True, "api_major": 1, diff --git a/tests/unit/test_vm_client.py b/tests/unit/test_vm_client.py index 7cc9a2c3..d9a9a36b 100644 --- a/tests/unit/test_vm_client.py +++ b/tests/unit/test_vm_client.py @@ -290,8 +290,8 @@ async def test_vm_client_generate_correct_authentication_headers(): ) path, headers = await vm_client._generate_header(vm_id, "reboot", method="post") - signed_pubkey = SignedPubKeyHeader.parse_raw(headers["X-SignedPubKey"]) - signed_operation = SignedOperation.parse_raw(headers["X-SignedOperation"]) + signed_pubkey = SignedPubKeyHeader.model_validate_json(headers["X-SignedPubKey"]) + signed_operation = 
SignedOperation.model_validate_json(headers["X-SignedOperation"]) address = verify_signed_operation(signed_operation, signed_pubkey) assert vm_client.account.get_address() == address From 142fc189b5b86afedab118acdecfa51814671471 Mon Sep 17 00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Wed, 16 Apr 2025 16:59:20 +0200 Subject: [PATCH 084/122] Feature: New SVMAccount (#213) * Feature: new SVMAccount * Feature: Add `Eclipse` chain with `SVMAccount` in `chain_account_map`to handle load_account * UnitTest: new unitest for SVM chains --- src/aleph/sdk/account.py | 2 + src/aleph/sdk/chains/svm.py | 13 +++ tests/unit/test_chain_svm.py | 199 +++++++++++++++++++++++++++++++++++ 3 files changed, 214 insertions(+) create mode 100644 src/aleph/sdk/chains/svm.py create mode 100644 tests/unit/test_chain_svm.py diff --git a/src/aleph/sdk/account.py b/src/aleph/sdk/account.py index 15dd79d1..2c289fbf 100644 --- a/src/aleph/sdk/account.py +++ b/src/aleph/sdk/account.py @@ -11,6 +11,7 @@ from aleph.sdk.chains.remote import RemoteAccount from aleph.sdk.chains.solana import SOLAccount from aleph.sdk.chains.substrate import DOTAccount +from aleph.sdk.chains.svm import SVMAccount from aleph.sdk.conf import load_main_configuration, settings from aleph.sdk.evm_utils import get_chains_with_super_token from aleph.sdk.types import AccountFromPrivateKey @@ -39,6 +40,7 @@ Chain.SONIC: EVMAccount, Chain.WORLDCHAIN: EVMAccount, Chain.ZORA: EVMAccount, + Chain.ECLIPSE: SVMAccount, } diff --git a/src/aleph/sdk/chains/svm.py b/src/aleph/sdk/chains/svm.py new file mode 100644 index 00000000..80f433dd --- /dev/null +++ b/src/aleph/sdk/chains/svm.py @@ -0,0 +1,13 @@ +from typing import Optional + +from aleph_message.models import Chain + +from .solana import SOLAccount + + +class SVMAccount(SOLAccount): + def __init__(self, private_key: bytes, chain: Optional[Chain] = None): + super().__init__(private_key=private_key) + # Same as EVM ACCOUNT need to decided if we want to send 
the specified chain or always use SOL + if chain: + self.CHAIN = chain diff --git a/tests/unit/test_chain_svm.py b/tests/unit/test_chain_svm.py new file mode 100644 index 00000000..ced673c3 --- /dev/null +++ b/tests/unit/test_chain_svm.py @@ -0,0 +1,199 @@ +import json +from dataclasses import asdict, dataclass +from pathlib import Path +from tempfile import NamedTemporaryFile + +import base58 +import pytest +from aleph_message.models import Chain +from nacl.signing import VerifyKey + +from aleph.sdk.chains.common import get_verification_buffer +from aleph.sdk.chains.solana import get_fallback_account as get_solana_account +from aleph.sdk.chains.solana import verify_signature +from aleph.sdk.chains.svm import SVMAccount +from aleph.sdk.exceptions import BadSignatureError + + +@dataclass +class Message: + chain: str + sender: str + type: str + item_hash: str + + +@pytest.fixture +def svm_account() -> SVMAccount: + with NamedTemporaryFile(delete=False) as private_key_file: + private_key_file.close() + solana_account = get_solana_account(path=Path(private_key_file.name)) + return SVMAccount(private_key=solana_account.private_key) + + +@pytest.fixture +def svm_eclipse_account() -> SVMAccount: + with NamedTemporaryFile(delete=False) as private_key_file: + private_key_file.close() + solana_account = get_solana_account(path=Path(private_key_file.name)) + return SVMAccount(private_key=solana_account.private_key, chain=Chain.ECLIPSE) + + +def test_svm_account_init(): + with NamedTemporaryFile() as private_key_file: + solana_account = get_solana_account(path=Path(private_key_file.name)) + account = SVMAccount(private_key=solana_account.private_key) + + # Default chain should be SOL + assert account.CHAIN == Chain.SOL + assert account.CURVE == "curve25519" + assert account._signing_key.verify_key + assert isinstance(account.private_key, bytes) + assert len(account.private_key) == 32 + + # Test with custom chain + account_eclipse = SVMAccount( + 
private_key=solana_account.private_key, chain=Chain.ECLIPSE + ) + assert account_eclipse.CHAIN == Chain.ECLIPSE + + +@pytest.mark.asyncio +async def test_svm_sign_message(svm_account): + message = asdict(Message("ES", svm_account.get_address(), "SomeType", "ItemHash")) + initial_message = message.copy() + await svm_account.sign_message(message) + assert message["signature"] + + address = message["sender"] + assert address + assert isinstance(address, str) + signature = json.loads(message["signature"]) + + pubkey = base58.b58decode(signature["publicKey"]) + assert isinstance(pubkey, bytes) + assert len(pubkey) == 32 + + verify_key = VerifyKey(pubkey) + verification_buffer = get_verification_buffer(message) + assert get_verification_buffer(initial_message) == verification_buffer + verif = verify_key.verify( + verification_buffer, signature=base58.b58decode(signature["signature"]) + ) + + assert verif == verification_buffer + assert message["sender"] == signature["publicKey"] + + pubkey = svm_account.get_public_key() + assert isinstance(pubkey, str) + assert len(pubkey) == 64 + + +@pytest.mark.asyncio +async def test_svm_custom_chain_sign_message(svm_eclipse_account): + message = asdict( + Message( + Chain.ECLIPSE, svm_eclipse_account.get_address(), "SomeType", "ItemHash" + ) + ) + await svm_eclipse_account.sign_message(message) + assert message["signature"] + + # Verify message has correct chain + assert message["chain"] == Chain.ECLIPSE + + # Rest of verification is the same + signature = json.loads(message["signature"]) + pubkey = base58.b58decode(signature["publicKey"]) + verify_key = VerifyKey(pubkey) + verification_buffer = get_verification_buffer(message) + verif = verify_key.verify( + verification_buffer, signature=base58.b58decode(signature["signature"]) + ) + assert verif == verification_buffer + + +@pytest.mark.asyncio +async def test_svm_decrypt(svm_account): + assert svm_account.CURVE == "curve25519" + content = b"SomeContent" + + encrypted = await 
svm_account.encrypt(content) + assert isinstance(encrypted, bytes) + decrypted = await svm_account.decrypt(encrypted) + assert isinstance(decrypted, bytes) + assert content == decrypted + + +@pytest.mark.asyncio +async def test_svm_verify_signature(svm_account): + message = asdict( + Message( + "SVM", + svm_account.get_address(), + "POST", + "SomeHash", + ) + ) + await svm_account.sign_message(message) + assert message["signature"] + raw_signature = json.loads(message["signature"])["signature"] + assert isinstance(raw_signature, str) + + verify_signature(raw_signature, message["sender"], get_verification_buffer(message)) + + # as bytes + verify_signature( + base58.b58decode(raw_signature), + base58.b58decode(message["sender"]), + get_verification_buffer(message).decode("utf-8"), + ) + + +@pytest.mark.asyncio +async def test_verify_signature_with_forged_signature(svm_account): + message = asdict( + Message( + "SVM", + svm_account.get_address(), + "POST", + "SomeHash", + ) + ) + await svm_account.sign_message(message) + assert message["signature"] + # create forged 64 bit signature from random bytes + forged = base58.b58encode(bytes(64)).decode("utf-8") + + with pytest.raises(BadSignatureError): + verify_signature(forged, message["sender"], get_verification_buffer(message)) + + +@pytest.mark.asyncio +async def test_svm_sign_raw(svm_account): + buffer = b"SomeBuffer" + signature = await svm_account.sign_raw(buffer) + assert signature + assert isinstance(signature, bytes) + + verify_signature(signature, svm_account.get_address(), buffer) + + +def test_svm_with_various_chain_values(): + # Test with different chain formats + with NamedTemporaryFile() as private_key_file: + solana_account = get_solana_account(path=Path(private_key_file.name)) + + # Test with string + account1 = SVMAccount(private_key=solana_account.private_key, chain="ES") + assert account1.CHAIN == Chain.ECLIPSE + + # Test with Chain enum if it exists + account2 = SVMAccount( + 
private_key=solana_account.private_key, chain=Chain.ECLIPSE + ) + assert account2.CHAIN == Chain.ECLIPSE + + # Test default + account3 = SVMAccount(private_key=solana_account.private_key) + assert account3.CHAIN == Chain.SOL From d8e7b7d088be1bf027827bf9332402b12f60ce6d Mon Sep 17 00:00:00 2001 From: nesitor Date: Wed, 16 Apr 2025 16:59:34 +0200 Subject: [PATCH 085/122] Update `web3` dependency (#212) * Feature: Update web3 dependency * Fix: Reduce minimal ETH balance to 0.001 instead 0.005 * Fix: Change `rawTransaction` method to the new `raw_transaction` * Fix: Solve python 3.9 dependencies for coincurve --- pyproject.toml | 16 ++++++++-------- src/aleph/sdk/chains/ethereum.py | 6 +++--- src/aleph/sdk/evm_utils.py | 2 +- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 691596fd..8b839052 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,19 +31,19 @@ dependencies = [ "aiohttp>=3.8.3", "aioresponses>=0.7.6", "aleph-message>=1", - "aleph-superfluid>=0.2.1", - "base58==2.1.1", # Needed now as default with _load_account changement - "coincurve; python_version<'3.11'", - "coincurve>=19; python_version>='3.11'", - "eth-abi>=4; python_version>='3.11'", - "eth-typing==4.3.1", + "aleph-superfluid @ git+https://github.com/aleph-im/superfluid.py", + "base58==2.1.1", # Needed now as default with _load_account changement + "coincurve; python_version>='3.9'", + "coincurve>=19; python_version>='3.9'", + "eth-abi>=5.0.1; python_version>='3.9'", + "eth-typing>=5.0.1", "jwcrypto==1.5.6", "pydantic>=2,<3", "pydantic-settings>=2", - "pynacl==1.5", # Needed now as default with _load_account changement + "pynacl==1.5", # Needed now as default with _load_account changement "python-magic", "typing-extensions", - "web3==6.3", + "web3>=7.10", ] optional-dependencies.all = [ diff --git a/src/aleph/sdk/chains/ethereum.py b/src/aleph/sdk/chains/ethereum.py index 863e2bbf..8815825e 100644 --- a/src/aleph/sdk/chains/ethereum.py +++ 
b/src/aleph/sdk/chains/ethereum.py @@ -11,7 +11,7 @@ from eth_keys.exceptions import BadSignature as EthBadSignatureError from superfluid import Web3FlowInfo from web3 import Web3 -from web3.middleware import geth_poa_middleware +from web3.middleware import ExtraDataToPOAMiddleware from web3.types import TxParams, TxReceipt from aleph.sdk.exceptions import InsufficientFundsError @@ -104,7 +104,7 @@ def connect_chain(self, chain: Optional[Chain] = None): self._provider = Web3(Web3.HTTPProvider(self.rpc)) if chain == Chain.BSC: self._provider.middleware_onion.inject( - geth_poa_middleware, "geth_poa", layer=0 + ExtraDataToPOAMiddleware, "geth_poa", layer=0 ) else: self.chain_id = None @@ -144,7 +144,7 @@ def sign_and_send() -> TxReceipt: signed_tx = self._provider.eth.account.sign_transaction( tx_params, self._account.key ) - tx_hash = self._provider.eth.send_raw_transaction(signed_tx.rawTransaction) + tx_hash = self._provider.eth.send_raw_transaction(signed_tx.raw_transaction) tx_receipt = self._provider.eth.wait_for_transaction_receipt( tx_hash, settings.TX_TIMEOUT ) diff --git a/src/aleph/sdk/evm_utils.py b/src/aleph/sdk/evm_utils.py index a425d580..62cb902b 100644 --- a/src/aleph/sdk/evm_utils.py +++ b/src/aleph/sdk/evm_utils.py @@ -9,7 +9,7 @@ from .conf import settings -MIN_ETH_BALANCE: float = 0.005 +MIN_ETH_BALANCE: float = 0.001 MIN_ETH_BALANCE_WEI = Decimal(to_wei(MIN_ETH_BALANCE, "ether")) BALANCEOF_ABI = """[{ "name": "balanceOf", From bb02aca692a60531cc64830764fc49ad4d514326 Mon Sep 17 00:00:00 2001 From: nesitor Date: Wed, 16 Apr 2025 17:44:17 +0200 Subject: [PATCH 086/122] Fix: Increase superfluid fork dependency version (#214) --- pyproject.toml | 6 +++--- src/aleph/sdk/account.py | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 8b839052..150f8c9d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,8 +31,8 @@ dependencies = [ "aiohttp>=3.8.3", "aioresponses>=0.7.6", "aleph-message>=1", - 
"aleph-superfluid @ git+https://github.com/aleph-im/superfluid.py", - "base58==2.1.1", # Needed now as default with _load_account changement + "aleph-superfluid>=0.3", + "base58==2.1.1", # Needed now as default with _load_account changement "coincurve; python_version>='3.9'", "coincurve>=19; python_version>='3.9'", "eth-abi>=5.0.1; python_version>='3.9'", @@ -40,7 +40,7 @@ dependencies = [ "jwcrypto==1.5.6", "pydantic>=2,<3", "pydantic-settings>=2", - "pynacl==1.5", # Needed now as default with _load_account changement + "pynacl==1.5", # Needed now as default with _load_account changement "python-magic", "typing-extensions", "web3>=7.10", diff --git a/src/aleph/sdk/account.py b/src/aleph/sdk/account.py index 2c289fbf..07abc115 100644 --- a/src/aleph/sdk/account.py +++ b/src/aleph/sdk/account.py @@ -28,6 +28,7 @@ Chain.BOB: EVMAccount, Chain.CYBER: EVMAccount, Chain.DOT: DOTAccount, + Chain.ECLIPSE: SVMAccount, Chain.ETH: ETHAccount, Chain.FRAXTAL: EVMAccount, Chain.LINEA: EVMAccount, @@ -40,7 +41,6 @@ Chain.SONIC: EVMAccount, Chain.WORLDCHAIN: EVMAccount, Chain.ZORA: EVMAccount, - Chain.ECLIPSE: SVMAccount, } From c676a99939a34f1b0ebaae246da8ca8216d2fa98 Mon Sep 17 00:00:00 2001 From: Reza Rahemtola <49811529+RezaRahemtola@users.noreply.github.com> Date: Tue, 22 Apr 2025 09:41:01 +0200 Subject: [PATCH 087/122] fix: Pydantic v2 errors on unknown env vars (#216) --- src/aleph/sdk/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index b289cc2b..2854c6f6 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -228,7 +228,7 @@ class Settings(BaseSettings): DNS_RESOLVERS: ClassVar[List[str]] = ["9.9.9.9", "1.1.1.1"] model_config = SettingsConfigDict( - env_prefix="ALEPH_", case_sensitive=False, env_file=".env" + env_prefix="ALEPH_", case_sensitive=False, env_file=".env", extra="ignore" ) From 0821e0ce7b854b4850d1a4f3c483684c3358cb6f Mon Sep 17 00:00:00 2001 From: "Alie.E" Date: Wed, 7 May 
2025 13:00:30 +0200 Subject: [PATCH 088/122] feat: use new endpoint to get message status (#215) --- src/aleph/sdk/client/http.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index 3d42d490..c8dda3c7 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -516,7 +516,9 @@ async def get_program_price(self, item_hash: str) -> PriceResponse: async def get_message_status(self, item_hash: str) -> MessageStatus: """return Status of a message""" - async with self.http_session.get(f"/api/v0/messages/{item_hash}") as resp: + async with self.http_session.get( + f"/api/v0/messages/{item_hash}/status" + ) as resp: if resp.status == HTTPNotFound.status_code: raise MessageNotFoundError(f"No such hash {item_hash}") resp.raise_for_status() From b3dd4fad05c0bb002c90b7f98d01de3261ff9a5c Mon Sep 17 00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Wed, 7 May 2025 13:21:11 +0200 Subject: [PATCH 089/122] Fix: add missing supported EVM Chains in chain_account_map (#217) * Fix: add missing supported EVM Chains * fix: typo * fix: typo ':' instead of '=' --- src/aleph/sdk/account.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/aleph/sdk/account.py b/src/aleph/sdk/account.py index 07abc115..262c54a0 100644 --- a/src/aleph/sdk/account.py +++ b/src/aleph/sdk/account.py @@ -26,15 +26,18 @@ Chain.BASE: ETHAccount, Chain.BLAST: EVMAccount, Chain.BOB: EVMAccount, + Chain.BSC: EVMAccount, Chain.CYBER: EVMAccount, Chain.DOT: DOTAccount, Chain.ECLIPSE: SVMAccount, Chain.ETH: ETHAccount, Chain.FRAXTAL: EVMAccount, + Chain.INK: EVMAccount, Chain.LINEA: EVMAccount, Chain.LISK: EVMAccount, Chain.METIS: EVMAccount, Chain.MODE: EVMAccount, + Chain.NEO: EVMAccount, Chain.OPTIMISM: EVMAccount, Chain.POL: EVMAccount, Chain.SOL: SOLAccount, From 3d39ca611eee34aed168d18f4140e2453a589e8f Mon Sep 17 00:00:00 2001 From: nesitor Date: Thu, 5 Jun 2025 16:52:56 +0200 Subject: 
[PATCH 090/122] Implement Unichain network (#219) * Feature: Implement Unichain network * Fix: Add last `aleph-message` package version. --- pyproject.toml | 2 +- src/aleph/sdk/account.py | 1 + src/aleph/sdk/conf.py | 6 ++++++ 3 files changed, 8 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 150f8c9d..3e965954 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ dynamic = [ "version" ] dependencies = [ "aiohttp>=3.8.3", "aioresponses>=0.7.6", - "aleph-message>=1", + "aleph-message>=1.0.1", "aleph-superfluid>=0.3", "base58==2.1.1", # Needed now as default with _load_account changement "coincurve; python_version>='3.9'", diff --git a/src/aleph/sdk/account.py b/src/aleph/sdk/account.py index 262c54a0..00939bfc 100644 --- a/src/aleph/sdk/account.py +++ b/src/aleph/sdk/account.py @@ -42,6 +42,7 @@ Chain.POL: EVMAccount, Chain.SOL: SOLAccount, Chain.SONIC: EVMAccount, + Chain.UNICHAIN: EVMAccount, Chain.WORLDCHAIN: EVMAccount, Chain.ZORA: EVMAccount, } diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index 2854c6f6..86058074 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -168,6 +168,10 @@ class Settings(BaseSettings): chain_id=146, rpc="https://rpc.soniclabs.com", ), + Chain.UNICHAIN: ChainInfo( + chain_id=130, + rpc="https://mainnet.unichain.org", + ), Chain.WORLDCHAIN: ChainInfo( chain_id=480, rpc="https://worldchain-mainnet.gateway.tenderly.co", @@ -195,6 +199,7 @@ class Settings(BaseSettings): CHAINS_OPTIMISM_ACTIVE: Optional[bool] = None CHAINS_POL_ACTIVE: Optional[bool] = None CHAINS_SONIC_ACTIVE: Optional[bool] = None + CHAINS_UNICHAIN_ACTIVE: Optional[bool] = None CHAINS_WORLDCHAIN_ACTIVE: Optional[bool] = None CHAINS_ZORA_ACTIVE: Optional[bool] = None @@ -215,6 +220,7 @@ class Settings(BaseSettings): CHAINS_OPTIMISM_RPC: Optional[str] = None CHAINS_POL_RPC: Optional[str] = None CHAINS_SONIC_RPC: Optional[str] = None + CHAINS_UNICHAIN_RPC: Optional[str] = None CHAINS_WORLDCHAIN_RPC: 
Optional[str] = None CHAINS_ZORA_RPC: Optional[str] = None From 122d81ae9226669a3aac179792ae709e685d9e27 Mon Sep 17 00:00:00 2001 From: nesitor Date: Wed, 11 Jun 2025 21:18:05 +0200 Subject: [PATCH 091/122] Feature: Implement 2 new messages statuses and 3 new EVM chains. (#220) --- pyproject.toml | 2 +- src/aleph/sdk/client/http.py | 15 +++++++++++++++ src/aleph/sdk/conf.py | 16 ++++++++++++++++ src/aleph/sdk/exceptions.py | 12 ++++++++++++ 4 files changed, 44 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 3e965954..faf272d8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ dynamic = [ "version" ] dependencies = [ "aiohttp>=3.8.3", "aioresponses>=0.7.6", - "aleph-message>=1.0.1", + "aleph-message>=1.0.2", "aleph-superfluid>=0.3", "base58==2.1.1", # Needed now as default with _load_account changement "coincurve; python_version>='3.9'", diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index c8dda3c7..bd3090e3 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -39,6 +39,8 @@ ForgottenMessageError, InvalidHashError, MessageNotFoundError, + RemovedMessageError, + ResourceNotFoundError, ) from ..query.filters import MessageFilter, PostFilter from ..query.responses import MessagesResponse, Post, PostsResponse, PriceResponse @@ -231,6 +233,9 @@ async def download_file_to_buffer( ) else: raise FileTooLarge(f"The file from {file_hash} is too large") + if response.status == 404: + raise ResourceNotFoundError() + return None async def download_file_ipfs_to_buffer( self, @@ -400,6 +405,10 @@ async def get_message( raise ForgottenMessageError( f"The requested message {message_raw['item_hash']} has been forgotten by {', '.join(message_raw['forgotten_by'])}" ) + if message_raw["status"] == "removed": + raise RemovedMessageError( + f"The requested message {message_raw['item_hash']} has been removed by {', '.join(message_raw['reason'])}" + ) message = 
parse_message(message_raw["message"]) if message_type: expected_type = get_message_type_value(message_type) @@ -429,6 +438,10 @@ async def get_message_error( raise ForgottenMessageError( f"The requested message {message_raw['item_hash']} has been forgotten by {', '.join(message_raw['forgotten_by'])}" ) + if message_raw["status"] == "removed": + raise RemovedMessageError( + f"The requested message {message_raw['item_hash']} has been removed by {', '.join(message_raw['reason'])}" + ) if message_raw["status"] != "rejected": return None return { @@ -558,6 +571,8 @@ async def get_stored_content( resp = f"Message not found: {item_hash}" except ForgottenMessageError: resp = f"Message forgotten: {item_hash}" + except RemovedMessageError as e: + resp = f"Message resources not available {item_hash}: {str(e)}" return ( result if result diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index 86058074..57b0383f 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -136,10 +136,26 @@ class Settings(BaseSettings): rpc="https://eth-mainnet.public.blastapi.io", token="0x27702a26126e0B3702af63Ee09aC4d1A084EF628", ), + Chain.ETHERLINK: ChainInfo( + chain_id=42793, + rpc="https://node.mainnet.etherlink.com", + ), Chain.FRAXTAL: ChainInfo( chain_id=252, rpc="https://rpc.frax.com", ), + Chain.HYPE: ChainInfo( + chain_id=999, + rpc="https://rpc.hyperliquid.xyz/evm", + ), + Chain.INK: ChainInfo( + chain_id=57073, + rpc="https://rpc-gel.inkonchain.com", + ), + Chain.LENS: ChainInfo( + chain_id=232, + rpc="https://rpc.lens.xyz", + ), Chain.LINEA: ChainInfo( chain_id=59144, rpc="https://linea-rpc.publicnode.com", diff --git a/src/aleph/sdk/exceptions.py b/src/aleph/sdk/exceptions.py index 05ed755f..ae0f634a 100644 --- a/src/aleph/sdk/exceptions.py +++ b/src/aleph/sdk/exceptions.py @@ -69,6 +69,18 @@ class ForgottenMessageError(QueryError): pass +class RemovedMessageError(QueryError): + """The requested message was removed""" + + pass + + +class 
ResourceNotFoundError(QueryError): + """A message resource was expected but could not be found.""" + + pass + + class InsufficientFundsError(Exception): """Raised when the account does not have enough funds to perform an action""" From f258bb90b945b9a83332732d9e532ed59d3c0be0 Mon Sep 17 00:00:00 2001 From: nesitor Date: Thu, 26 Jun 2025 10:32:56 +0200 Subject: [PATCH 092/122] Feature: Implement 2 new EVM chains. (#222) --- pyproject.toml | 7 ++++--- src/aleph/sdk/account.py | 2 ++ src/aleph/sdk/conf.py | 18 ++++++++++++++++++ 3 files changed, 24 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index faf272d8..08da27e2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,9 +30,10 @@ dynamic = [ "version" ] dependencies = [ "aiohttp>=3.8.3", "aioresponses>=0.7.6", - "aleph-message>=1.0.2", + # "aleph-message>=1.0.2", + "aleph-message @ git+https://github.com/aleph-im/aleph-message@andres-feature-implement_more_evm_chains", "aleph-superfluid>=0.3", - "base58==2.1.1", # Needed now as default with _load_account changement + "base58==2.1.1", # Needed now as default with _load_account changement "coincurve; python_version>='3.9'", "coincurve>=19; python_version>='3.9'", "eth-abi>=5.0.1; python_version>='3.9'", @@ -40,7 +41,7 @@ dependencies = [ "jwcrypto==1.5.6", "pydantic>=2,<3", "pydantic-settings>=2", - "pynacl==1.5", # Needed now as default with _load_account changement + "pynacl==1.5", # Needed now as default with _load_account changement "python-magic", "typing-extensions", "web3>=7.10", diff --git a/src/aleph/sdk/account.py b/src/aleph/sdk/account.py index 00939bfc..6af5e32c 100644 --- a/src/aleph/sdk/account.py +++ b/src/aleph/sdk/account.py @@ -22,6 +22,7 @@ chain_account_map: Dict[Chain, Type[T]] = { # type: ignore Chain.ARBITRUM: EVMAccount, + Chain.AURORA: EVMAccount, Chain.AVAX: ETHAccount, Chain.BASE: ETHAccount, Chain.BLAST: EVMAccount, @@ -41,6 +42,7 @@ Chain.OPTIMISM: EVMAccount, Chain.POL: EVMAccount, Chain.SOL: SOLAccount, + 
Chain.SOMNIA: EVMAccount, Chain.SONIC: EVMAccount, Chain.UNICHAIN: EVMAccount, Chain.WORLDCHAIN: EVMAccount, diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index 57b0383f..50b38182 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -101,6 +101,10 @@ class Settings(BaseSettings): chain_id=42161, rpc="https://arbitrum-one.publicnode.com", ), + Chain.AURORA: ChainInfo( + chain_id=1313161554, + rpc="https://mainnet.aurora.dev", + ), Chain.AVAX: ChainInfo( chain_id=43114, rpc="https://api.avax.network/ext/bc/C/rpc", @@ -180,6 +184,10 @@ class Settings(BaseSettings): chain_id=137, rpc="https://polygon.gateway.tenderly.co", ), + Chain.SOMNIA: ChainInfo( + chain_id=50312, + rpc="https://dream-rpc.somnia.network", + ), Chain.SONIC: ChainInfo( chain_id=146, rpc="https://rpc.soniclabs.com", @@ -204,16 +212,21 @@ class Settings(BaseSettings): CHAINS_BASE_ACTIVE: Optional[bool] = None CHAINS_BSC_ACTIVE: Optional[bool] = None CHAINS_ARBITRUM_ACTIVE: Optional[bool] = None + CHAINS_AURORA_ACTIVE: Optional[bool] = None CHAINS_BLAST_ACTIVE: Optional[bool] = None CHAINS_BOB_ACTIVE: Optional[bool] = None CHAINS_CYBER_ACTIVE: Optional[bool] = None + CHAINS_ETHERLINK_ACTIVE: Optional[bool] = None CHAINS_FRAXTAL_ACTIVE: Optional[bool] = None + CHAINS_HYPE_ACTIVE: Optional[bool] = None + CHAINS_LENS_ACTIVE: Optional[bool] = None CHAINS_LINEA_ACTIVE: Optional[bool] = None CHAINS_LISK_ACTIVE: Optional[bool] = None CHAINS_METIS_ACTIVE: Optional[bool] = None CHAINS_MODE_ACTIVE: Optional[bool] = None CHAINS_OPTIMISM_ACTIVE: Optional[bool] = None CHAINS_POL_ACTIVE: Optional[bool] = None + CHAINS_SOMNIA_ACTIVE: Optional[bool] = None CHAINS_SONIC_ACTIVE: Optional[bool] = None CHAINS_UNICHAIN_ACTIVE: Optional[bool] = None CHAINS_WORLDCHAIN_ACTIVE: Optional[bool] = None @@ -225,16 +238,21 @@ class Settings(BaseSettings): CHAINS_BASE_RPC: Optional[str] = None CHAINS_BSC_RPC: Optional[str] = None CHAINS_ARBITRUM_RPC: Optional[str] = None + CHAINS_AURORA_RPC: Optional[str] = 
None CHAINS_BLAST_RPC: Optional[str] = None CHAINS_BOB_RPC: Optional[str] = None CHAINS_CYBER_RPC: Optional[str] = None + CHAINS_ETHERLINK_RPC: Optional[str] = None CHAINS_FRAXTAL_RPC: Optional[str] = None + CHAINS_HYPE_RPC: Optional[str] = None + CHAINS_LENS_RPC: Optional[str] = None CHAINS_LINEA_RPC: Optional[str] = None CHAINS_LISK_RPC: Optional[str] = None CHAINS_METIS_RPC: Optional[str] = None CHAINS_MODE_RPC: Optional[str] = None CHAINS_OPTIMISM_RPC: Optional[str] = None CHAINS_POL_RPC: Optional[str] = None + CHAINS_SOMNIA_RPC: Optional[str] = None CHAINS_SONIC_RPC: Optional[str] = None CHAINS_UNICHAIN_RPC: Optional[str] = None CHAINS_WORLDCHAIN_RPC: Optional[str] = None From cb15f7e59a1743984191c2015fb6920721efb73a Mon Sep 17 00:00:00 2001 From: nesitor Date: Thu, 26 Jun 2025 12:58:56 +0200 Subject: [PATCH 093/122] Feature: Added last aleph-message version with 2 new EVM chains (#223) --- pyproject.toml | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 08da27e2..f29a5163 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,10 +30,9 @@ dynamic = [ "version" ] dependencies = [ "aiohttp>=3.8.3", "aioresponses>=0.7.6", - # "aleph-message>=1.0.2", - "aleph-message @ git+https://github.com/aleph-im/aleph-message@andres-feature-implement_more_evm_chains", + "aleph-message>=1.0.3", "aleph-superfluid>=0.3", - "base58==2.1.1", # Needed now as default with _load_account changement + "base58==2.1.1", # Needed now as default with _load_account changement "coincurve; python_version>='3.9'", "coincurve>=19; python_version>='3.9'", "eth-abi>=5.0.1; python_version>='3.9'", @@ -41,7 +40,7 @@ dependencies = [ "jwcrypto==1.5.6", "pydantic>=2,<3", "pydantic-settings>=2", - "pynacl==1.5", # Needed now as default with _load_account changement + "pynacl==1.5", # Needed now as default with _load_account changement "python-magic", "typing-extensions", "web3>=7.10", From 30a2a2e8692b16de5aa0913d1dd4abdec4927fd5 Mon Sep 17 
00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Thu, 26 Jun 2025 17:31:34 +0200 Subject: [PATCH 094/122] Feature: ipv4 and missing service (#221) * Feature: base client service * Feature: CRN service for client * Feature: DNS service for client * Feature: Scheduler service for client * Feature: Port_forwarder service for client * Feature: new settings `DNS_API` `CRN_URL_UPDATE` `CRN_LIST_URL` `CRN_VERSION` `SCHEDULER_URL` * feature: new Exception for custom service * fix: parse_obj is deprecated need to use model_validate * feat: utils func sanitize_url * feature: Utils client service * fix: lint issue http_port_forwarder.py * feature: Services types * Feature: AlephHttpClient load default service and allow new method `register_service` to add any service on top * Feature: AuthenticatedAlephHttpClient load default service and allow new method `register_authenticated_service` to add any service on top * feat: new unit test for client services * fix: import * fix: domains service not existing yet * fix: unit test * fix: remove domains for units test service * refactor: No __init__ needed * Refactor: renaming class / change folder struct * fix: port forwarder import * fix: linting format import * Feature: client.dns.get_public_dns_by_host * fix: this functions not used / wrong place * fix: linting FMT issue * feat: use new filter on dns api ?item_hash= * fix: get_scheduler_node become get_nodes since we already on client.scheduler * fix: rename get_ports to get_address_ports, get_port to get_ports * fix: we should also ensure that the mlessage is not being removed when getting instance allocations * fix: new unit test, some name change * fix: remove unit test for now will fix them --- src/aleph/sdk/chains/remote.py | 4 +- src/aleph/sdk/client/authenticated_http.py | 10 +- src/aleph/sdk/client/http.py | 16 +- src/aleph/sdk/client/services/__init__.py | 0 .../services/authenticated_port_forwarder.py | 190 ++++++++ 
src/aleph/sdk/client/services/base.py | 42 ++ src/aleph/sdk/client/services/crn.py | 138 ++++++ src/aleph/sdk/client/services/dns.py | 54 +++ src/aleph/sdk/client/services/instance.py | 146 ++++++ .../sdk/client/services/port_forwarder.py | 44 ++ src/aleph/sdk/client/services/scheduler.py | 54 +++ src/aleph/sdk/conf.py | 8 + src/aleph/sdk/exceptions.py | 70 +++ src/aleph/sdk/types.py | 193 +++++++- src/aleph/sdk/utils.py | 22 + tests/unit/conftest.py | 2 +- tests/unit/services/__init__.py | 0 tests/unit/services/mocks.py | 345 ++++++++++++++ tests/unit/services/test_base_service.py | 46 ++ tests/unit/test_services.py | 445 ++++++++++++++++++ 20 files changed, 1822 insertions(+), 7 deletions(-) create mode 100644 src/aleph/sdk/client/services/__init__.py create mode 100644 src/aleph/sdk/client/services/authenticated_port_forwarder.py create mode 100644 src/aleph/sdk/client/services/base.py create mode 100644 src/aleph/sdk/client/services/crn.py create mode 100644 src/aleph/sdk/client/services/dns.py create mode 100644 src/aleph/sdk/client/services/instance.py create mode 100644 src/aleph/sdk/client/services/port_forwarder.py create mode 100644 src/aleph/sdk/client/services/scheduler.py create mode 100644 tests/unit/services/__init__.py create mode 100644 tests/unit/services/mocks.py create mode 100644 tests/unit/services/test_base_service.py create mode 100644 tests/unit/test_services.py diff --git a/src/aleph/sdk/chains/remote.py b/src/aleph/sdk/chains/remote.py index 917cf39b..931b68f3 100644 --- a/src/aleph/sdk/chains/remote.py +++ b/src/aleph/sdk/chains/remote.py @@ -52,7 +52,7 @@ async def from_crypto_host( session = aiohttp.ClientSession(connector=connector) async with session.get(f"{host}/properties") as response: - await response.raise_for_status() + response.raise_for_status() data = await response.json() properties = AccountProperties(**data) @@ -75,7 +75,7 @@ def private_key(self): async def sign_message(self, message: Dict) -> Dict: """Sign a message 
inplace.""" async with self._session.post(f"{self._host}/sign", json=message) as response: - await response.raise_for_status() + response.raise_for_status() return await response.json() async def sign_raw(self, buffer: bytes) -> bytes: diff --git a/src/aleph/sdk/client/authenticated_http.py b/src/aleph/sdk/client/authenticated_http.py index 2975e112..ae4b6b04 100644 --- a/src/aleph/sdk/client/authenticated_http.py +++ b/src/aleph/sdk/client/authenticated_http.py @@ -38,6 +38,7 @@ from ..utils import extended_json_encoder, make_instance_content, make_program_content from .abstract import AuthenticatedAlephClient from .http import AlephHttpClient +from .services.authenticated_port_forwarder import AuthenticatedPortForwarder logger = logging.getLogger(__name__) @@ -81,6 +82,13 @@ def __init__( ) self.account = account + async def __aenter__(self): + await super().__aenter__() + # Override services with authenticated versions + self.port_forwarder = AuthenticatedPortForwarder(self) + + return self + async def ipfs_push(self, content: Mapping) -> str: """ Push arbitrary content as JSON to the IPFS service. 
@@ -392,7 +400,7 @@ async def create_store( if extra_fields is not None: values.update(extra_fields) - content = StoreContent.parse_obj(values) + content = StoreContent.model_validate(values) message, status, _ = await self.submit( content=content.model_dump(exclude_none=True), diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index bd3090e3..a433e48d 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -33,6 +33,12 @@ from aleph_message.status import MessageStatus from pydantic import ValidationError +from aleph.sdk.client.services.crn import Crn +from aleph.sdk.client.services.dns import DNS +from aleph.sdk.client.services.instance import Instance +from aleph.sdk.client.services.port_forwarder import PortForwarder +from aleph.sdk.client.services.scheduler import Scheduler + from ..conf import settings from ..exceptions import ( FileTooLarge, @@ -123,6 +129,13 @@ async def __aenter__(self): ) ) + # Initialize default services + self.dns = DNS(self) + self.port_forwarder = PortForwarder(self) + self.crn = Crn(self) + self.scheduler = Scheduler(self) + self.instance = Instance(self) + return self async def __aexit__(self, exc_type, exc_val, exc_tb): @@ -139,7 +152,8 @@ async def fetch_aggregate(self, address: str, key: str) -> Dict[str, Dict]: resp.raise_for_status() result = await resp.json() data = result.get("data", dict()) - return data.get(key) + final_result = data.get(key) + return final_result async def fetch_aggregates( self, address: str, keys: Optional[Iterable[str]] = None diff --git a/src/aleph/sdk/client/services/__init__.py b/src/aleph/sdk/client/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/aleph/sdk/client/services/authenticated_port_forwarder.py b/src/aleph/sdk/client/services/authenticated_port_forwarder.py new file mode 100644 index 00000000..765ac2f1 --- /dev/null +++ b/src/aleph/sdk/client/services/authenticated_port_forwarder.py @@ -0,0 +1,190 @@ +from 
typing import TYPE_CHECKING, Optional, Tuple + +from aleph_message.models import AggregateMessage, ItemHash +from aleph_message.status import MessageStatus + +from aleph.sdk.client.services.base import AggregateConfig +from aleph.sdk.client.services.port_forwarder import PortForwarder +from aleph.sdk.exceptions import MessageNotProcessed, NotAuthorize +from aleph.sdk.types import AllForwarders, Ports +from aleph.sdk.utils import safe_getattr + +if TYPE_CHECKING: + from aleph.sdk.client.abstract import AuthenticatedAlephClient + + +class AuthenticatedPortForwarder(PortForwarder): + """ + Authenticated Port Forwarder services with create and update capabilities + """ + + def __init__(self, client: "AuthenticatedAlephClient"): + super().__init__(client) + + async def _verify_status_processed_and_ownership( + self, item_hash: ItemHash + ) -> Tuple[AggregateMessage, MessageStatus]: + """ + Verify that the message is well processed (and not rejected / pending), + This also verify the ownership of the message + """ + message: AggregateMessage + status: MessageStatus + message, status = await self._client.get_message( + item_hash=item_hash, + with_status=True, + ) + + # We ensure message is not Rejected (Might not be processed yet) + if status not in [MessageStatus.PROCESSED, MessageStatus.PENDING]: + raise MessageNotProcessed(item_hash=item_hash, status=status) + + message_content = safe_getattr(message, "content") + address = safe_getattr(message_content, "address") + + if ( + not hasattr(self._client, "account") + or address != self._client.account.get_address() + ): + current_address = ( + self._client.account.get_address() + if hasattr(self._client, "account") + else "unknown" + ) + raise NotAuthorize( + item_hash=item_hash, + target_address=address, + current_address=current_address, + ) + return message, status + + async def get_address_ports( + self, address: Optional[str] = None + ) -> AggregateConfig[AllForwarders]: + """ + Get all port forwarding configurations 
for an address + + Args: + address: The address to fetch configurations for. + If None, uses the authenticated client's account address. + + Returns: + Port forwarding configurations + """ + if address is None: + if not hasattr(self._client, "account") or not self._client.account: + raise ValueError("No account provided and client is not authenticated") + address = self._client.account.get_address() + + return await super().get_address_ports(address=address) + + async def get_ports( + self, item_hash: ItemHash = None, address: Optional[str] = None + ) -> Optional[Ports]: + """ + Get port forwarding configuration for a specific item hash + + Args: + address: The address to fetch configurations for. + If None, uses the authenticated client's account address. + item_hash: The hash of the item to get configuration for + + Returns: + Port configuration if found, otherwise empty Ports object + """ + if address is None: + if not hasattr(self._client, "account") or not self._client.account: + raise ValueError("No account provided and client is not authenticated") + address = self._client.account.get_address() + + if item_hash is None: + raise ValueError("item_hash must be provided") + + return await super().get_ports(address=address, item_hash=item_hash) + + async def create_ports( + self, item_hash: ItemHash, ports: Ports + ) -> Tuple[AggregateMessage, MessageStatus]: + """ + Create a new port forwarding configuration for an item hash + + Args: + item_hash: The hash of the item (instance/program/IPFS website) + ports: Dictionary mapping port numbers to PortFlags + + Returns: + Dictionary with the result of the operation + """ + if not hasattr(self._client, "account") or not self._client.account: + raise ValueError("An account is required for this operation") + + # Pre Check + # _, _ = await self._verify_status_processed_and_ownership(item_hash=item_hash) + + content = {str(item_hash): ports.model_dump()} + + # Check if create_aggregate exists on the client + return await 
self._client.create_aggregate( # type: ignore + key=self.aggregate_key, content=content + ) + + async def update_ports( + self, item_hash: ItemHash, ports: Ports + ) -> Tuple[AggregateMessage, MessageStatus]: + """ + Update an existing port forwarding configuration for an item hash + + Args: + item_hash: The hash of the item (instance/program/IPFS website) + ports: Dictionary mapping port numbers to PortFlags + + Returns: + Dictionary with the result of the operation + """ + if not hasattr(self._client, "account") or not self._client.account: + raise ValueError("An account is required for this operation") + + # Pre Check + # _, _ = await self._verify_status_processed_and_ownership(item_hash=item_hash) + + content = {} + + content[str(item_hash)] = ports.model_dump() + + message, status = await self._client.create_aggregate( # type: ignore + key=self.aggregate_key, content=content + ) + + return message, status + + async def delete_ports( + self, item_hash: ItemHash + ) -> Tuple[AggregateMessage, MessageStatus]: + """ + Delete port forwarding configuration for an item hash + + Args: + item_hash: The hash of the item (instance/program/IPFS website) to delete configuration for + + Returns: + Dictionary with the result of the operation + """ + if not hasattr(self._client, "account") or not self._client.account: + raise ValueError("An account is required for this operation") + + # Pre Check + # _, _ = await self._verify_status_processed_and_ownership(item_hash=item_hash) + + # Get the Port Config of the item_hash + port: Optional[Ports] = await self.get_ports(item_hash=item_hash) + if not port: + raise + + content = {} + content[str(item_hash)] = port.model_dump() + + # Create a new aggregate with the updated content + message, status = await self._client.create_aggregate( # type: ignore + key=self.aggregate_key, content=content + ) + return message, status diff --git a/src/aleph/sdk/client/services/base.py b/src/aleph/sdk/client/services/base.py new file mode 100644 
index 00000000..7459d7f6 --- /dev/null +++ b/src/aleph/sdk/client/services/base.py @@ -0,0 +1,42 @@ +from abc import ABC +from typing import TYPE_CHECKING, Generic, List, Optional, Type, TypeVar + +from pydantic import BaseModel + +if TYPE_CHECKING: + from aleph.sdk.client.http import AlephHttpClient + + +T = TypeVar("T", bound=BaseModel) + + +class AggregateConfig(BaseModel, Generic[T]): + """ + A generic container for "aggregate" data of type T. + - `data` will be either None or a list of T-instances. + """ + + data: Optional[List[T]] = None + + +class BaseService(ABC, Generic[T]): + aggregate_key: str + model_cls: Type[T] + + def __init__(self, client: "AlephHttpClient"): + self._client = client + self.model_cls: Type[T] + + async def get_config(self, address: str): + + aggregate_data = await self._client.fetch_aggregate( + address=address, key=self.aggregate_key + ) + + if aggregate_data: + model_instance = self.model_cls.model_validate(aggregate_data) + config = AggregateConfig[T](data=[model_instance]) + else: + config = AggregateConfig[T](data=None) + + return config diff --git a/src/aleph/sdk/client/services/crn.py b/src/aleph/sdk/client/services/crn.py new file mode 100644 index 00000000..3317644a --- /dev/null +++ b/src/aleph/sdk/client/services/crn.py @@ -0,0 +1,138 @@ +from typing import TYPE_CHECKING, Dict, Optional, Union + +import aiohttp +from aiohttp.client_exceptions import ClientResponseError +from aleph_message.models import ItemHash + +from aleph.sdk.conf import settings +from aleph.sdk.exceptions import MethodNotAvailableOnCRN, VmNotFoundOnHost +from aleph.sdk.types import CrnExecutionV1, CrnExecutionV2, CrnV1List, CrnV2List +from aleph.sdk.utils import sanitize_url + +if TYPE_CHECKING: + from aleph.sdk.client.http import AlephHttpClient + + +class Crn: + """ + This services allow interact with CRNS API + TODO: ADD + /about/executions/details + /about/executions/records + /about/usage/system + /about/certificates + /about/capability + 
/about/config + /status/check/fastapi + /status/check/fastapi/legacy + /status/check/host + /status/check/version + /status/check/ipv6 + /status/config + """ + + def __init__(self, client: "AlephHttpClient"): + self._client = client + + async def get_last_crn_version(self): + """ + Fetch Last version tag from aleph-vm github repo + """ + # Create a new session for external domain requests + async with aiohttp.ClientSession() as session: + async with session.get(settings.CRN_VERSION_URL) as resp: + resp.raise_for_status() + data = await resp.json() + return data.get("tag_name") + + async def get_crns_list(self, only_active: bool = True) -> dict: + """ + Query a persistent VM running on aleph.im to retrieve list of CRNs: + https://crns-list.aleph.sh/crns.json + + Parameters + ---------- + only_active : bool + If True (the default), only return active CRNs (i.e. `filter_inactive=false`). + If False, return all CRNs (i.e. `filter_inactive=true`). + + Returns + ------- + dict + The parsed JSON response from /crns.json. 
+ """ + # We want filter_inactive = (not only_active) + # Convert bool to string for the query parameter + filter_inactive_str = str(not only_active).lower() + params = {"filter_inactive": filter_inactive_str} + + # Create a new session for external domain requests + async with aiohttp.ClientSession() as session: + async with session.get( + sanitize_url(settings.CRN_LIST_URL), params=params + ) as resp: + resp.raise_for_status() + return await resp.json() + + async def get_active_vms_v2(self, crn_address: str) -> CrnV2List: + endpoint = "/v2/about/executions/list" + + full_url = sanitize_url(crn_address + endpoint) + + async with aiohttp.ClientSession() as session: + async with session.get(full_url) as resp: + resp.raise_for_status() + raw = await resp.json() + vm_mmap = CrnV2List.model_validate(raw) + return vm_mmap + + async def get_active_vms_v1(self, crn_address: str) -> CrnV1List: + endpoint = "/about/executions/list" + + full_url = sanitize_url(crn_address + endpoint) + + async with aiohttp.ClientSession() as session: + async with session.get(full_url) as resp: + resp.raise_for_status() + raw = await resp.json() + vm_map = CrnV1List.model_validate(raw) + return vm_map + + async def get_active_vms(self, crn_address: str) -> Union[CrnV2List, CrnV1List]: + try: + return await self.get_active_vms_v2(crn_address) + except ClientResponseError as e: + if e.status == 404: + return await self.get_active_vms_v1(crn_address) + raise + + async def get_vm( + self, crn_address: str, item_hash: ItemHash + ) -> Optional[Union[CrnExecutionV1, CrnExecutionV2]]: + vms = await self.get_active_vms(crn_address) + + vm_map: Dict[ItemHash, Union[CrnExecutionV1, CrnExecutionV2]] = vms.root + + if item_hash not in vm_map: + return None + + return vm_map[item_hash] + + async def update_instance_config(self, crn_address: str, item_hash: ItemHash): + vm = await self.get_vm(crn_address, item_hash) + + if not vm: + raise VmNotFoundOnHost(crn_url=crn_address, item_hash=item_hash) + + # CRN 
have two week to upgrade their node, + # So if the CRN does not have the update + # We can't update config + if isinstance(vm, CrnExecutionV1): + raise MethodNotAvailableOnCRN() + + full_url = sanitize_url(crn_address + f"/control/{item_hash}/update") + + async with aiohttp.ClientSession() as session: + async with session.post(full_url) as resp: + resp.raise_for_status() + return await resp.json() diff --git a/src/aleph/sdk/client/services/dns.py b/src/aleph/sdk/client/services/dns.py new file mode 100644 index 00000000..95132390 --- /dev/null +++ b/src/aleph/sdk/client/services/dns.py @@ -0,0 +1,54 @@ +from typing import TYPE_CHECKING, List, Optional + +import aiohttp +from aleph_message.models import ItemHash + +from aleph.sdk.conf import settings +from aleph.sdk.types import Dns, DnsListAdapter +from aleph.sdk.utils import sanitize_url + +if TYPE_CHECKING: + from aleph.sdk.client.http import AlephHttpClient + + +class DNS: + """ + This Service mostly made to get active dns for instance: + `https://api.dns.public.aleph.sh/instances/list` + """ + + def __init__(self, client: "AlephHttpClient"): + self._client = client + + async def get_public_dns(self) -> List[Dns]: + """ + Get all the public dns ha + """ + async with aiohttp.ClientSession() as session: + async with session.get(sanitize_url(settings.DNS_API)) as resp: + resp.raise_for_status() + raw = await resp.json() + + return DnsListAdapter.validate_json(raw) + + async def get_public_dns_by_host(self, crn_hostname): + """ + Get all the public dns with filter on crn_url + """ + async with aiohttp.ClientSession() as session: + async with session.get( + sanitize_url(settings.DNS_API), params={"crn_url": crn_hostname} + ) as resp: + resp.raise_for_status() + raw = await resp.json() + + return DnsListAdapter.validate_json(raw) + + async def get_dns_for_instance(self, vm_hash: ItemHash) -> Optional[List[Dns]]: + async with aiohttp.ClientSession() as session: + async with session.get( + 
sanitize_url(settings.DNS_API), params={"item_hash": vm_hash} + ) as resp: + resp.raise_for_status() + raw = await resp.json() + return DnsListAdapter.validate_json(raw) diff --git a/src/aleph/sdk/client/services/instance.py b/src/aleph/sdk/client/services/instance.py new file mode 100644 index 00000000..1636cb62 --- /dev/null +++ b/src/aleph/sdk/client/services/instance.py @@ -0,0 +1,146 @@ +import asyncio +from typing import TYPE_CHECKING, Any, List, Optional, Tuple, Union + +from aleph_message.models import InstanceMessage, ItemHash, MessageType, PaymentType +from aleph_message.status import MessageStatus + +from aleph.sdk.query.filters import MessageFilter +from aleph.sdk.query.responses import MessagesResponse + +if TYPE_CHECKING: + from aleph.sdk.client.http import AlephHttpClient + +from aleph.sdk.types import ( + CrnExecutionV1, + CrnExecutionV2, + InstanceAllocationsInfo, + InstanceManual, + InstancesExecutionList, + InstanceWithScheduler, +) +from aleph.sdk.utils import safe_getattr, sanitize_url + + +class Instance: + """ + This is utils functions that used multiple Service + exemple getting info about Allocations / exeuction of any instances (hold or not) + """ + + def __init__(self, client: "AlephHttpClient"): + self._client = client + + async def get_name_of_executable(self, item_hash: ItemHash) -> Optional[str]: + try: + message: Any = await self._client.get_message(item_hash=item_hash) + if hasattr(message, "content") and hasattr(message.content, "metadata"): + return message.content.metadata.get("name") + elif isinstance(message, dict): + # Handle dictionary response format + if "content" in message and isinstance(message["content"], dict): + if "metadata" in message["content"] and isinstance( + message["content"]["metadata"], dict + ): + return message["content"]["metadata"].get("name") + return None + except Exception: + return None + + async def get_instance_allocation_info( + self, msg: InstanceMessage, crn_list: dict + ) -> 
Tuple[InstanceMessage, Union[InstanceManual, InstanceWithScheduler]]: + vm_hash = msg.item_hash + payment_type = safe_getattr(msg, "content.payment.type.value") + firmware = safe_getattr(msg, "content.environment.trusted_execution.firmware") + has_gpu = safe_getattr(msg, "content.requirements.gpu") + + is_hold = payment_type == PaymentType.hold.value + is_conf = bool(firmware and len(firmware) == 64) + + if is_hold and not is_conf and not has_gpu: + alloc = await self._client.scheduler.get_allocation(vm_hash) + info = InstanceWithScheduler(source="scheduler", allocations=alloc) + else: + crn_hash = safe_getattr(msg, "content.requirements.node.node_hash") + if isinstance(crn_list, list): + node = next((n for n in crn_list if n.get("hash") == crn_hash), None) + url = sanitize_url(node.get("address")) if node else "" + else: + node = crn_list.get(crn_hash) + url = sanitize_url(node.get("address")) if node else "" + + info = InstanceManual(source="manual", crn_url=url) + return msg, info + + async def get_instances(self, address: str) -> List[InstanceMessage]: + resp: MessagesResponse = await self._client.get_messages( + message_filter=MessageFilter( + message_types=[MessageType.instance], + addresses=[address], + ), + page_size=100, + ) + return resp.messages + + async def get_instances_allocations(self, messages_list, only_processed=True): + crn_list_response = await self._client.crn.get_crns_list() + crn_list = crn_list_response.get("crns", {}) + + tasks = [] + for msg in messages_list: + if only_processed: + status = await self._client.get_message_status(msg.item_hash) + if ( + status != MessageStatus.PROCESSED + and status != MessageStatus.REMOVING + ): + continue + tasks.append(self.get_instance_allocation_info(msg, crn_list)) + + results = await asyncio.gather(*tasks) + + mapping = {ItemHash(msg.item_hash): info for msg, info in results} + + return InstanceAllocationsInfo.model_validate(mapping) + + async def get_instance_executions_info( + self, instances: 
InstanceAllocationsInfo + ) -> InstancesExecutionList: + async def _fetch( + item_hash: ItemHash, + alloc: Union[InstanceManual, InstanceWithScheduler], + ) -> tuple[str, Optional[Union[CrnExecutionV1, CrnExecutionV2]]]: + """Retrieve the execution record for an item hash.""" + if isinstance(alloc, InstanceManual): + crn_url = sanitize_url(alloc.crn_url) + else: + crn_url = sanitize_url(alloc.allocations.node.url) + + if not crn_url: + return str(item_hash), None + + try: + execution = await self._client.crn.get_vm( + item_hash=item_hash, + crn_address=crn_url, + ) + return str(item_hash), execution + except Exception: + return str(item_hash), None + + fetch_tasks = [] + msg_hash_map = {} + + for item_hash, alloc in instances.root.items(): + fetch_tasks.append(_fetch(item_hash, alloc)) + msg_hash_map[str(item_hash)] = item_hash + + results = await asyncio.gather(*fetch_tasks) + + mapping = { + ItemHash(msg_hash): exec_info + for msg_hash, exec_info in results + if msg_hash is not None and exec_info is not None + } + + return InstancesExecutionList.model_validate(mapping) diff --git a/src/aleph/sdk/client/services/port_forwarder.py b/src/aleph/sdk/client/services/port_forwarder.py new file mode 100644 index 00000000..923d0931 --- /dev/null +++ b/src/aleph/sdk/client/services/port_forwarder.py @@ -0,0 +1,44 @@ +from typing import TYPE_CHECKING, Optional + +from aleph_message.models import ItemHash + +from aleph.sdk.client.services.base import AggregateConfig, BaseService +from aleph.sdk.types import AllForwarders, Ports + +if TYPE_CHECKING: + pass + + +class PortForwarder(BaseService[AllForwarders]): + """ + Ports Forwarder Logic + """ + + aggregate_key = "port-forwarding" + model_cls = AllForwarders + + def __init__(self, client): + super().__init__(client=client) + + async def get_address_ports(self, address: str) -> AggregateConfig[AllForwarders]: + result = await self.get_config(address=address) + return result + + async def get_ports(self, item_hash: ItemHash, 
address: str) -> Optional[Ports]: + """ + Get Ports Forwarder of Instance / Program / IPFS website from aggregate + """ + ports_config: AggregateConfig[AllForwarders] = await self.get_address_ports( + address=address + ) + + if ports_config.data is None: + return Ports(ports={}) + + for forwarder in ports_config.data: + ports_map = forwarder.root + + if str(item_hash) in ports_map: + return ports_map[str(item_hash)] + + return Ports(ports={}) diff --git a/src/aleph/sdk/client/services/scheduler.py b/src/aleph/sdk/client/services/scheduler.py new file mode 100644 index 00000000..765ee2bd --- /dev/null +++ b/src/aleph/sdk/client/services/scheduler.py @@ -0,0 +1,54 @@ +from typing import TYPE_CHECKING + +import aiohttp +from aleph_message.models import ItemHash + +from aleph.sdk.conf import settings +from aleph.sdk.types import AllocationItem, SchedulerNodes, SchedulerPlan +from aleph.sdk.utils import sanitize_url + +if TYPE_CHECKING: + from aleph.sdk.client.http import AlephHttpClient + + +class Scheduler: + """ + This Service is made to interact with scheduler API: + `https://scheduler.api.aleph.cloud/` + """ + + def __init__(self, client: "AlephHttpClient"): + self._client = client + + async def get_plan(self) -> SchedulerPlan: + url = f"{sanitize_url(settings.SCHEDULER_URL)}/api/v0/plan" + + async with aiohttp.ClientSession() as session: + async with session.get(url) as resp: + resp.raise_for_status() + raw = await resp.json() + + return SchedulerPlan.model_validate(raw) + + async def get_nodes(self) -> SchedulerNodes: + url = f"{sanitize_url(settings.SCHEDULER_URL)}/api/v0/nodes" + + async with aiohttp.ClientSession() as session: + async with session.get(url) as resp: + resp.raise_for_status() + raw = await resp.json() + + return SchedulerNodes.model_validate(raw) + + async def get_allocation(self, vm_hash: ItemHash) -> AllocationItem: + """ + Fetch allocation information for a given VM hash. 
+        """
+        url = f"{sanitize_url(settings.SCHEDULER_URL)}/api/v0/allocation/{vm_hash}"
+
+        async with aiohttp.ClientSession() as session:
+            async with session.get(url) as resp:
+                resp.raise_for_status()
+                payload = await resp.json()
+
+        return AllocationItem.model_validate(payload)
diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py
index 50b38182..fc852417 100644
--- a/src/aleph/sdk/conf.py
+++ b/src/aleph/sdk/conf.py
@@ -84,6 +84,14 @@ class Settings(BaseSettings):
     IPFS_GATEWAY: ClassVar[str] = "https://ipfs.aleph.cloud/ipfs/"
     CRN_URL_FOR_PROGRAMS: ClassVar[str] = "https://dchq.staging.aleph.sh/"
 
+    DNS_API: ClassVar[str] = "https://api.dns.public.aleph.sh/instances/list"
+    CRN_URL_UPDATE: ClassVar[str] = "{crn_url}/control/machine/{vm_hash}/update"
+    CRN_LIST_URL: ClassVar[str] = "https://crns-list.aleph.sh/crns.json"
+    CRN_VERSION_URL: ClassVar[str] = (
+        "https://api.github.com/repos/aleph-im/aleph-vm/releases/latest"
+    )
+    SCHEDULER_URL: ClassVar[str] = "https://scheduler.api.aleph.cloud/"
+
     # Web3Provider settings
     TOKEN_DECIMALS: ClassVar[int] = 18
     TX_TIMEOUT: ClassVar[int] = 60 * 3
diff --git a/src/aleph/sdk/exceptions.py b/src/aleph/sdk/exceptions.py
index ae0f634a..c960f5a8 100644
--- a/src/aleph/sdk/exceptions.py
+++ b/src/aleph/sdk/exceptions.py
@@ -1,5 +1,7 @@
 from abc import ABC
 
+from aleph_message.status import MessageStatus
+
 from .types import TokenType
 from .utils import displayable_amount
 
@@ -22,6 +24,74 @@ class MultipleMessagesError(QueryError):
     pass
 
 
+class MessageNotProcessed(Exception):
+    """
+    The resource that you are trying to interact with is not processed
+    """
+
+    item_hash: str
+    status: MessageStatus
+
+    def __init__(
+        self,
+        item_hash: str,
+        status: MessageStatus,
+    ):
+        self.item_hash = item_hash
+        self.status = status
+        super().__init__(
+            f"Resources {item_hash} is not processed : {self.status.value}"
+        )
+
+
+class NotAuthorize(Exception):
+    """
+    Request not authorized; this can happen, for example, in Ports Forwarding
+    if you
try to set up ports for a VM that is not yours
+    """
+
+    item_hash: str
+    target_address: str
+    current_address: str
+
+    def __init__(self, item_hash: str, target_address, current_address):
+        self.item_hash = item_hash
+        self.target_address = target_address
+        self.current_address = current_address
+        super().__init__(
+            f"Operations not authorize on resources {self.item_hash} \nTarget address : {self.target_address} \nCurrent address : {self.current_address}"
+        )
+
+
+class VmNotFoundOnHost(Exception):
+    """
+    The VM was not found on the host.
+    The message might not be processed yet / wrong CRN_URL
+    """
+
+    item_hash: str
+    crn_url: str
+
+    def __init__(
+        self,
+        item_hash: str,
+        crn_url,
+    ):
+        self.item_hash = item_hash
+        self.crn_url = crn_url
+
+        super().__init__(f"Vm : {self.item_hash} not found on crn : {self.crn_url}")
+
+
+class MethodNotAvailableOnCRN(Exception):
+    """
+    If this error appears, it means the CRN you are trying to interact with
+    is outdated and does not handle this feature
+    """
+
+    pass
+
+
 class BroadcastError(Exception):
     """
     Data could not be broadcast to the aleph.im network.
diff --git a/src/aleph/sdk/types.py b/src/aleph/sdk/types.py index cf23f19d..6c1ae561 100644 --- a/src/aleph/sdk/types.py +++ b/src/aleph/sdk/types.py @@ -1,8 +1,10 @@ from abc import abstractmethod +from datetime import datetime from enum import Enum -from typing import Dict, Optional, Protocol, TypeVar +from typing import Any, Dict, List, Literal, Optional, Protocol, TypeVar, Union -from pydantic import BaseModel, Field +from aleph_message.models import ItemHash +from pydantic import BaseModel, Field, RootModel, TypeAdapter, field_validator __all__ = ("StorageEnum", "Account", "AccountFromPrivateKey", "GenericMessage") @@ -100,3 +102,190 @@ class TokenType(str, Enum): GAS = "GAS" ALEPH = "ALEPH" + + +# Scheduler +class Period(BaseModel): + start_timestamp: datetime + duration_seconds: float + + +class PlanItem(BaseModel): + persistent_vms: List[ItemHash] = Field(default_factory=list) + instances: List[ItemHash] = Field(default_factory=list) + on_demand_vms: List[ItemHash] = Field(default_factory=list) + jobs: List[str] = Field(default_factory=list) # adjust type if needed + + @field_validator( + "persistent_vms", "instances", "on_demand_vms", "jobs", mode="before" + ) + @classmethod + def coerce_to_list(cls, v: Any) -> List[Any]: + # Treat None or empty dict as empty list + if v is None or (isinstance(v, dict) and not v): + return [] + return v + + +class SchedulerPlan(BaseModel): + period: Period + plan: Dict[str, PlanItem] + + model_config = { + "populate_by_name": True, + } + + +class NodeItem(BaseModel): + node_id: str + url: str + ipv6: Optional[str] = None + supports_ipv6: bool + + +class SchedulerNodes(BaseModel): + nodes: List[NodeItem] + + model_config = { + "populate_by_name": True, + } + + def get_url(self, node_id: str) -> Optional[str]: + """ + Return the URL for the given node_id, or None if not found. 
+        """
+        for node in self.nodes:
+            if node.node_id == node_id:
+                # Return the node's URL (not the NodeItem itself), matching the
+                # declared Optional[str] return type.
+                return node.url
+        return None
+
+
+class AllocationItem(BaseModel):
+    vm_hash: ItemHash
+    vm_type: str
+    vm_ipv6: Optional[str] = None
+    period: Period
+    node: NodeItem
+
+    model_config = {
+        "populate_by_name": True,
+    }
+
+
+class InstanceWithScheduler(BaseModel):
+    source: Literal["scheduler"]
+    allocations: AllocationItem  # Case Scheduler
+
+
+class InstanceManual(BaseModel):
+    source: Literal["manual"]
+    crn_url: str  # Case Manual
+
+
+class InstanceAllocationsInfo(
+    RootModel[Dict[ItemHash, Union[InstanceManual, InstanceWithScheduler]]]
+):
+    """
+    RootModel holding mapping ItemHash to its Allocations.
+    Uses item_hash as the key instead of InstanceMessage objects to avoid hashability issues.
+    """
+
+    pass
+
+
+# CRN Executions
+
+
+class Networking(BaseModel):
+    ipv4: str
+    ipv6: str
+
+
+class CrnExecutionV1(BaseModel):
+    networking: Networking
+
+
+class PortMapping(BaseModel):
+    host: int
+    tcp: bool
+    udp: bool
+
+
+class NetworkingV2(BaseModel):
+    ipv4_network: str
+    host_ipv4: str
+    ipv6_network: str
+    ipv6_ip: str
+    mapped_ports: Dict[str, PortMapping]
+
+
+class VmStatus(BaseModel):
+    defined_at: Optional[datetime]
+    preparing_at: Optional[datetime]
+    prepared_at: Optional[datetime]
+    starting_at: Optional[datetime]
+    started_at: Optional[datetime]
+    stopping_at: Optional[datetime]
+    stopped_at: Optional[datetime]
+
+
+class CrnV1List(RootModel[Dict[ItemHash, CrnExecutionV1]]):
+    """
+    V1: a dict whose keys are ItemHash (strings)
+    and whose values are CrnExecutionV1 (just `networking`).
+    """
+
+    pass
+
+
+class CrnExecutionV2(BaseModel):
+    networking: NetworkingV2
+    status: VmStatus
+    running: bool
+
+
+class CrnV2List(RootModel[Dict[ItemHash, CrnExecutionV2]]):
+    """
+    A RootModel whose root is a dict mapping each item-hash (string)
+    to a CrnExecutionV2, exactly matching your JSON structure.
+ """ + + pass + + +class InstancesExecutionList( + RootModel[Dict[ItemHash, Union[CrnExecutionV1, CrnExecutionV2]]] +): + """ + A Root Model representing Instances Message hashes and their Executions. + Uses ItemHash as keys to avoid hashability issues with InstanceMessage objects. + """ + + pass + + +class IPV4(BaseModel): + public: str + local: str + + +class Dns(BaseModel): + name: str + item_hash: ItemHash + ipv4: Optional[IPV4] + ipv6: str + + +DnsListAdapter = TypeAdapter(list[Dns]) + + +class PortFlags(BaseModel): + tcp: bool + udp: bool + + +class Ports(BaseModel): + ports: Dict[int, PortFlags] + + +AllForwarders = RootModel[Dict[ItemHash, Ports]] diff --git a/src/aleph/sdk/utils.py b/src/aleph/sdk/utils.py index 31b2be8d..19a3aa57 100644 --- a/src/aleph/sdk/utils.py +++ b/src/aleph/sdk/utils.py @@ -25,6 +25,7 @@ Union, get_args, ) +from urllib.parse import urlparse from uuid import UUID from zipfile import BadZipFile, ZipFile @@ -591,3 +592,24 @@ def make_program_content( authorized_keys=[], payment=payment, ) + + +def sanitize_url(url: str) -> str: + """ + Sanitize a URL by removing the trailing slash and ensuring it's properly formatted. + + Args: + url: The URL to sanitize + + Returns: + The sanitized URL + """ + # Remove trailing slash if present + url = url.rstrip("/") + + # Ensure URL has a proper scheme + parsed = urlparse(url) + if not parsed.scheme: + url = f"https://{url}" + + return url diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 385d2836..3ad0a4ad 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -166,7 +166,7 @@ async def __aenter__(self): async def __aexit__(self, exc_type, exc_val, exc_tb): ... - async def raise_for_status(self): ... + def raise_for_status(self): ... 
@property def status(self): diff --git a/tests/unit/services/__init__.py b/tests/unit/services/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/unit/services/mocks.py b/tests/unit/services/mocks.py new file mode 100644 index 00000000..86f473b7 --- /dev/null +++ b/tests/unit/services/mocks.py @@ -0,0 +1,345 @@ +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from ..conftest import make_custom_mock_response + +FAKE_CRN_GPU_HASH = "abcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabcabca" +FAKE_CRN_GPU_ADDRESS = "0xBCABCABCABCABCABCABCABCABCABCABCABCABCAB" +FAKE_CRN_GPU_URL = "https://test.gpu.crn.com" + +FAKE_CRN_CONF_HASH = "defdefdefdefdefdefdefdefdefdefdefdefdefdefdefdefdefdefdefdefdefd" +FAKE_CRN_CONF_ADDRESS = "0xDEfDEfDEfDEfDEfDEfDEfDEfDEfDEfDEfDEfDEfDEf" +FAKE_CRN_CONF_URL = "https://test.conf.crn" + +FAKE_CRN_BASIC_HASH = "aaaabbbbccccddddeeeeffff1111222233334444555566667777888899990000" +FAKE_CRN_BASIC_ADDRESS = "0xAAAABBBBCCCCDDDDEEEEFFFF1111222233334444" +FAKE_CRN_BASIC_URL = "https://test.basic.crn.com" + + +@pytest.fixture +def vm_status_v2(): + return { + "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef": { + "networking": { + "ipv4_network": "192.168.0.0/24", + "host_ipv4": "192.168.0.1", + "ipv6_network": "2001:db8::/64", + "ipv6_ip": "2001:db8::1", + "mapped_ports": {}, + }, + "status": { + "defined_at": "2023-01-01T00:00:00Z", + "started_at": "2023-01-01T00:00:00Z", + "preparing_at": "2023-01-01T00:00:00Z", + "prepared_at": "2023-01-01T00:00:00Z", + "starting_at": "2023-01-01T00:00:00Z", + "stopping_at": "2023-01-01T00:00:00Z", + "stopped_at": "2023-01-01T00:00:00Z", + }, + "running": True, + } + } + + +@pytest.fixture +def vm_status_v1(): + return { + "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef": { + "networking": {"ipv4": "192.168.0.1", "ipv6": "2001:db8::1"} + } + } + + +@pytest.fixture +def mock_crn_list(): + """Create a mock CRN list for testing.""" + 
return [ + { + "hash": FAKE_CRN_GPU_HASH, + "name": "Test GPU Instance", + "time": 1739525120.505, + "type": "compute", + "owner": FAKE_CRN_GPU_ADDRESS, + "score": 0.964502797686815, + "banner": "", + "locked": True, + "parent": FAKE_CRN_GPU_HASH, + "reward": FAKE_CRN_GPU_ADDRESS, + "status": "linked", + "address": FAKE_CRN_GPU_URL, + "manager": "", + "picture": "", + "authorized": "", + "description": "", + "performance": 0, + "multiaddress": "", + "score_updated": True, + "stream_reward": FAKE_CRN_GPU_ADDRESS, + "inactive_since": None, + "decentralization": 0.852680607762069, + "registration_url": "", + "terms_and_conditions": "", + "config_from_crn": True, + "debug_config_from_crn_at": "2025-06-18T12:09:03.843059+00:00", + "debug_config_from_crn_error": "None", + "debug_usage_from_crn_at": "2025-06-18T12:09:03.843059+00:00", + "usage_from_crn_error": "None", + "version": "1.6.0-rc1", + "payment_receiver_address": FAKE_CRN_GPU_ADDRESS, + "gpu_support": True, + "confidential_support": False, + "qemu_support": True, + "system_usage": { + "cpu": { + "count": 20, + "load_average": { + "load1": 0.357421875, + "load5": 0.31982421875, + "load15": 0.34912109375, + }, + "core_frequencies": {"min": 800, "max": 4280}, + }, + "mem": {"total_kB": 67219530, "available_kB": 61972037}, + "disk": {"total_kB": 1853812338, "available_kB": 1320664518}, + "period": { + "start_timestamp": "2025-06-18T12:09:00Z", + "duration_seconds": 60, + }, + "properties": { + "cpu": { + "architecture": "x86_64", + "vendor": "GenuineIntel", + "features": [], + } + }, + "gpu": { + "devices": [ + { + "vendor": "NVIDIA", + "model": "RTX 4000 ADA", + "device_name": "AD104GL [RTX 4000 SFF Ada Generation]", + "device_class": "0300", + "pci_host": "01:00.0", + "device_id": "10de:27b0", + "compatible": True, + } + ], + "available_devices": [ + { + "vendor": "NVIDIA", + "model": "RTX 4000 ADA", + "device_name": "AD104GL [RTX 4000 SFF Ada Generation]", + "device_class": "0300", + "pci_host": "01:00.0", + 
"device_id": "10de:27b0", + "compatible": True, + } + ], + }, + "active": True, + }, + "compatible_gpus": [ + { + "vendor": "NVIDIA", + "model": "RTX 4000 ADA", + "device_name": "AD104GL [RTX 4000 SFF Ada Generation]", + "device_class": "0300", + "pci_host": "01:00.0", + "device_id": "10de:27b0", + "compatible": True, + } + ], + "compatible_available_gpus": [ + { + "vendor": "NVIDIA", + "model": "RTX 4000 ADA", + "device_name": "AD104GL [RTX 4000 SFF Ada Generation]", + "device_class": "0300", + "pci_host": "01:00.0", + "device_id": "10de:27b0", + "compatible": True, + } + ], + "ipv6_check": {"host": True, "vm": True}, + }, + { + "hash": FAKE_CRN_CONF_HASH, + "name": "Test Conf CRN", + "time": 1739296606.021, + "type": "compute", + "owner": FAKE_CRN_CONF_ADDRESS, + "score": 0.964334395009276, + "banner": "", + "locked": False, + "parent": FAKE_CRN_CONF_HASH, + "reward": FAKE_CRN_CONF_ADDRESS, + "status": "linked", + "address": FAKE_CRN_CONF_URL, + "manager": "", + "picture": "", + "authorized": "", + "description": "", + "performance": 0, + "multiaddress": "", + "score_updated": False, + "stream_reward": FAKE_CRN_CONF_ADDRESS, + "inactive_since": None, + "decentralization": 0.994724704221032, + "registration_url": "", + "terms_and_conditions": "", + "config_from_crn": False, + "debug_config_from_crn_at": "2025-06-18T12:09:03.951298+00:00", + "debug_config_from_crn_error": "None", + "debug_usage_from_crn_at": "2025-06-18T12:09:03.951298+00:00", + "usage_from_crn_error": "None", + "version": "1.5.1", + "payment_receiver_address": FAKE_CRN_CONF_ADDRESS, + "gpu_support": False, + "confidential_support": True, + "qemu_support": True, + "system_usage": { + "cpu": { + "count": 224, + "load_average": { + "load1": 3.8466796875, + "load5": 3.9228515625, + "load15": 3.82080078125, + }, + "core_frequencies": {"min": 1500, "max": 2200}, + }, + "mem": {"total_kB": 807728145, "available_kB": 630166945}, + "disk": {"total_kB": 14971880235, "available_kB": 152975388}, + "period": { 
+ "start_timestamp": "2025-06-18T12:09:00Z", + "duration_seconds": 60, + }, + "properties": { + "cpu": { + "architecture": "x86_64", + "vendor": "AuthenticAMD", + "features": ["sev", "sev_es"], + } + }, + "gpu": {"devices": [], "available_devices": []}, + "active": True, + }, + "compatible_gpus": [], + "compatible_available_gpus": [], + "ipv6_check": {"host": True, "vm": True}, + }, + { + "hash": FAKE_CRN_BASIC_HASH, + "name": "Test Basic CRN", + "time": 1687179700.242, + "type": "compute", + "owner": FAKE_CRN_BASIC_ADDRESS, + "score": 0.979808976368904, + "banner": FAKE_CRN_BASIC_HASH, + "locked": False, + "parent": FAKE_CRN_BASIC_HASH, + "reward": FAKE_CRN_BASIC_ADDRESS, + "status": "linked", + "address": FAKE_CRN_BASIC_URL, + "manager": FAKE_CRN_BASIC_ADDRESS, + "picture": FAKE_CRN_BASIC_HASH, + "authorized": "", + "description": "", + "performance": 0, + "multiaddress": "", + "score_updated": True, + "stream_reward": FAKE_CRN_BASIC_ADDRESS, + "inactive_since": None, + "decentralization": 0.93953628188216, + "registration_url": "", + "terms_and_conditions": "", + "config_from_crn": True, + "debug_config_from_crn_at": "2025-06-18T12:08:59.599676+00:00", + "debug_config_from_crn_error": "None", + "debug_usage_from_crn_at": "2025-06-18T12:08:59.599676+00:00", + "usage_from_crn_error": "None", + "version": "1.5.1", + "payment_receiver_address": FAKE_CRN_BASIC_ADDRESS, + "gpu_support": False, + "confidential_support": False, + "qemu_support": True, + "system_usage": { + "cpu": { + "count": 32, + "load_average": {"load1": 0, "load5": 0.01513671875, "load15": 0}, + "core_frequencies": {"min": 1200, "max": 3400}, + }, + "mem": {"total_kB": 270358832, "available_kB": 266152607}, + "disk": {"total_kB": 1005067972, "available_kB": 919488466}, + "period": { + "start_timestamp": "2025-06-18T12:09:00Z", + "duration_seconds": 60, + }, + "properties": { + "cpu": { + "architecture": "x86_64", + "vendor": "GenuineIntel", + "features": [], + } + }, + "gpu": {"devices": [], 
"available_devices": []}, + "active": True, + }, + "compatible_gpus": [], + "compatible_available_gpus": [], + "ipv6_check": {"host": True, "vm": False}, + }, + ] + + +def make_mock_aiohttp_session(mocked_json_response): + mock_response = AsyncMock() + mock_response.json.return_value = mocked_json_response + mock_response.raise_for_status.return_value = None + + session = MagicMock() + + session_cm = AsyncMock() + session_cm.__aenter__.return_value = session + + get_cm = AsyncMock() + get_cm.__aenter__.return_value = mock_response + + post_cm = AsyncMock() + post_cm.__aenter__.return_value = mock_response + + session.get = MagicMock(return_value=get_cm) + session.post = MagicMock(return_value=post_cm) + + return session_cm + + +def make_mock_get_active_vms_parametrized(v2_fails, expected_payload): + session = MagicMock() + + def get(url, *args, **kwargs): + mock_resp = None + if "/v2/about/executions/list" in url and v2_fails: + mock_resp = make_custom_mock_response(expected_payload, 404) + else: + mock_resp = make_custom_mock_response(expected_payload) + + mock_ctx = AsyncMock() + mock_ctx.__aenter__.return_value = mock_resp + return mock_ctx + + def post(url, *args, **kwargs): + if "/update" in url: + return make_custom_mock_response( + {"status": "ok", "msg": "VM not starting yet"}, 200 + ) + return None + + session.get = MagicMock(side_effect=get) + + session.post = MagicMock(side_effect=post) + + session_cm = AsyncMock() + session_cm.__aenter__.return_value = session + + return session_cm diff --git a/tests/unit/services/test_base_service.py b/tests/unit/services/test_base_service.py new file mode 100644 index 00000000..6c07dd50 --- /dev/null +++ b/tests/unit/services/test_base_service.py @@ -0,0 +1,46 @@ +from typing import Optional +from unittest.mock import AsyncMock + +import pytest +from pydantic import BaseModel + +from aleph.sdk.client.services.base import AggregateConfig, BaseService + + +class DummyModel(BaseModel): + foo: str + bar: Optional[int] + + 
+class DummyService(BaseService[DummyModel]): + aggregate_key = "dummy_key" + model_cls = DummyModel + + +@pytest.mark.asyncio +async def test_get_config_with_data(): + mock_client = AsyncMock() + mock_data = {"foo": "hello", "bar": 123} + mock_client.fetch_aggregate.return_value = mock_data + + service = DummyService(mock_client) + + result = await service.get_config("0xSOME_ADDRESS") + + assert isinstance(result, AggregateConfig) + assert result.data is not None + assert isinstance(result.data[0], DummyModel) + assert result.data[0].foo == "hello" + assert result.data[0].bar == 123 + + +@pytest.mark.asyncio +async def test_get_config_with_no_data(): + mock_client = AsyncMock() + mock_client.fetch_aggregate.return_value = None + + service = DummyService(mock_client) + result = await service.get_config("0xSOME_ADDRESS") + + assert isinstance(result, AggregateConfig) + assert result.data is None diff --git a/tests/unit/test_services.py b/tests/unit/test_services.py new file mode 100644 index 00000000..762fceea --- /dev/null +++ b/tests/unit/test_services.py @@ -0,0 +1,445 @@ +from unittest.mock import AsyncMock, MagicMock, patch + +import aiohttp +import pytest + +from aleph.sdk import AlephHttpClient, AuthenticatedAlephHttpClient +from aleph.sdk.client.services.authenticated_port_forwarder import ( + AuthenticatedPortForwarder, + PortForwarder, +) +from aleph.sdk.client.services.crn import Crn +from aleph.sdk.client.services.dns import DNS +from aleph.sdk.client.services.instance import Instance +from aleph.sdk.client.services.scheduler import Scheduler +from aleph.sdk.types import ( + IPV4, + AllocationItem, + Dns, + PortFlags, + Ports, + SchedulerNodes, + SchedulerPlan, +) + + +@pytest.mark.asyncio +async def test_aleph_http_client_services_loading(): + """Test that services are properly loaded in AlephHttpClient's __aenter__""" + with patch("aiohttp.ClientSession") as mock_session: + mock_session_instance = AsyncMock() + mock_session.return_value = 
mock_session_instance + + client = AlephHttpClient(api_server="http://localhost") + + async def mocked_aenter(): + client._http_session = mock_session_instance + client.dns = DNS(client) + client.port_forwarder = PortForwarder(client) + client.crn = Crn(client) + client.scheduler = Scheduler(client) + client.instance = Instance(client) + return client + + with patch.object(client, "__aenter__", mocked_aenter), patch.object( + client, "__aexit__", AsyncMock() + ): + async with client: + assert isinstance(client.dns, DNS) + assert isinstance(client.port_forwarder, PortForwarder) + assert isinstance(client.crn, Crn) + assert isinstance(client.scheduler, Scheduler) + assert isinstance(client.instance, Instance) + + assert client.dns._client == client + assert client.port_forwarder._client == client + assert client.crn._client == client + assert client.scheduler._client == client + assert client.instance._client == client + + +@pytest.mark.asyncio +async def test_authenticated_http_client_services_loading(ethereum_account): + """Test that authenticated services are properly loaded in AuthenticatedAlephHttpClient's __aenter__""" + with patch("aiohttp.ClientSession") as mock_session: + mock_session_instance = AsyncMock() + mock_session.return_value = mock_session_instance + + client = AuthenticatedAlephHttpClient( + account=ethereum_account, api_server="http://localhost" + ) + + async def mocked_aenter(): + client._http_session = mock_session_instance + client.dns = DNS(client) + client.port_forwarder = AuthenticatedPortForwarder(client) + client.crn = Crn(client) + client.scheduler = Scheduler(client) + client.instance = Instance(client) + return client + + with patch.object(client, "__aenter__", mocked_aenter), patch.object( + client, "__aexit__", AsyncMock() + ): + async with client: + assert isinstance(client.dns, DNS) + assert isinstance(client.port_forwarder, AuthenticatedPortForwarder) + assert isinstance(client.crn, Crn) + assert isinstance(client.scheduler, 
Scheduler) + assert isinstance(client.instance, Instance) + + assert client.dns._client == client + assert client.port_forwarder._client == client + assert client.crn._client == client + assert client.scheduler._client == client + assert client.instance._client == client + + +def mock_aiohttp_session(response_data, raise_error=False, error_status=404): + """ + Creates a mock for aiohttp.ClientSession that properly handles async context managers. + + Args: + response_data: The data to return from the response's json() method + raise_error: Whether to raise an aiohttp.ClientResponseError + error_status: The HTTP status code to use if raising an error + + Returns: + A tuple of (patch_target, mock_session_context, mock_session, mock_response) + """ + # Mock the response object + mock_response = MagicMock() + + if raise_error: + # Set up raise_for_status to raise an exception + error = aiohttp.ClientResponseError( + request_info=MagicMock(), + history=tuple(), + status=error_status, + message="Not Found" if error_status == 404 else "Error", + ) + mock_response.raise_for_status = MagicMock(side_effect=error) + else: + # Normal case - just return the data + mock_response.raise_for_status = MagicMock() + mock_response.json = AsyncMock(return_value=response_data) + + # Mock the context manager for session.get + mock_context_manager = MagicMock() + mock_context_manager.__aenter__ = AsyncMock(return_value=mock_response) + mock_context_manager.__aexit__ = AsyncMock(return_value=None) + + # Mock the session's get method to return our context manager + mock_session = MagicMock() + mock_session.get = MagicMock(return_value=mock_context_manager) + mock_session.post = MagicMock(return_value=mock_context_manager) + + # Mock the ClientSession context manager + mock_session_context = MagicMock() + mock_session_context.__aenter__ = AsyncMock(return_value=mock_session) + mock_session_context.__aexit__ = AsyncMock(return_value=None) + + return "aiohttp.ClientSession", 
mock_session_context, mock_session, mock_response + + +@pytest.mark.asyncio +async def test_authenticated_port_forwarder_create_port_forward(ethereum_account): + """Test the create_port method in AuthenticatedPortForwarder""" + mock_client = MagicMock() + mock_client.http_session = AsyncMock() + mock_client.account = ethereum_account + + auth_port_forwarder = AuthenticatedPortForwarder(mock_client) + + ports = Ports(ports={80: PortFlags(tcp=True, udp=False)}) + + mock_message = MagicMock() + mock_status = MagicMock() + + # Setup the mock for create_aggregate + mock_client.create_aggregate = AsyncMock(return_value=(mock_message, mock_status)) + + # Mock the _verify_status_processed_and_ownership method + with patch.object( + auth_port_forwarder, + "_verify_status_processed_and_ownership", + AsyncMock(return_value=(mock_message, mock_status)), + ): + # Call the actual method + result_message, result_status = await auth_port_forwarder.create_ports( + item_hash="test_hash", ports=ports + ) + + # Verify create_aggregate was called + mock_client.create_aggregate.assert_called_once() + + # Check the parameters passed to create_aggregate + call_args = mock_client.create_aggregate.call_args + assert call_args[1]["key"] == "port-forwarding" + assert "test_hash" in call_args[1]["content"] + + # Verify the method returns what create_aggregate returns + assert result_message == mock_message + assert result_status == mock_status + + +@pytest.mark.asyncio +async def test_authenticated_port_forwarder_update_port(ethereum_account): + """Test the update_port method in AuthenticatedPortForwarder""" + mock_client = MagicMock() + mock_client.http_session = AsyncMock() + mock_client.account = ethereum_account + + auth_port_forwarder = AuthenticatedPortForwarder(mock_client) + + ports = Ports(ports={80: PortFlags(tcp=True, udp=False)}) + + mock_message = MagicMock() + mock_status = MagicMock() + + # Setup the mock for create_aggregate + mock_client.create_aggregate = 
AsyncMock(return_value=(mock_message, mock_status)) + + # Mock the _verify_status_processed_and_ownership method + with patch.object( + auth_port_forwarder, + "_verify_status_processed_and_ownership", + AsyncMock(return_value=(mock_message, mock_status)), + ): + # Call the actual method + result_message, result_status = await auth_port_forwarder.update_ports( + item_hash="test_hash", ports=ports + ) + + # Verify create_aggregate was called + mock_client.create_aggregate.assert_called_once() + + # Check the parameters passed to create_aggregate + call_args = mock_client.create_aggregate.call_args + assert call_args[1]["key"] == "port-forwarding" + assert "test_hash" in call_args[1]["content"] + + # Verify the method returns what create_aggregate returns + assert result_message == mock_message + assert result_status == mock_status + + +@pytest.mark.asyncio +async def test_dns_service_get_public_dns(): + """Test the DNSService get_public_dns method""" + mock_client = MagicMock() + dns_service = DNS(mock_client) + + # Mock the DnsListAdapter with a valid 64-character hash for ItemHash + mock_dns_list = [ + Dns( + name="test.aleph.sh", + item_hash="b236db23bf5ad005ad7f5d82eed08a68a925020f0755b2a59c03f784499198eb", + ipv6="2001:db8::1", + ipv4=IPV4(public="192.0.2.1", local="10.0.0.1"), + ) + ] + + # Patch DnsListAdapter.validate_json to return our mock DNS list + with patch( + "aleph.sdk.types.DnsListAdapter.validate_json", return_value=mock_dns_list + ): + # Set up mock for aiohttp.ClientSession to return a string (which is what validate_json expects) + patch_target, mock_session_context, _, _ = mock_aiohttp_session( + '["dummy json string"]' + ) + + # Patch the ClientSession constructor + with patch(patch_target, return_value=mock_session_context): + result = await dns_service.get_public_dns() + + assert len(result) == 1 + assert result[0].name == "test.aleph.sh" + assert ( + result[0].item_hash + == "b236db23bf5ad005ad7f5d82eed08a68a925020f0755b2a59c03f784499198eb" + 
) + assert result[0].ipv6 == "2001:db8::1" + assert result[0].ipv4 is not None and result[0].ipv4.public == "192.0.2.1" + + +@pytest.mark.asyncio +async def test_crn_service_get_last_crn_version(): + """Test the CrnService get_last_crn_version method""" + mock_client = MagicMock() + crn_service = Crn(mock_client) + + # Set up mock for aiohttp.ClientSession + patch_target, mock_session_context, _, _ = mock_aiohttp_session( + {"tag_name": "v1.2.3"} + ) + + # Patch the ClientSession constructor + with patch(patch_target, return_value=mock_session_context): + result = await crn_service.get_last_crn_version() + assert result == "v1.2.3" + + +@pytest.mark.asyncio +async def test_scheduler_service_get_plan(): + """Test the SchedulerService get_plan method""" + mock_client = MagicMock() + scheduler_service = Scheduler(mock_client) + + mock_plan_data = { + "period": {"start_timestamp": "2023-01-01T00:00:00Z", "duration_seconds": 3600}, + "plan": { + "node1": { + "persistent_vms": [ + "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", + "fedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210", + ], + "instances": [], + "on_demand_vms": [], + "jobs": [], + } + }, + } + + # Set up mock for aiohttp.ClientSession + patch_target, mock_session_context, _, _ = mock_aiohttp_session(mock_plan_data) + + # Patch the ClientSession constructor + with patch(patch_target, return_value=mock_session_context): + result = await scheduler_service.get_plan() + assert isinstance(result, SchedulerPlan) + assert "node1" in result.plan + assert ( + "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef" + in result.plan["node1"].persistent_vms + ) + + +@pytest.mark.asyncio +async def test_scheduler_service_get_scheduler_node(): + """Test the SchedulerService get_scheduler_node method""" + mock_client = MagicMock() + scheduler_service = Scheduler(mock_client) + + mock_nodes_data = { + "nodes": [ + { + "node_id": "node1", + "url": "https://node1.aleph.im", + 
"ipv6": "2001:db8::1", + "supports_ipv6": True, + }, + { + "node_id": "node2", + "url": "https://node2.aleph.im", + "ipv6": None, + "supports_ipv6": False, + }, + ] + } + + # Set up mock for aiohttp.ClientSession + patch_target, mock_session_context, _, _ = mock_aiohttp_session(mock_nodes_data) + + # Patch the ClientSession constructor + with patch(patch_target, return_value=mock_session_context): + result = await scheduler_service.get_nodes() + assert isinstance(result, SchedulerNodes) + assert len(result.nodes) == 2 + assert result.nodes[0].node_id == "node1" + assert result.nodes[1].url == "https://node2.aleph.im" + + +@pytest.mark.asyncio +async def test_scheduler_service_get_allocation(): + """Test the SchedulerService get_allocation method""" + mock_client = MagicMock() + scheduler_service = Scheduler(mock_client) + + mock_allocation_data = { + "vm_hash": "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", + "vm_type": "instance", + "vm_ipv6": "2001:db8::1", + "period": {"start_timestamp": "2023-01-01T00:00:00Z", "duration_seconds": 3600}, + "node": { + "node_id": "node1", + "url": "https://node1.aleph.im", + "ipv6": "2001:db8::1", + "supports_ipv6": True, + }, + } + + # Set up mock for aiohttp.ClientSession + patch_target, mock_session_context, _, _ = mock_aiohttp_session( + mock_allocation_data + ) + + # Patch the ClientSession constructor + with patch(patch_target, return_value=mock_session_context): + result = await scheduler_service.get_allocation( + "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef" + ) + assert isinstance(result, AllocationItem) + assert ( + result.vm_hash + == "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef" + ) + assert result.node.node_id == "node1" + + +@pytest.mark.asyncio +async def test_utils_service_get_name_of_executable(): + """Test the UtilsService get_name_of_executable method""" + mock_client = MagicMock() + utils_service = Instance(mock_client) + + # Mock a message with 
metadata.name + mock_message = MagicMock() + mock_message.content.metadata = {"name": "test-executable"} + + # Set up the client mock to return the message + mock_client.get_message = AsyncMock(return_value=mock_message) + + # Test successful case + result = await utils_service.get_name_of_executable("hash1") + assert result == "test-executable" + + # Test with dict response + mock_client.get_message = AsyncMock( + return_value={"content": {"metadata": {"name": "dict-executable"}}} + ) + + result = await utils_service.get_name_of_executable("hash2") + assert result == "dict-executable" + + # Test with exception + mock_client.get_message = AsyncMock(side_effect=Exception("Test exception")) + + result = await utils_service.get_name_of_executable("hash3") + assert result is None + + +@pytest.mark.asyncio +async def test_utils_service_get_instances(): + """Test the UtilsService get_instances method""" + mock_client = MagicMock() + utils_service = Instance(mock_client) + + # Mock messages response + mock_messages = [MagicMock(), MagicMock()] + mock_response = MagicMock() + mock_response.messages = mock_messages + + # Set up the client mock + mock_client.get_messages = AsyncMock(return_value=mock_response) + + result = await utils_service.get_instances("0xaddress") + + # Check that get_messages was called with correct parameters + mock_client.get_messages.assert_called_once() + call_args = mock_client.get_messages.call_args[1] + assert call_args["page_size"] == 100 + assert call_args["message_filter"].addresses == ["0xaddress"] + + # Check result + assert result == mock_messages From 0fbbfb592b31cf27e2472c8623e9b76502cbeca6 Mon Sep 17 00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Tue, 12 Aug 2025 14:46:34 +0200 Subject: [PATCH 095/122] Fix: Client Services Scheduler to handle 404 (#224) * fix: `InstanceWithScheduler` to allow Optional `AllocationItem` * fix: scheduler service `get_allocation` to handle 404 * fix: instance service 
`get_instance_executions_info` to handle allocations being None --- src/aleph/sdk/client/services/instance.py | 2 ++ src/aleph/sdk/client/services/scheduler.py | 22 +++++++++++++--------- src/aleph/sdk/types.py | 4 +++- 3 files changed, 18 insertions(+), 10 deletions(-) diff --git a/src/aleph/sdk/client/services/instance.py b/src/aleph/sdk/client/services/instance.py index 1636cb62..dbff386f 100644 --- a/src/aleph/sdk/client/services/instance.py +++ b/src/aleph/sdk/client/services/instance.py @@ -114,6 +114,8 @@ async def _fetch( if isinstance(alloc, InstanceManual): crn_url = sanitize_url(alloc.crn_url) else: + if not alloc.allocations: + return str(item_hash), None crn_url = sanitize_url(alloc.allocations.node.url) if not crn_url: diff --git a/src/aleph/sdk/client/services/scheduler.py b/src/aleph/sdk/client/services/scheduler.py index 765ee2bd..5aabc57e 100644 --- a/src/aleph/sdk/client/services/scheduler.py +++ b/src/aleph/sdk/client/services/scheduler.py @@ -1,6 +1,7 @@ -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Optional import aiohttp +from aiohttp import ClientResponseError from aleph_message.models import ItemHash from aleph.sdk.conf import settings @@ -40,15 +41,18 @@ async def get_nodes(self) -> SchedulerNodes: return SchedulerNodes.model_validate(raw) - async def get_allocation(self, vm_hash: ItemHash) -> AllocationItem: + async def get_allocation(self, vm_hash: ItemHash) -> Optional[AllocationItem]: """ Fetch allocation information for a given VM hash. 
""" url = f"{sanitize_url(settings.SCHEDULER_URL)}/api/v0/allocation/{vm_hash}" - - async with aiohttp.ClientSession() as session: - async with session.get(url) as resp: - resp.raise_for_status() - payload = await resp.json() - - return AllocationItem.model_validate(payload) + try: + async with aiohttp.ClientSession() as session: + async with session.get(url) as resp: + resp.raise_for_status() + payload = await resp.json() + return AllocationItem.model_validate(payload) + except ClientResponseError as e: + if e.status == 404: # Allocation can't be find on scheduler + return None + raise e diff --git a/src/aleph/sdk/types.py b/src/aleph/sdk/types.py index 6c1ae561..31b94d4d 100644 --- a/src/aleph/sdk/types.py +++ b/src/aleph/sdk/types.py @@ -174,7 +174,9 @@ class AllocationItem(BaseModel): class InstanceWithScheduler(BaseModel): source: Literal["scheduler"] - allocations: AllocationItem # Case Scheduler + allocations: Optional[ + AllocationItem + ] # Case Scheduler (None == allocation can't be find on scheduler) class InstanceManual(BaseModel): From d9cd3945e81750499c503c90634f22ef27f05ab4 Mon Sep 17 00:00:00 2001 From: nesitor Date: Tue, 9 Sep 2025 16:49:16 +0200 Subject: [PATCH 096/122] Add support for credits payment method (#227) * Feature: Updated to last aleph-message version supporting credits payment. 
* Fix: Used new aleph-message release version --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index f29a5163..c148831c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ dynamic = [ "version" ] dependencies = [ "aiohttp>=3.8.3", "aioresponses>=0.7.6", - "aleph-message>=1.0.3", + "aleph-message>=1.0.4", "aleph-superfluid>=0.3", "base58==2.1.1", # Needed now as default with _load_account changement "coincurve; python_version>='3.9'", From b5f6cf04b6589510d811a01fb5672a4d32578390 Mon Sep 17 00:00:00 2001 From: nesitor Date: Tue, 9 Sep 2025 18:11:22 +0200 Subject: [PATCH 097/122] Fix: Solve MacOS building issue on CI (#230) --- .github/workflows/build-wheels.yml | 3 +-- README.md | 3 +-- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-wheels.yml b/.github/workflows/build-wheels.yml index 97d03fc1..440b53ca 100644 --- a/.github/workflows/build-wheels.yml +++ b/.github/workflows/build-wheels.yml @@ -41,8 +41,7 @@ jobs: if: startsWith(matrix.os, 'macos-') run: | brew update - brew tap cuber/homebrew-libsecp256k1 - brew install libsecp256k1 + brew install secp256k1 - name: Install required system packages only for Ubuntu Linux if: startsWith(matrix.os, 'ubuntu-') diff --git a/README.md b/README.md index 3d2aea9c..00ec940d 100644 --- a/README.md +++ b/README.md @@ -21,8 +21,7 @@ Using some chains may also require installing `libgmp3-dev`. ### macOs This project does not support Python 3.12 on macOS. Please use Python 3.11 instead. 
```shell -$ brew tap cuber/homebrew-libsecp256k1 -$ brew install libsecp256k1 +$ brew install secp256k1 ``` ## Installation From 5e3b9276baab21b9b843c43713c9f67787902558 Mon Sep 17 00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Fri, 12 Sep 2025 12:53:03 +0200 Subject: [PATCH 098/122] Fix: get_stored_content should still allow to fetch a message with removing status (#229) --- src/aleph/sdk/client/http.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index a433e48d..d4428707 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -564,7 +564,7 @@ async def get_stored_content( message, status = await self.get_message( item_hash=ItemHash(item_hash), with_status=True ) - if status != MessageStatus.PROCESSED: + if status not in [MessageStatus.PROCESSED, MessageStatus.REMOVING]: resp = f"Invalid message status: {status}" elif message.type != MessageType.store: resp = f"Invalid message type: {message.type}" From 7bbeddd12d2649a61360815a21d6afc313ce2a31 Mon Sep 17 00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Tue, 16 Sep 2025 10:49:04 +0200 Subject: [PATCH 099/122] Feature: allow to update CRN_LIST_URL from .env (#232) --- src/aleph/sdk/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index fc852417..b2294274 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -86,7 +86,7 @@ class Settings(BaseSettings): DNS_API: ClassVar[str] = "https://api.dns.public.aleph.sh/instances/list" CRN_URL_UPDATE: ClassVar[str] = "{crn_url}/control/machine/{vm_hash}/update" - CRN_LIST_URL: ClassVar[str] = "https://crns-list.aleph.sh/crns.json" + CRN_LIST_URL: str = "https://crns-list.aleph.sh/crns.json" CRN_VERSION_URL: ClassVar[str] = ( "https://api.github.com/repos/aleph-im/aleph-vm/releases/latest" ) From 90b9da0c85330c3bdbf9dbb41c3d1e6d6c02c895 Mon Sep 17 
00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Tue, 16 Sep 2025 10:51:32 +0200 Subject: [PATCH 100/122] Feature: Pricing Services (#226) * feat: pricing services client * feat: init of pricing services * fix: utils func to calculate compute unit as pyaleph do * feat: GPU utils func to get currents state of gpu on network * fix: mypy issue * feat: pricing aggregate.json for mocked * Feature: new DictLikeModel for CRNS * feature: new utils extract_valid_eth_address * Refactor: CrnList to use DictLikeModel instead of dit and apply logic for GPU / Fetch crn on it * Fix: linting issue * Feat: new method for pricing aggregate --- src/aleph/sdk/client/http.py | 3 +- src/aleph/sdk/client/services/crn.py | 187 +++++++++++++- src/aleph/sdk/client/services/pricing.py | 235 ++++++++++++++++++ src/aleph/sdk/client/services/scheduler.py | 1 - src/aleph/sdk/types.py | 54 +++- src/aleph/sdk/utils.py | 10 + tests/unit/services/pricing_aggregate.json | 273 +++++++++++++++++++++ tests/unit/services/test_pricing.py | 212 ++++++++++++++++ 8 files changed, 966 insertions(+), 9 deletions(-) create mode 100644 src/aleph/sdk/client/services/pricing.py create mode 100644 tests/unit/services/pricing_aggregate.json create mode 100644 tests/unit/services/test_pricing.py diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index d4428707..723a43bf 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -37,6 +37,7 @@ from aleph.sdk.client.services.dns import DNS from aleph.sdk.client.services.instance import Instance from aleph.sdk.client.services.port_forwarder import PortForwarder +from aleph.sdk.client.services.pricing import Pricing from aleph.sdk.client.services.scheduler import Scheduler from ..conf import settings @@ -135,7 +136,7 @@ async def __aenter__(self): self.crn = Crn(self) self.scheduler = Scheduler(self) self.instance = Instance(self) - + self.pricing = Pricing(self) return self async def __aexit__(self, 
exc_type, exc_val, exc_tb): diff --git a/src/aleph/sdk/client/services/crn.py b/src/aleph/sdk/client/services/crn.py index 3317644a..e8d57c8c 100644 --- a/src/aleph/sdk/client/services/crn.py +++ b/src/aleph/sdk/client/services/crn.py @@ -1,18 +1,187 @@ -from typing import TYPE_CHECKING, Dict, Optional, Union +from typing import TYPE_CHECKING, Dict, List, Optional, Union import aiohttp from aiohttp.client_exceptions import ClientResponseError from aleph_message.models import ItemHash +from pydantic import BaseModel from aleph.sdk.conf import settings from aleph.sdk.exceptions import MethodNotAvailableOnCRN, VmNotFoundOnHost -from aleph.sdk.types import CrnExecutionV1, CrnExecutionV2, CrnV1List, CrnV2List -from aleph.sdk.utils import sanitize_url +from aleph.sdk.types import ( + CrnExecutionV1, + CrnExecutionV2, + CrnV1List, + CrnV2List, + DictLikeModel, +) +from aleph.sdk.utils import extract_valid_eth_address, sanitize_url if TYPE_CHECKING: from aleph.sdk.client.http import AlephHttpClient +class GPU(BaseModel): + vendor: str + model: str + device_name: str + device_class: str + pci_host: str + compatible: bool + + +class NetworkGPUS(BaseModel): + total_gpu_count: int + available_gpu_count: int + available_gpu_list: dict[str, List[GPU]] # str = node_url + used_gpu_list: dict[str, List[GPU]] # str = node_url + + +class CRN(DictLikeModel): + # This Model work as dict but where we can type what we need / apply logic on top + + # Simplify search + hash: str + name: str + address: str + + gpu_support: Optional[bool] = False + confidential_support: Optional[bool] = False + qemu_support: Optional[bool] = False + + version: Optional[str] = "0.0.0" + payment_receiver_address: Optional[str] # Can be None if not configured + + +class CrnList(DictLikeModel): + crns: list[CRN] = [] + + @classmethod + def from_api(cls, payload: dict) -> "CrnList": + raw_list = payload.get("crns", []) + crn_list = [ + CRN.model_validate(item) if not isinstance(item, CRN) else item + for item in 
raw_list + ] + return cls(crns=crn_list) + + def find_gpu_on_network(self): + gpu_count: int = 0 + available_gpu_count: int = 0 + + compatible_gpu: Dict[str, List[GPU]] = {} + available_compatible_gpu: Dict[str, List[GPU]] = {} + + for crn_ in self.crns: + if not crn_.gpu_support: + continue + + # Extracts used GPU + for gpu in crn_.get("compatible_gpus", []): + compatible_gpu[crn_.address] = [] + compatible_gpu[crn_.address].append(GPU.model_validate(gpu)) + gpu_count += 1 + + # Extracts available GPU + for gpu in crn_.get("compatible_available_gpus", []): + available_compatible_gpu[crn_.address] = [] + available_compatible_gpu[crn_.address].append(GPU.model_validate(gpu)) + gpu_count += 1 + available_gpu_count += 1 + + return NetworkGPUS( + total_gpu_count=gpu_count, + available_gpu_count=available_gpu_count, + used_gpu_list=compatible_gpu, + available_gpu_list=available_compatible_gpu, + ) + + def filter_crn( + self, + latest_crn_version: bool = False, + ipv6: bool = False, + stream_address: bool = False, + confidential: bool = False, + gpu: bool = False, + ) -> list[CRN]: + """Filter compute resource node list, unfiltered by default. + Args: + latest_crn_version (bool): Filter by latest crn version. + ipv6 (bool): Filter invalid IPv6 configuration. + stream_address (bool): Filter invalid payment receiver address. + confidential (bool): Filter by confidential computing support. + gpu (bool): Filter by GPU support. + Returns: + list[CRN]: List of compute resource nodes. (if no filter applied, return all) + """ + # current_crn_version = await fetch_latest_crn_version() + # Relax current filter to allow use aleph-vm versions since 1.5.1. 
+ # TODO: Allow to specify that option on settings aggregate on maybe on GitHub + current_crn_version = "1.5.1" + + filtered_crn: list[CRN] = [] + for crn_ in self.crns: + # Check crn version + if latest_crn_version and (crn_.version or "0.0.0") < current_crn_version: + continue + + # Filter with ipv6 check + if ipv6: + ipv6_check = crn_.get("ipv6_check") + if not ipv6_check or not all(ipv6_check.values()): + continue + + if stream_address and not extract_valid_eth_address( + crn_.payment_receiver_address or "" + ): + continue + + # Confidential Filter + if confidential and not crn_.confidential_support: + continue + + # Filter with GPU / Available GPU + available_gpu = crn_.get("compatible_available_gpus") + if gpu and (not crn_.gpu_support or not available_gpu): + continue + + filtered_crn.append(crn_) + return filtered_crn + + # Find CRN by address + def find_crn_by_address(self, address: str) -> Optional[CRN]: + for crn_ in self.crns: + if crn_.address == sanitize_url(address): + return crn_ + return None + + # Find CRN by hash + def find_crn_by_hash(self, crn_hash: str) -> Optional[CRN]: + for crn_ in self.crns: + if crn_.hash == crn_hash: + return crn_ + return None + + def find_crn( + self, + address: Optional[str] = None, + crn_hash: Optional[str] = None, + ) -> Optional[CRN]: + """Find CRN by address or hash (both optional, address priority) + + Args: + address (Optional[str], optional): url of the node. Defaults to None. + crn_hash (Optional[str], optional): hash of the nodes. Defaults to None. 
+ + Returns: + Optional[CRN]: CRN object or None if not found + """ + if address: + return self.find_crn_by_address(address) + if crn_hash: + return self.find_crn_by_hash(crn_hash) + return None + + class Crn: """ This services allow interact with CRNS API @@ -45,7 +214,7 @@ async def get_last_crn_version(self): data = await resp.json() return data.get("tag_name") - async def get_crns_list(self, only_active: bool = True) -> dict: + async def get_crns_list(self, only_active: bool = True) -> CrnList: """ Query a persistent VM running on aleph.im to retrieve list of CRNs: https://crns-list.aleph.sh/crns.json @@ -72,7 +241,7 @@ async def get_crns_list(self, only_active: bool = True) -> dict: sanitize_url(settings.CRN_LIST_URL), params=params ) as resp: resp.raise_for_status() - return await resp.json() + return CrnList.from_api(await resp.json()) async def get_active_vms_v2(self, crn_address: str) -> CrnV2List: endpoint = "/v2/about/executions/list" @@ -136,3 +305,11 @@ async def update_instance_config(self, crn_address: str, item_hash: ItemHash): async with session.post(full_url) as resp: resp.raise_for_status() return await resp.json() + + # Gpu Functions Helper + async def fetch_gpu_on_network( + self, + only_active: bool = True, + ) -> NetworkGPUS: + crn_list = await self.get_crns_list(only_active) + return crn_list.find_gpu_on_network() diff --git a/src/aleph/sdk/client/services/pricing.py b/src/aleph/sdk/client/services/pricing.py new file mode 100644 index 00000000..acf7c214 --- /dev/null +++ b/src/aleph/sdk/client/services/pricing.py @@ -0,0 +1,235 @@ +import logging +import math +from enum import Enum +from typing import TYPE_CHECKING, Dict, List, Optional, Union + +from aleph.sdk.client.services.base import BaseService + +if TYPE_CHECKING: + pass + +from decimal import Decimal + +from pydantic import BaseModel, RootModel + +logger = logging.getLogger(__name__) + + +class PricingEntity(str, Enum): + STORAGE = "storage" + WEB3_HOSTING = "web3_hosting" + PROGRAM 
= "program" + PROGRAM_PERSISTENT = "program_persistent" + INSTANCE = "instance" + INSTANCE_CONFIDENTIAL = "instance_confidential" + INSTANCE_GPU_STANDARD = "instance_gpu_standard" + INSTANCE_GPU_PREMIUM = "instance_gpu_premium" + + +class GroupEntity(str, Enum): + STORAGE = "storage" + WEBSITE = "website" + PROGRAM = "program" + INSTANCE = "instance" + CONFIDENTIAL = "confidential" + GPU = "gpu" + ALL = "all" + + +class Price(BaseModel): + payg: Optional[Decimal] = None + holding: Optional[Decimal] = None + fixed: Optional[Decimal] = None + + +class ComputeUnit(BaseModel): + vcpus: int + memory_mib: int + disk_mib: int + + +class TierComputedSpec(ComputeUnit): + ... + gpu_model: Optional[str] + vram: Optional[int] + + +class Tier(BaseModel): + id: str + compute_units: int + vram: Optional[int] = None + model: Optional[str] = None + + def extract_tier_id(self) -> str: + return self.id.split("-", 1)[-1] + + +class PricingPerEntity(BaseModel): + price: Dict[str, Union[Price, Decimal]] + compute_unit: Optional[ComputeUnit] = None + tiers: Optional[List[Tier]] = None + + def _get_nb_compute_units( + self, + vcpus: int = 1, + memory_mib: int = 2048, + ) -> Optional[int]: + if self.compute_unit: + memory = math.ceil(memory_mib / self.compute_unit.memory_mib) + nb_compute = vcpus if vcpus >= memory else memory + return nb_compute + return None + + def get_closest_tier( + self, + vcpus: Optional[int] = None, + memory_mib: Optional[int] = None, + compute_unit: Optional[int] = None, + ): + """Get Closest tier for Program / Instance""" + + # We Calculate Compute Unit requested based on vcpus and memory + computed_cu = None + if vcpus is not None and memory_mib is not None: + computed_cu = self._get_nb_compute_units(vcpus=vcpus, memory_mib=memory_mib) + elif vcpus is not None and self.compute_unit is not None: + computed_cu = self._get_nb_compute_units( + vcpus=vcpus, memory_mib=self.compute_unit.memory_mib + ) + elif memory_mib is not None and self.compute_unit is not None: + 
computed_cu = self._get_nb_compute_units( + vcpus=self.compute_unit.vcpus, memory_mib=memory_mib + ) + + # Case where Vcpus or memory is given but also a number of CU (case on aleph-client) + cu: Optional[int] = None + if computed_cu is not None and compute_unit is not None: + if computed_cu != compute_unit: + logger.warning( + f"Mismatch in compute units: from CPU/RAM={computed_cu}, given={compute_unit}. " + f"Choosing {max(computed_cu, compute_unit)}." + ) + cu = max(computed_cu, compute_unit) # We trust the bigger trier + else: + cu = compute_unit if compute_unit is not None else computed_cu + + # now tier found + if cu is None: + return None + + # With CU available, choose the closest one + candidates = self.tiers + if candidates is None: + return None + + best_tier = min( + candidates, + key=lambda t: (abs(t.compute_units - cu), -t.compute_units), + ) + return best_tier + + def get_services_specs( + self, + tier: Tier, + ) -> TierComputedSpec: + """ + Calculate ammount of vram / cpu / disk | + gpu model / vram if it GPU instance + """ + if self.compute_unit is None: + raise ValueError("ComputeUnit is required to get service specs") + + cpu = tier.compute_units * self.compute_unit.vcpus + memory_mib = tier.compute_units * self.compute_unit.memory_mib + disk = ( + tier.compute_units * self.compute_unit.disk_mib + ) # Min value disk can be increased + + # Gpu Specs + gpu = None + vram = None + if tier.model and tier.vram: + gpu = tier.model + vram = tier.vram + + return TierComputedSpec( + vcpus=cpu, + memory_mib=memory_mib, + disk_mib=disk, + gpu_model=gpu, + vram=vram, + ) + + +class PricingModel(RootModel[Dict[PricingEntity, PricingPerEntity]]): + def __iter__(self): + return iter(self.root) + + def __getitem__(self, item): + return self.root[item] + + +PRICING_GROUPS: dict[str, list[PricingEntity]] = { + GroupEntity.STORAGE: [PricingEntity.STORAGE], + GroupEntity.WEBSITE: [PricingEntity.WEB3_HOSTING], + GroupEntity.PROGRAM: [PricingEntity.PROGRAM, 
PricingEntity.PROGRAM_PERSISTENT], + GroupEntity.INSTANCE: [PricingEntity.INSTANCE], + GroupEntity.CONFIDENTIAL: [PricingEntity.INSTANCE_CONFIDENTIAL], + GroupEntity.GPU: [ + PricingEntity.INSTANCE_GPU_STANDARD, + PricingEntity.INSTANCE_GPU_PREMIUM, + ], + GroupEntity.ALL: list(PricingEntity), +} + +PAYG_GROUP: list[PricingEntity] = [ + PricingEntity.INSTANCE, + PricingEntity.INSTANCE_CONFIDENTIAL, + PricingEntity.INSTANCE_GPU_STANDARD, + PricingEntity.INSTANCE_GPU_PREMIUM, +] + + +class Pricing(BaseService[PricingModel]): + """ + This Service handle logic around Pricing + """ + + aggregate_key = "pricing" + model_cls = PricingModel + + def __init__(self, client): + super().__init__(client=client) + + # Config from aggregate + async def get_pricing_aggregate( + self, + ) -> PricingModel: + result = await self.get_config( + address="0xFba561a84A537fCaa567bb7A2257e7142701ae2A" + ) + return result.data[0] + + async def get_pricing_for_services( + self, services: List[PricingEntity], pricing_info: Optional[PricingModel] = None + ) -> Dict[PricingEntity, PricingPerEntity]: + """ + Get pricing information for requested services + + Args: + services: List of pricing entities to get information for + pricing_info: Optional pre-fetched pricing aggregate + + Returns: + Dictionary with pricing information for requested services + """ + if ( + not pricing_info + ): # Avoid reloading aggregate info if there is already fetched + pricing_info = await self.get_pricing_aggregate() + + result = {} + for service in services: + if service in pricing_info: + result[service] = pricing_info[service] + + return result diff --git a/src/aleph/sdk/client/services/scheduler.py b/src/aleph/sdk/client/services/scheduler.py index 5aabc57e..fdfaa5bc 100644 --- a/src/aleph/sdk/client/services/scheduler.py +++ b/src/aleph/sdk/client/services/scheduler.py @@ -28,7 +28,6 @@ async def get_plan(self) -> SchedulerPlan: async with session.get(url) as resp: resp.raise_for_status() raw = await resp.json() 
- return SchedulerPlan.model_validate(raw) async def get_nodes(self) -> SchedulerNodes: diff --git a/src/aleph/sdk/types.py b/src/aleph/sdk/types.py index 31b94d4d..83b17151 100644 --- a/src/aleph/sdk/types.py +++ b/src/aleph/sdk/types.py @@ -1,10 +1,27 @@ from abc import abstractmethod from datetime import datetime from enum import Enum -from typing import Any, Dict, List, Literal, Optional, Protocol, TypeVar, Union +from typing import ( + Any, + Dict, + Iterator, + List, + Literal, + Optional, + Protocol, + TypeVar, + Union, +) from aleph_message.models import ItemHash -from pydantic import BaseModel, Field, RootModel, TypeAdapter, field_validator +from pydantic import ( + BaseModel, + ConfigDict, + Field, + RootModel, + TypeAdapter, + field_validator, +) __all__ = ("StorageEnum", "Account", "AccountFromPrivateKey", "GenericMessage") @@ -291,3 +308,36 @@ class Ports(BaseModel): AllForwarders = RootModel[Dict[ItemHash, Ports]] + + +class DictLikeModel(BaseModel): + """ + Base class: behaves like a dict while still being a Pydantic model. 
+ """ + + # allow extra fields + validate on assignment + model_config = ConfigDict(extra="allow", validate_assignment=True) + + def __getitem__(self, key: str) -> Any: + return getattr(self, key) + + def __setitem__(self, key: str, value: Any) -> None: + setattr(self, key, value) + + def __iter__(self) -> Iterator[str]: + return iter(self.model_dump().keys()) + + def __contains__(self, key: str) -> bool: + return hasattr(self, key) + + def keys(self): + return self.model_dump().keys() + + def values(self): + return self.model_dump().values() + + def items(self): + return self.model_dump().items() + + def get(self, key: str, default=None): + return getattr(self, key, default) diff --git a/src/aleph/sdk/utils.py b/src/aleph/sdk/utils.py index 19a3aa57..94bc3bb9 100644 --- a/src/aleph/sdk/utils.py +++ b/src/aleph/sdk/utils.py @@ -6,6 +6,7 @@ import json import logging import os +import re import subprocess from datetime import date, datetime, time from decimal import Context, Decimal, InvalidOperation @@ -613,3 +614,12 @@ def sanitize_url(url: str) -> str: url = f"https://{url}" return url + + +def extract_valid_eth_address(address: str) -> str: + if address: + pattern = r"0x[a-fA-F0-9]{40}" + match = re.search(pattern, address) + if match: + return match.group(0) + return "" diff --git a/tests/unit/services/pricing_aggregate.json b/tests/unit/services/pricing_aggregate.json new file mode 100644 index 00000000..2da0dbb8 --- /dev/null +++ b/tests/unit/services/pricing_aggregate.json @@ -0,0 +1,273 @@ +{ + "address": "0xFba561a84A537fCaa567bb7A2257e7142701ae2A", + "data": { + "pricing": { + "program": { + "price": { + "storage": { + "payg": "0.000000977", + "holding": "0.05" + }, + "compute_unit": { + "payg": "0.011", + "holding": "200" + } + }, + "tiers": [ + { + "id": "tier-1", + "compute_units": 1 + }, + { + "id": "tier-2", + "compute_units": 2 + }, + { + "id": "tier-3", + "compute_units": 4 + }, + { + "id": "tier-4", + "compute_units": 6 + }, + { + "id": "tier-5", 
+ "compute_units": 8 + }, + { + "id": "tier-6", + "compute_units": 12 + } + ], + "compute_unit": { + "vcpus": 1, + "disk_mib": 2048, + "memory_mib": 2048 + } + }, + "storage": { + "price": { + "storage": { + "holding": "0.333333333" + } + } + }, + "instance": { + "price": { + "storage": { + "payg": "0.000000977", + "holding": "0.05" + }, + "compute_unit": { + "payg": "0.055", + "holding": "1000" + } + }, + "tiers": [ + { + "id": "tier-1", + "compute_units": 1 + }, + { + "id": "tier-2", + "compute_units": 2 + }, + { + "id": "tier-3", + "compute_units": 4 + }, + { + "id": "tier-4", + "compute_units": 6 + }, + { + "id": "tier-5", + "compute_units": 8 + }, + { + "id": "tier-6", + "compute_units": 12 + } + ], + "compute_unit": { + "vcpus": 1, + "disk_mib": 20480, + "memory_mib": 2048 + } + }, + "web3_hosting": { + "price": { + "fixed": 50, + "storage": { + "holding": "0.333333333" + } + } + }, + "program_persistent": { + "price": { + "storage": { + "payg": "0.000000977", + "holding": "0.05" + }, + "compute_unit": { + "payg": "0.055", + "holding": "1000" + } + }, + "tiers": [ + { + "id": "tier-1", + "compute_units": 1 + }, + { + "id": "tier-2", + "compute_units": 2 + }, + { + "id": "tier-3", + "compute_units": 4 + }, + { + "id": "tier-4", + "compute_units": 6 + }, + { + "id": "tier-5", + "compute_units": 8 + }, + { + "id": "tier-6", + "compute_units": 12 + } + ], + "compute_unit": { + "vcpus": 1, + "disk_mib": 20480, + "memory_mib": 2048 + } + }, + "instance_gpu_premium": { + "price": { + "storage": { + "payg": "0.000000977" + }, + "compute_unit": { + "payg": "0.56" + } + }, + "tiers": [ + { + "id": "tier-1", + "vram": 81920, + "model": "A100", + "compute_units": 16 + }, + { + "id": "tier-2", + "vram": 81920, + "model": "H100", + "compute_units": 24 + } + ], + "compute_unit": { + "vcpus": 1, + "disk_mib": 61440, + "memory_mib": 6144 + } + }, + "instance_confidential": { + "price": { + "storage": { + "payg": "0.000000977", + "holding": "0.05" + }, + "compute_unit": { + 
"payg": "0.11", + "holding": "2000" + } + }, + "tiers": [ + { + "id": "tier-1", + "compute_units": 1 + }, + { + "id": "tier-2", + "compute_units": 2 + }, + { + "id": "tier-3", + "compute_units": 4 + }, + { + "id": "tier-4", + "compute_units": 6 + }, + { + "id": "tier-5", + "compute_units": 8 + }, + { + "id": "tier-6", + "compute_units": 12 + } + ], + "compute_unit": { + "vcpus": 1, + "disk_mib": 20480, + "memory_mib": 2048 + } + }, + "instance_gpu_standard": { + "price": { + "storage": { + "payg": "0.000000977" + }, + "compute_unit": { + "payg": "0.28" + } + }, + "tiers": [ + { + "id": "tier-1", + "vram": 20480, + "model": "RTX 4000 ADA", + "compute_units": 3 + }, + { + "id": "tier-2", + "vram": 24576, + "model": "RTX 3090", + "compute_units": 4 + }, + { + "id": "tier-3", + "vram": 24576, + "model": "RTX 4090", + "compute_units": 6 + }, + { + "id": "tier-3", + "vram": 32768, + "model": "RTX 5090", + "compute_units": 8 + }, + { + "id": "tier-4", + "vram": 49152, + "model": "L40S", + "compute_units": 12 + } + ], + "compute_unit": { + "vcpus": 1, + "disk_mib": 61440, + "memory_mib": 6144 + } + } + } + }, + "info": { + + } +} \ No newline at end of file diff --git a/tests/unit/services/test_pricing.py b/tests/unit/services/test_pricing.py new file mode 100644 index 00000000..ab6f7981 --- /dev/null +++ b/tests/unit/services/test_pricing.py @@ -0,0 +1,212 @@ +import json +from decimal import Decimal +from pathlib import Path +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from aleph.sdk.client.http import AlephHttpClient +from aleph.sdk.client.services.pricing import ( + PAYG_GROUP, + PRICING_GROUPS, + GroupEntity, + Price, + Pricing, + PricingEntity, + PricingModel, + PricingPerEntity, +) + + +@pytest.fixture +def pricing_aggregate(): + """Load the pricing aggregate JSON file for testing.""" + json_path = Path(__file__).parent / "pricing_aggregate.json" + with open(json_path, "r") as f: + data = json.load(f) + return data + + +@pytest.fixture +def 
mock_client(pricing_aggregate): + """Create a real client with mocked HTTP responses.""" + # Create a mock response for the http session get method + mock_response = AsyncMock() + mock_response.raise_for_status.return_value = None + mock_response.json.return_value = pricing_aggregate + + # Create an async context manager for the mock response + mock_context = AsyncMock() + mock_context.__aenter__.return_value = mock_response + + # Create a mock HTTP session + mock_session = AsyncMock() + mock_session.get = MagicMock(return_value=mock_context) + + client = AlephHttpClient(api_server="http://localhost") + client._http_session = mock_session + + return client + + +@pytest.mark.asyncio +async def test_get_pricing_aggregate(mock_client): + """Test fetching the pricing aggregate data.""" + pricing_service = Pricing(mock_client) + result = await pricing_service.get_pricing_aggregate() + + # Check the result is a PricingModel + assert isinstance(result, PricingModel) + + assert PricingEntity.STORAGE in result + assert PricingEntity.PROGRAM in result + assert PricingEntity.INSTANCE in result + + storage_entity = result[PricingEntity.STORAGE] + assert isinstance(storage_entity, PricingPerEntity) + assert "storage" in storage_entity.price + storage_price = storage_entity.price["storage"] + assert isinstance(storage_price, Price) # Add type assertion for mypy + assert storage_price.holding == Decimal("0.333333333") + + # Check program entity has correct compute unit details + program_entity = result[PricingEntity.PROGRAM] + assert isinstance(program_entity, PricingPerEntity) + assert program_entity.compute_unit is not None # Ensure compute_unit is not None + assert program_entity.compute_unit.vcpus == 1 + assert program_entity.compute_unit.memory_mib == 2048 + assert program_entity.compute_unit.disk_mib == 2048 + + # Check tiers in instance entity + instance_entity = result[PricingEntity.INSTANCE] + assert instance_entity.tiers is not None # Ensure tiers is not None + assert 
len(instance_entity.tiers) == 6 + assert instance_entity.tiers[0].id == "tier-1" + assert instance_entity.tiers[0].compute_units == 1 + + +@pytest.mark.asyncio +async def test_get_pricing_for_services(mock_client): + """Test fetching pricing for specific services.""" + pricing_service = Pricing(mock_client) + + # Test Case 1: Get pricing for storage and program services + services = [PricingEntity.STORAGE, PricingEntity.PROGRAM] + result = await pricing_service.get_pricing_for_services(services) + + # Check the result contains only the requested entities + assert len(result) == 2 + assert PricingEntity.STORAGE in result + assert PricingEntity.PROGRAM in result + assert PricingEntity.INSTANCE not in result + + # Verify specific pricing data + storage_price = result[PricingEntity.STORAGE].price["storage"] + assert isinstance(storage_price, Price) # Ensure it's a Price object + assert storage_price.holding == Decimal("0.333333333") + + compute_price = result[PricingEntity.PROGRAM].price["compute_unit"] + assert isinstance(compute_price, Price) # Ensure it's a Price object + assert compute_price.payg == Decimal("0.011") + assert compute_price.holding == Decimal("200") + + # Test Case 2: Using pre-fetched pricing aggregate + pricing_info = await pricing_service.get_pricing_aggregate() + result2 = await pricing_service.get_pricing_for_services(services, pricing_info) + + # Results should be the same + assert result[PricingEntity.STORAGE].price == result2[PricingEntity.STORAGE].price + assert result[PricingEntity.PROGRAM].price == result2[PricingEntity.PROGRAM].price + + # Test Case 3: Empty services list + empty_result = await pricing_service.get_pricing_for_services([]) + assert isinstance(empty_result, dict) + assert len(empty_result) == 0 + + # Test Case 4: Web3 hosting service + web3_result = await pricing_service.get_pricing_for_services( + [PricingEntity.WEB3_HOSTING] + ) + assert len(web3_result) == 1 + assert PricingEntity.WEB3_HOSTING in web3_result + assert 
web3_result[PricingEntity.WEB3_HOSTING].price["fixed"] == Decimal("50") + + # Test Case 5: GPU services have specific properties + gpu_services = [ + PricingEntity.INSTANCE_GPU_STANDARD, + PricingEntity.INSTANCE_GPU_PREMIUM, + ] + gpu_result = await pricing_service.get_pricing_for_services(gpu_services) + assert len(gpu_result) == 2 + # Check GPU models are present + standard_tiers = gpu_result[PricingEntity.INSTANCE_GPU_STANDARD].tiers + premium_tiers = gpu_result[PricingEntity.INSTANCE_GPU_PREMIUM].tiers + assert standard_tiers is not None + assert premium_tiers is not None + assert standard_tiers[0].model == "RTX 4000 ADA" + assert premium_tiers[1].model == "H100" + + +@pytest.mark.asyncio +async def test_get_pricing_for_gpu_services(mock_client): + """Test fetching pricing for GPU services.""" + pricing_service = Pricing(mock_client) + + # Test with GPU services + gpu_services = [ + PricingEntity.INSTANCE_GPU_STANDARD, + PricingEntity.INSTANCE_GPU_PREMIUM, + ] + result = await pricing_service.get_pricing_for_services(gpu_services) + + # Check that both GPU services are returned + assert len(result) == 2 + assert PricingEntity.INSTANCE_GPU_STANDARD in result + assert PricingEntity.INSTANCE_GPU_PREMIUM in result + + # Verify GPU standard pricing and details + gpu_standard = result[PricingEntity.INSTANCE_GPU_STANDARD] + compute_unit_price = gpu_standard.price["compute_unit"] + assert isinstance(compute_unit_price, Price) + assert compute_unit_price.payg == Decimal("0.28") + + standard_tiers = gpu_standard.tiers + assert standard_tiers is not None + assert len(standard_tiers) == 5 + assert standard_tiers[0].model == "RTX 4000 ADA" + assert standard_tiers[0].vram == 20480 + + # Verify GPU premium pricing and details + gpu_premium = result[PricingEntity.INSTANCE_GPU_PREMIUM] + premium_compute_price = gpu_premium.price["compute_unit"] + assert isinstance(premium_compute_price, Price) + assert premium_compute_price.payg == Decimal("0.56") + + premium_tiers = 
gpu_premium.tiers + assert premium_tiers is not None + assert len(premium_tiers) == 2 + assert premium_tiers[1].model == "H100" + assert premium_tiers[1].vram == 81920 + + +@pytest.mark.asyncio +async def test_pricing_groups(): + """Test the pricing groups constants.""" + # Check that all pricing entities are covered in PRICING_GROUPS + all_entities = set() + for group_entities in PRICING_GROUPS.values(): + for entity in group_entities: + all_entities.add(entity) + + # All PricingEntity values should be in some group + for entity in PricingEntity: + assert entity in all_entities + + # Check ALL group contains all entities + assert set(PRICING_GROUPS[GroupEntity.ALL]) == set(PricingEntity) + + # Check PAYG_GROUP contains expected entities + assert PricingEntity.INSTANCE in PAYG_GROUP + assert PricingEntity.INSTANCE_CONFIDENTIAL in PAYG_GROUP + assert PricingEntity.INSTANCE_GPU_STANDARD in PAYG_GROUP + assert PricingEntity.INSTANCE_GPU_PREMIUM in PAYG_GROUP From 4eb207c30e5371b5c0efbc22f9a95425fa3958c5 Mon Sep 17 00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Thu, 18 Sep 2025 17:09:15 +0200 Subject: [PATCH 101/122] Feat: estimated gas based on tx (#209) * fix: use estimate_gas in can_transact for accurate limit checks * feat: implement _simulate_create_tx_flow to estimate gas for specific Superfluid transactions * feat: add can_transact check in _execute_operation_with_account to prevent underfunded tx * fix: remove unnecessary can_start_flow check in create_flow * fix: should use MIN_ETH_BALANCE_WEI instead of MIN_ETH_BALANCE * fix: ensure _provider exist while using can_transact * fix: return false if error got returned while trying to estimate the gas cost in _simulate_create_tx_flow * Fix: gas estimations + error handling for gas / Aleph token * fix: linting error `hatch` * Feature: gas estimations unit test * fix: linting * fix: mypy cannot assign method --- src/aleph/sdk/chains/ethereum.py | 34 +++-- 
src/aleph/sdk/connectors/superfluid.py | 43 +++++-- tests/unit/test_gas_estimation.py | 168 +++++++++++++++++++++++++ 3 files changed, 225 insertions(+), 20 deletions(-) create mode 100644 tests/unit/test_gas_estimation.py diff --git a/src/aleph/sdk/chains/ethereum.py b/src/aleph/sdk/chains/ethereum.py index 8815825e..02bebd8f 100644 --- a/src/aleph/sdk/chains/ethereum.py +++ b/src/aleph/sdk/chains/ethereum.py @@ -11,6 +11,7 @@ from eth_keys.exceptions import BadSignature as EthBadSignatureError from superfluid import Web3FlowInfo from web3 import Web3 +from web3.exceptions import ContractCustomError from web3.middleware import ExtraDataToPOAMiddleware from web3.types import TxParams, TxReceipt @@ -21,7 +22,6 @@ from ..connectors.superfluid import Superfluid from ..evm_utils import ( BALANCEOF_ABI, - MIN_ETH_BALANCE, MIN_ETH_BALANCE_WEI, FlowUpdate, from_wei_token, @@ -119,14 +119,34 @@ def connect_chain(self, chain: Optional[Chain] = None): def switch_chain(self, chain: Optional[Chain] = None): self.connect_chain(chain=chain) - def can_transact(self, block=True) -> bool: - balance = self.get_eth_balance() - valid = balance > MIN_ETH_BALANCE_WEI if self.chain else False + def can_transact(self, tx: TxParams, block=True) -> bool: + balance_wei = self.get_eth_balance() + try: + assert self._provider is not None + + estimated_gas = self._provider.eth.estimate_gas(tx) + + gas_price = tx.get("gasPrice", self._provider.eth.gas_price) + + if "maxFeePerGas" in tx: + max_fee = tx["maxFeePerGas"] + total_fee_wei = estimated_gas * max_fee + else: + total_fee_wei = estimated_gas * gas_price + + total_fee_wei = int(total_fee_wei * 1.2) + + except ContractCustomError: + total_fee_wei = MIN_ETH_BALANCE_WEI # Fallback if estimation fails + + required_fee_wei = total_fee_wei + (tx.get("value", 0)) + + valid = balance_wei > required_fee_wei if self.chain else False if not valid and block: raise InsufficientFundsError( token_type=TokenType.GAS, - required_funds=MIN_ETH_BALANCE, - 
available_funds=float(from_wei_token(balance)), + required_funds=float(from_wei_token(required_fee_wei)), + available_funds=float(from_wei_token(balance_wei)), ) return valid @@ -136,7 +156,6 @@ async def _sign_and_send_transaction(self, tx_params: TxParams) -> str: @param tx_params - Transaction parameters @returns - str - Transaction hash """ - self.can_transact() def sign_and_send() -> TxReceipt: if self._provider is None: @@ -144,6 +163,7 @@ def sign_and_send() -> TxReceipt: signed_tx = self._provider.eth.account.sign_transaction( tx_params, self._account.key ) + tx_hash = self._provider.eth.send_raw_transaction(signed_tx.raw_transaction) tx_receipt = self._provider.eth.wait_for_transaction_receipt( tx_hash, settings.TX_TIMEOUT diff --git a/src/aleph/sdk/connectors/superfluid.py b/src/aleph/sdk/connectors/superfluid.py index 76bbf907..cd971b74 100644 --- a/src/aleph/sdk/connectors/superfluid.py +++ b/src/aleph/sdk/connectors/superfluid.py @@ -5,6 +5,7 @@ from eth_utils import to_normalized_address from superfluid import CFA_V1, Operation, Web3FlowInfo +from web3.exceptions import ContractCustomError from aleph.sdk.evm_utils import ( FlowUpdate, @@ -37,6 +38,32 @@ def __init__(self, account: ETHAccount): self.super_token = str(get_super_token_address(account.chain)) self.cfaV1Instance = CFA_V1(account.rpc, account.chain_id) + def _simulate_create_tx_flow(self, flow: Decimal, block=True) -> bool: + try: + operation = self.cfaV1Instance.create_flow( + sender=self.normalized_address, + receiver=to_normalized_address( + "0x0000000000000000000000000000000000000001" + ), # Fake Address we do not sign/send this transactions + super_token=self.super_token, + flow_rate=int(to_wei_token(flow)), + ) + + populated_transaction = operation._get_populated_transaction_request( + self.account.rpc, self.account._account.key + ) + return self.account.can_transact(tx=populated_transaction, block=block) + except ContractCustomError as e: + if getattr(e, "data", None) == 
"0xea76c9b3": + balance = self.account.get_super_token_balance() + MIN_FLOW_4H = to_wei_token(flow) * Decimal(self.MIN_4_HOURS) + raise InsufficientFundsError( + token_type=TokenType.ALEPH, + required_funds=float(from_wei_token(MIN_FLOW_4H)), + available_funds=float(from_wei_token(balance)), + ) + return False + async def _execute_operation_with_account(self, operation: Operation) -> str: """ Execute an operation using the provided ETHAccount @@ -46,26 +73,16 @@ async def _execute_operation_with_account(self, operation: Operation) -> str: populated_transaction = operation._get_populated_transaction_request( self.account.rpc, self.account._account.key ) + self.account.can_transact(tx=populated_transaction) + return await self.account._sign_and_send_transaction(populated_transaction) def can_start_flow(self, flow: Decimal, block=True) -> bool: """Check if the account has enough funds to start a Superfluid flow of the given size.""" - valid = False - if self.account.can_transact(block=block): - balance = self.account.get_super_token_balance() - MIN_FLOW_4H = to_wei_token(flow) * Decimal(self.MIN_4_HOURS) - valid = balance > MIN_FLOW_4H - if not valid and block: - raise InsufficientFundsError( - token_type=TokenType.ALEPH, - required_funds=float(from_wei_token(MIN_FLOW_4H)), - available_funds=float(from_wei_token(balance)), - ) - return valid + return self._simulate_create_tx_flow(flow=flow, block=block) async def create_flow(self, receiver: str, flow: Decimal) -> str: """Create a Superfluid flow between two addresses.""" - self.can_start_flow(flow) return await self._execute_operation_with_account( operation=self.cfaV1Instance.create_flow( sender=self.normalized_address, diff --git a/tests/unit/test_gas_estimation.py b/tests/unit/test_gas_estimation.py new file mode 100644 index 00000000..abbd8c5c --- /dev/null +++ b/tests/unit/test_gas_estimation.py @@ -0,0 +1,168 @@ +from decimal import Decimal +from unittest.mock import MagicMock, patch + +import pytest +from 
aleph_message.models import Chain +from web3.exceptions import ContractCustomError +from web3.types import TxParams + +from aleph.sdk.chains.ethereum import ETHAccount +from aleph.sdk.connectors.superfluid import Superfluid +from aleph.sdk.exceptions import InsufficientFundsError +from aleph.sdk.types import TokenType + + +@pytest.fixture +def mock_eth_account(): + private_key = b"\x01" * 32 + account = ETHAccount( + private_key, + chain=Chain.ETH, + ) + account._provider = MagicMock() + account._provider.eth = MagicMock() + account._provider.eth.gas_price = 20_000_000_000 # 20 Gwei + account._provider.eth.estimate_gas = MagicMock( + return_value=100_000 + ) # 100k gas units + + # Mock get_eth_balance to return a specific balance + with patch.object(account, "get_eth_balance", return_value=10**18): # 1 ETH + yield account + + +@pytest.fixture +def mock_superfluid(mock_eth_account): + superfluid = Superfluid(mock_eth_account) + superfluid.cfaV1Instance = MagicMock() + superfluid.cfaV1Instance.create_flow = MagicMock() + superfluid.super_token = "0xsupertokenaddress" + superfluid.normalized_address = "0xsenderaddress" + + # Mock the operation + operation = MagicMock() + operation._get_populated_transaction_request = MagicMock( + return_value={"value": 0, "gas": 100000, "gasPrice": 20_000_000_000} + ) + superfluid.cfaV1Instance.create_flow.return_value = operation + + return superfluid + + +class TestGasEstimation: + def test_can_transact_with_sufficient_funds(self, mock_eth_account): + tx = TxParams({"to": "0xreceiver", "value": 0}) + + # Should pass with 1 ETH balance against ~0.002 ETH gas cost + assert mock_eth_account.can_transact(tx=tx, block=True) is True + + def test_can_transact_with_insufficient_funds(self, mock_eth_account): + tx = TxParams({"to": "0xreceiver", "value": 0}) + + # Set balance to almost zero + with patch.object(mock_eth_account, "get_eth_balance", return_value=1000): + # Should raise InsufficientFundsError + with 
pytest.raises(InsufficientFundsError) as exc_info: + mock_eth_account.can_transact(tx=tx, block=True) + + assert exc_info.value.token_type == TokenType.GAS + + def test_can_transact_with_legacy_gas_price(self, mock_eth_account): + tx = TxParams( + {"to": "0xreceiver", "value": 0, "gasPrice": 30_000_000_000} # 30 Gwei + ) + + # Should use the tx's gasPrice instead of default + mock_eth_account.can_transact(tx=tx, block=True) + + # It should have used the tx's gasPrice for calculation + mock_eth_account._provider.eth.estimate_gas.assert_called_once() + + def test_can_transact_with_eip1559_gas(self, mock_eth_account): + tx = TxParams( + {"to": "0xreceiver", "value": 0, "maxFeePerGas": 40_000_000_000} # 40 Gwei + ) + + # Should use the tx's maxFeePerGas + mock_eth_account.can_transact(tx=tx, block=True) + + # It should have used the tx's maxFeePerGas for calculation + mock_eth_account._provider.eth.estimate_gas.assert_called_once() + + def test_can_transact_with_contract_error(self, mock_eth_account): + tx = TxParams({"to": "0xreceiver", "value": 0}) + + # Make estimate_gas throw a ContractCustomError + mock_eth_account._provider.eth.estimate_gas.side_effect = ContractCustomError( + "error" + ) + + # Should fallback to MIN_ETH_BALANCE_WEI + mock_eth_account.can_transact(tx=tx, block=True) + + # It should have called estimate_gas + mock_eth_account._provider.eth.estimate_gas.assert_called_once() + + +class TestSuperfluidFlowEstimation: + @pytest.mark.asyncio + async def test_simulate_create_tx_flow_success( + self, mock_superfluid, mock_eth_account + ): + # Patch the can_transact method to simulate a successful transaction + with patch.object(mock_eth_account, "can_transact", return_value=True): + result = mock_superfluid._simulate_create_tx_flow(Decimal("0.00000005")) + assert result is True + + # Verify the flow was correctly simulated but not executed + mock_superfluid.cfaV1Instance.create_flow.assert_called_once() + assert 
"0x0000000000000000000000000000000000000001" in str( + mock_superfluid.cfaV1Instance.create_flow.call_args + ) + + @pytest.mark.asyncio + async def test_simulate_create_tx_flow_contract_error( + self, mock_superfluid, mock_eth_account + ): + # Setup a contract error code for insufficient deposit + error = ContractCustomError("Insufficient deposit") + error.data = "0xea76c9b3" # This is the specific error code checked in the code + + # Mock can_transact to throw the error + with patch.object(mock_eth_account, "can_transact", side_effect=error): + # Also mock get_super_token_balance for the error case + with patch.object( + mock_eth_account, "get_super_token_balance", return_value=0 + ): + # Should raise InsufficientFundsError for ALEPH token + with pytest.raises(InsufficientFundsError) as exc_info: + mock_superfluid._simulate_create_tx_flow(Decimal("0.00000005")) + + assert exc_info.value.token_type == TokenType.ALEPH + + @pytest.mark.asyncio + async def test_simulate_create_tx_flow_other_error( + self, mock_superfluid, mock_eth_account + ): + # Setup a different contract error code + error = ContractCustomError("Other error") + error.data = "0xsomeothercode" + + # Mock can_transact to throw the error + with patch.object(mock_eth_account, "can_transact", side_effect=error): + # Should return False for other errors + result = mock_superfluid._simulate_create_tx_flow(Decimal("0.00000005")) + assert result is False + + @pytest.mark.asyncio + async def test_can_start_flow_uses_simulation(self, mock_superfluid): + # Mock _simulate_create_tx_flow to verify it's called + with patch.object( + mock_superfluid, "_simulate_create_tx_flow", return_value=True + ) as mock_simulate: + result = mock_superfluid.can_start_flow(Decimal("0.00000005")) + + assert result is True + mock_simulate.assert_called_once_with( + flow=Decimal("0.00000005"), block=True + ) From bdc8019585ee257c5af9d73362e9eeaa81b5534e Mon Sep 17 00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> 
Date: Thu, 18 Sep 2025 17:09:44 +0200 Subject: [PATCH 102/122] Feature: Voucher integrations (#225) * feat: new field in conf for voucher * feat: Vouchers BaseModel # Conflicts: # src/aleph/sdk/types.py * Feat: voucher integrations * Feat: AuthenticatedVoucher integrations * fix: move fixture / metdata for voucher to conftest.py * fix: add vouchers and authenticated voucher to client * fix: remove debug print * Fix: only mock the get_posts from client and use real client * Fix: linting issue * Fix: import sort issue * refactor: using with patch.objects instead of direcly assign method for mypy * Fix: rename VOUCHER_SENDER to VOUCHER_ORIGIN_ADDRESS * fix: Vouchers import * fix: types.py lint issue * fix: get_stored_content should fetch data when removing --- src/aleph/sdk/client/authenticated_http.py | 3 +- src/aleph/sdk/client/http.py | 3 + .../client/services/authenticated_voucher.py | 62 +++++++ src/aleph/sdk/client/services/voucher.py | 164 ++++++++++++++++++ src/aleph/sdk/conf.py | 6 + src/aleph/sdk/types.py | 27 +++ tests/unit/conftest.py | 62 +++++++ .../services/test_authenticated_voucher.py | 111 ++++++++++++ tests/unit/services/test_voucher.py | 120 +++++++++++++ 9 files changed, 557 insertions(+), 1 deletion(-) create mode 100644 src/aleph/sdk/client/services/authenticated_voucher.py create mode 100644 src/aleph/sdk/client/services/voucher.py create mode 100644 tests/unit/services/test_authenticated_voucher.py create mode 100644 tests/unit/services/test_voucher.py diff --git a/src/aleph/sdk/client/authenticated_http.py b/src/aleph/sdk/client/authenticated_http.py index ae4b6b04..4528a5b7 100644 --- a/src/aleph/sdk/client/authenticated_http.py +++ b/src/aleph/sdk/client/authenticated_http.py @@ -39,6 +39,7 @@ from .abstract import AuthenticatedAlephClient from .http import AlephHttpClient from .services.authenticated_port_forwarder import AuthenticatedPortForwarder +from .services.authenticated_voucher import AuthenticatedVoucher logger = 
logging.getLogger(__name__) @@ -86,7 +87,7 @@ async def __aenter__(self): await super().__aenter__() # Override services with authenticated versions self.port_forwarder = AuthenticatedPortForwarder(self) - + self.voucher = AuthenticatedVoucher(self) return self async def ipfs_push(self, content: Mapping) -> str: diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index 723a43bf..3b1bb635 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -39,6 +39,7 @@ from aleph.sdk.client.services.port_forwarder import PortForwarder from aleph.sdk.client.services.pricing import Pricing from aleph.sdk.client.services.scheduler import Scheduler +from aleph.sdk.client.services.voucher import Vouchers from ..conf import settings from ..exceptions import ( @@ -137,6 +138,8 @@ async def __aenter__(self): self.scheduler = Scheduler(self) self.instance = Instance(self) self.pricing = Pricing(self) + self.voucher = Vouchers(self) + return self async def __aexit__(self, exc_type, exc_val, exc_tb): diff --git a/src/aleph/sdk/client/services/authenticated_voucher.py b/src/aleph/sdk/client/services/authenticated_voucher.py new file mode 100644 index 00000000..48d7d73d --- /dev/null +++ b/src/aleph/sdk/client/services/authenticated_voucher.py @@ -0,0 +1,62 @@ +from typing import TYPE_CHECKING, Optional, overload + +from typing_extensions import override + +from aleph.sdk.types import Voucher + +from .voucher import Vouchers + +if TYPE_CHECKING: + from aleph.sdk.client.abstract import AuthenticatedAlephClient + + +class AuthenticatedVoucher(Vouchers): + """ + This service is same logic than Vouchers but allow to don't pass address + to use account address + """ + + def __init__(self, client: "AuthenticatedAlephClient"): + super().__init__(client) + + @overload + def _resolve_address(self, address: str) -> str: ... + + @overload + def _resolve_address(self, address: None) -> str: ... 
+ + @override + def _resolve_address(self, address: Optional[str] = None) -> str: + """ + Resolve the address to use. Prefer the provided address, fallback to account. + """ + if address: + return address + if self._client.account: + return self._client.account.get_address() + + raise ValueError("No address provided and no account configured") + + @override + async def get_vouchers(self, address: Optional[str] = None) -> list[Voucher]: + """ + Retrieve all vouchers for the account / specific address, across EVM and Solana chains. + """ + address = address or self._client.account.get_address() + return await super().get_vouchers(address=address) + + @override + async def get_evm_vouchers(self, address: Optional[str] = None) -> list[Voucher]: + """ + Retrieve vouchers specific to EVM chains for a specific address. + """ + address = address or self._client.account.get_address() + return await super().get_evm_vouchers(address=address) + + @override + async def get_solana_vouchers(self, address: Optional[str] = None) -> list[Voucher]: + """ + Fetch Solana vouchers for a specific address. 
+ """ + address = address or self._client.account.get_address() + return await super().get_solana_vouchers(address=address) diff --git a/src/aleph/sdk/client/services/voucher.py b/src/aleph/sdk/client/services/voucher.py new file mode 100644 index 00000000..eef351c4 --- /dev/null +++ b/src/aleph/sdk/client/services/voucher.py @@ -0,0 +1,164 @@ +from typing import Optional + +import aiohttp +from aiohttp import ClientResponseError +from aleph_message.models import Chain + +from aleph.sdk.conf import settings +from aleph.sdk.query.filters import PostFilter +from aleph.sdk.query.responses import Post, PostsResponse +from aleph.sdk.types import Voucher, VoucherMetadata + + +class Vouchers: + """ + This service is made to fetch voucher (SOL / EVM) + """ + + def __init__(self, client): + self._client = client + + # Utils + def _resolve_address(self, address: str) -> str: + return address # Not Authenticated client so address need to be given + + async def _fetch_voucher_update(self): + """ + Fetch the latest EVM voucher update (unfiltered). + """ + + post_filter = PostFilter( + types=["vouchers-update"], addresses=[settings.VOUCHER_ORIGIN_ADDRESS] + ) + vouchers_post: PostsResponse = await self._client.get_posts( + post_filter=post_filter, page_size=1 + ) + + if not vouchers_post.posts: + return [] + + message_post: Post = vouchers_post.posts[0] + + nft_vouchers = message_post.content.get("nft_vouchers", {}) + return list(nft_vouchers.items()) # [(voucher_id, voucher_data)] + + async def _fetch_solana_voucher_list(self): + """ + Fetch full Solana voucher registry (unfiltered). + """ + try: + async with aiohttp.ClientSession() as session: + async with session.get(settings.VOUCHER_SOL_REGISTRY) as resp: + resp.raise_for_status() + return await resp.json() + except ClientResponseError: + return {} + + async def fetch_voucher_metadata( + self, metadata_id: str + ) -> Optional[VoucherMetadata]: + """ + Fetch metadata for a given voucher. 
+ """ + url = f"https://claim.twentysix.cloud/sbt/metadata/{metadata_id}.json" + try: + async with aiohttp.ClientSession() as session: + async with session.get(url) as resp: + resp.raise_for_status() + data = await resp.json() + return VoucherMetadata.model_validate(data) + except ClientResponseError: + return None + + async def get_solana_vouchers(self, address: str) -> list[Voucher]: + """ + Fetch Solana vouchers for a specific address. + """ + resolved_address = self._resolve_address(address=address) + vouchers: list[Voucher] = [] + + registry_data = await self._fetch_solana_voucher_list() + + claimed_tickets = registry_data.get("claimed_tickets", {}) + batches = registry_data.get("batches", {}) + + for ticket_hash, ticket_data in claimed_tickets.items(): + claimer = ticket_data.get("claimer") + if claimer != resolved_address: + continue + + batch_id = ticket_data.get("batch_id") + metadata_id = None + + if str(batch_id) in batches: + metadata_id = batches[str(batch_id)].get("metadata_id") + + if metadata_id: + metadata = await self.fetch_voucher_metadata(metadata_id) + if metadata: + voucher = Voucher( + id=ticket_hash, + metadata_id=metadata_id, + name=metadata.name, + description=metadata.description, + external_url=metadata.external_url, + image=metadata.image, + icon=metadata.icon, + attributes=metadata.attributes, + ) + vouchers.append(voucher) + + return vouchers + + async def get_evm_vouchers(self, address: str) -> list[Voucher]: + """ + Retrieve vouchers specific to EVM chains for a specific address. 
+ """ + resolved_address = self._resolve_address(address=address) + vouchers: list[Voucher] = [] + + nft_vouchers = await self._fetch_voucher_update() + for voucher_id, voucher_data in nft_vouchers: + if voucher_data.get("claimer") != resolved_address: + continue + + metadata_id = voucher_data.get("metadata_id") + metadata = await self.fetch_voucher_metadata(metadata_id) + if not metadata: + continue + + voucher = Voucher( + id=voucher_id, + metadata_id=metadata_id, + name=metadata.name, + description=metadata.description, + external_url=metadata.external_url, + image=metadata.image, + icon=metadata.icon, + attributes=metadata.attributes, + ) + vouchers.append(voucher) + return vouchers + + async def fetch_vouchers_by_chain(self, chain: Chain, address: str): + if chain == Chain.SOL: + return await self.get_solana_vouchers(address=address) + else: + return await self.get_evm_vouchers(address=address) + + async def get_vouchers(self, address: str) -> list[Voucher]: + """ + Retrieve all vouchers for the account / specific adress, across EVM and Solana chains. 
+ """ + vouchers = [] + + # Get EVM vouchers + if address.startswith("0x") and len(address) == 42: + evm_vouchers = await self.get_evm_vouchers(address=address) + vouchers.extend(evm_vouchers) + else: + # Get Solana vouchers + solana_vouchers = await self.get_solana_vouchers(address=address) + vouchers.extend(solana_vouchers) + + return vouchers diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index b2294274..02e8ec85 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -92,6 +92,12 @@ class Settings(BaseSettings): ) SCHEDULER_URL: ClassVar[str] = "https://scheduler.api.aleph.cloud/" + VOUCHER_METDATA_TEMPLATE_URL: str = ( + "https://claim.twentysix.cloud/sbt/metadata/{}.json" + ) + VOUCHER_SOL_REGISTRY: str = "https://api.claim.twentysix.cloud/v1/registry/sol" + VOUCHER_ORIGIN_ADDRESS: str = "0xB34f25f2c935bCA437C061547eA12851d719dEFb" + # Web3Provider settings TOKEN_DECIMALS: ClassVar[int] = 18 TX_TIMEOUT: ClassVar[int] = 60 * 3 diff --git a/src/aleph/sdk/types.py b/src/aleph/sdk/types.py index 83b17151..8ece7ede 100644 --- a/src/aleph/sdk/types.py +++ b/src/aleph/sdk/types.py @@ -1,5 +1,6 @@ from abc import abstractmethod from datetime import datetime +from decimal import Decimal from enum import Enum from typing import ( Any, @@ -341,3 +342,29 @@ def items(self): def get(self, key: str, default=None): return getattr(self, key, default) + + +class VoucherAttribute(BaseModel): + value: Union[str, Decimal] + trait_type: str = Field(..., alias="trait_type") + display_type: Optional[str] = Field(None, alias="display_type") + + +class VoucherMetadata(BaseModel): + name: str + description: str + external_url: str + image: str + icon: str + attributes: list[VoucherAttribute] + + +class Voucher(BaseModel): + id: str + metadata_id: str + name: str + description: str + external_url: str + image: str + icon: str + attributes: list[VoucherAttribute] diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 3ad0a4ad..5086703b 100644 --- 
a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -306,3 +306,65 @@ def post(self, *_args, **_kwargs): client._http_session = http_session return client + + +@pytest.fixture +def make_mock_aiohttp_session(): + def _make(mocked_json_response): + mock_response = AsyncMock() + mock_response.json.return_value = mocked_json_response + mock_response.raise_for_status.return_value = None + + session = MagicMock() + + get_cm = AsyncMock() + get_cm.__aenter__.return_value = mock_response + session.get.return_value = get_cm + + session_cm = AsyncMock() + session_cm.__aenter__.return_value = session + return session_cm + + return _make + + +# Constants needed for voucher tests +MOCK_ADDRESS = "0x1234567890123456789012345678901234567890" +MOCK_SOLANA_ADDRESS = "abcdefghijklmnopqrstuvwxyz123456789" +MOCK_METADATA_ID = "metadata123" +MOCK_VOUCHER_ID = "voucher123" +MOCK_METADATA = { + "name": "Test Voucher", + "description": "A test voucher", + "external_url": "https://example.com", + "image": "https://example.com/image.png", + "icon": "https://example.com/icon.png", + "attributes": [ + {"trait_type": "Test Trait", "value": "Test Value"}, + {"trait_type": "Numeric Trait", "value": "123", "display_type": "number"}, + ], +} + +MOCK_EVM_VOUCHER_DATA = [ + (MOCK_VOUCHER_ID, {"claimer": MOCK_ADDRESS, "metadata_id": MOCK_METADATA_ID}) +] + +MOCK_SOLANA_REGISTRY = { + "claimed_tickets": { + "solticket123": {"claimer": MOCK_SOLANA_ADDRESS, "batch_id": "batch123"} + }, + "batches": {"batch123": {"metadata_id": MOCK_METADATA_ID}}, +} + + +@pytest.fixture +def mock_post_response(): + mock_post = MagicMock() + mock_post.content = { + "nft_vouchers": { + MOCK_VOUCHER_ID: {"claimer": MOCK_ADDRESS, "metadata_id": MOCK_METADATA_ID} + } + } + posts_response = MagicMock() + posts_response.posts = [mock_post] + return posts_response diff --git a/tests/unit/services/test_authenticated_voucher.py b/tests/unit/services/test_authenticated_voucher.py new file mode 100644 index 00000000..bb83ea74 
--- /dev/null +++ b/tests/unit/services/test_authenticated_voucher.py @@ -0,0 +1,111 @@ +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest + +from aleph.sdk.client.services.authenticated_voucher import AuthenticatedVoucher + +from ..conftest import ( + MOCK_ADDRESS, + MOCK_METADATA, + MOCK_SOLANA_ADDRESS, + MOCK_SOLANA_REGISTRY, + MOCK_VOUCHER_ID, +) + + +def test_resolve_address_with_argument(): + client = MagicMock() + service = AuthenticatedVoucher(client=client) + assert service._resolve_address(address="custom-address") == "custom-address" + + +def test_resolve_address_with_account_fallback(): + mock_account = MagicMock() + mock_account.get_address.return_value = MOCK_ADDRESS + + client = MagicMock() + client.account = mock_account + + service = AuthenticatedVoucher(client=client) + assert service._resolve_address(address=None) == MOCK_ADDRESS + mock_account.get_address.assert_called_once() + + +def test_resolve_address_no_address_no_account(): + client = MagicMock() + client.account = None + + service = AuthenticatedVoucher(client=client) + + with pytest.raises( + ValueError, match="No address provided and no account configured" + ): + service._resolve_address(address=None) + + +@pytest.mark.asyncio +async def test_get_vouchers_fallback_to_account( + make_mock_aiohttp_session, mock_post_response +): + mock_account = MagicMock() + mock_account.get_address.return_value = MOCK_ADDRESS + + mock_client = MagicMock() + mock_client.account = mock_account + mock_client.get_posts = AsyncMock(return_value=mock_post_response) + + service = AuthenticatedVoucher(client=mock_client) + + metadata_session = make_mock_aiohttp_session(MOCK_METADATA) + + with patch("aiohttp.ClientSession", return_value=metadata_session): + vouchers = await service.get_vouchers() + + assert len(vouchers) == 1 + assert vouchers[0].name == MOCK_METADATA["name"] + mock_account.get_address.assert_called_once() + + +@pytest.mark.asyncio +async def 
test_get_evm_vouchers_fallback_to_account( + make_mock_aiohttp_session, mock_post_response +): + mock_account = MagicMock() + mock_account.get_address.return_value = MOCK_ADDRESS + + mock_client = MagicMock() + mock_client.account = mock_account + mock_client.get_posts = AsyncMock(return_value=mock_post_response) + + service = AuthenticatedVoucher(client=mock_client) + + metadata_session = make_mock_aiohttp_session(MOCK_METADATA) + + with patch("aiohttp.ClientSession", return_value=metadata_session): + vouchers = await service.get_evm_vouchers() + + assert len(vouchers) == 1 + assert vouchers[0].id == MOCK_VOUCHER_ID + + +@pytest.mark.asyncio +async def test_get_solana_vouchers_fallback_to_account(make_mock_aiohttp_session): + mock_account = MagicMock() + mock_account.get_address.return_value = MOCK_SOLANA_ADDRESS + + mock_client = MagicMock() + mock_client.account = mock_account + + service = AuthenticatedVoucher(client=mock_client) + + registry_session = make_mock_aiohttp_session(MOCK_SOLANA_REGISTRY) + metadata_session = make_mock_aiohttp_session(MOCK_METADATA) + + with patch( + "aiohttp.ClientSession", side_effect=[registry_session, metadata_session] + ): + vouchers = await service.get_solana_vouchers() + + assert len(vouchers) == 1 + assert vouchers[0].id == "solticket123" + assert vouchers[0].name == MOCK_METADATA["name"] diff --git a/tests/unit/services/test_voucher.py b/tests/unit/services/test_voucher.py new file mode 100644 index 00000000..7519ad19 --- /dev/null +++ b/tests/unit/services/test_voucher.py @@ -0,0 +1,120 @@ +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from aleph_message.models import Chain + +from aleph.sdk.client.http import AlephHttpClient +from aleph.sdk.client.services.voucher import Vouchers + +from ..conftest import ( + MOCK_ADDRESS, + MOCK_METADATA, + MOCK_SOLANA_ADDRESS, + MOCK_SOLANA_REGISTRY, + MOCK_VOUCHER_ID, +) + + +@pytest.mark.asyncio +async def test_get_evm_vouchers(mock_post_response, 
make_mock_aiohttp_session): + client = AlephHttpClient(api_server="http://localhost") + + # Patch only the get_posts who is used to fetch voucher update for EVM + with patch.object(client, "get_posts", AsyncMock(return_value=mock_post_response)): + voucher_service = Vouchers(client=client) + + session = make_mock_aiohttp_session(MOCK_METADATA) + + # Here we patch the client sessions who gonna fetch the metdata of the NFT + with patch("aiohttp.ClientSession", return_value=session): + vouchers = await voucher_service.get_evm_vouchers(MOCK_ADDRESS) + + assert len(vouchers) == 1 + assert vouchers[0].id == MOCK_VOUCHER_ID + assert vouchers[0].name == MOCK_METADATA["name"] + + +@pytest.mark.asyncio +async def test_get_solana_vouchers(make_mock_aiohttp_session): + client = AlephHttpClient(api_server="http://localhost") + voucher_service = Vouchers(client=client) + + registry_session = make_mock_aiohttp_session(MOCK_SOLANA_REGISTRY) + metadata_session = make_mock_aiohttp_session(MOCK_METADATA) + + # Here we patch the fetch of the registry made on + # https://api.claim.twentysix.cloud/v1/registry/solanna + # and we also patch the fetch of the metadata + # https://claim.twentysix.cloud/sbt/metadata/{}.json + with patch( + "aiohttp.ClientSession", side_effect=[registry_session, metadata_session] + ): + vouchers = await voucher_service.get_solana_vouchers(MOCK_SOLANA_ADDRESS) + + assert len(vouchers) == 1 + assert vouchers[0].id == "solticket123" + assert vouchers[0].name == MOCK_METADATA["name"] + + +@pytest.mark.asyncio +async def test_fetch_vouchers_by_chain_for_evm( + mock_post_response, make_mock_aiohttp_session +): + client = AlephHttpClient(api_server="http://localhost") + with patch.object(client, "get_posts", AsyncMock(return_value=mock_post_response)): + voucher_service = Vouchers(client=client) + + metadata_session = make_mock_aiohttp_session(MOCK_METADATA) + with patch("aiohttp.ClientSession", return_value=metadata_session): + vouchers = await 
voucher_service.fetch_vouchers_by_chain( + Chain.ETH, MOCK_ADDRESS + ) + + assert len(vouchers) == 1 + assert vouchers[0].id == "voucher123" + + +@pytest.mark.asyncio +async def test_fetch_vouchers_by_chain_for_solana(make_mock_aiohttp_session): + mock_client = MagicMock() + voucher_service = Vouchers(client=mock_client) + + registry_session = make_mock_aiohttp_session(MOCK_SOLANA_REGISTRY) + metadata_session = make_mock_aiohttp_session(MOCK_METADATA) + + with patch( + "aiohttp.ClientSession", side_effect=[registry_session, metadata_session] + ): + vouchers = await voucher_service.fetch_vouchers_by_chain( + Chain.SOL, MOCK_SOLANA_ADDRESS + ) + + assert len(vouchers) == 1 + assert vouchers[0].id == "solticket123" + + +@pytest.mark.asyncio +async def test_get_vouchers_detects_chain( + make_mock_aiohttp_session, mock_post_response +): + client = AlephHttpClient(api_server="http://localhost") + with patch.object(client, "get_posts", AsyncMock(return_value=mock_post_response)): + voucher_service = Vouchers(client=client) + + # EVM + metadata_session = make_mock_aiohttp_session(MOCK_METADATA) + with patch("aiohttp.ClientSession", return_value=metadata_session): + vouchers = await voucher_service.get_vouchers(MOCK_ADDRESS) + assert len(vouchers) == 1 + assert vouchers[0].id == "voucher123" + + # Solana + registry_session = make_mock_aiohttp_session(MOCK_SOLANA_REGISTRY) + metadata_session = make_mock_aiohttp_session(MOCK_METADATA) + + with patch( + "aiohttp.ClientSession", side_effect=[registry_session, metadata_session] + ): + vouchers = await voucher_service.get_vouchers(MOCK_SOLANA_ADDRESS) + assert len(vouchers) == 1 + assert vouchers[0].id == "solticket123" From d96a6a8b5c58ec47f7586a171ce0108440d4a63f Mon Sep 17 00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Thu, 18 Sep 2025 17:27:17 +0200 Subject: [PATCH 103/122] Feature: Credis System (#233) * feat: pricing services client * fix: utils func to calculate compute unit as pyaleph do * feat: 
GPU utils func to get currents state of gpu on network * feature: new utils extract_valid_eth_address * Refactor: CrnList to use DictLikeModel instead of dit and apply logic for GPU / Fetch crn on it * Feature: Response and filter for credits endpoints * Feature: new method to call api/v0/addresses/{address}/credit_balance and /api/v0/credit_balances * Feature: handle credits in pricing service * Fix: add credits field to mocked pricing aggregate * feature: new unit test for credits * Feature: new CREDIT element on TokenType enum * Missing credits for program persistent mocked aggregate * Feature: messages statues filter * Fix: ensure we got processed or REMOVING status when fetching for instance * Fix: remove CreditsFilter & replace it with new balance filter * Feature (replacing credit_balances): Credits history * Refactor: test_credits.py to verify that good network request are made instead of just verify the mocked data is well return * Feature: missing get_balances from sdk (with new field) * fix: linting issue * fix: remove unused `math` import * fix: removed useless assert * fix: remove api /credit_balance does not exist anymore --- src/aleph/sdk/client/http.py | 45 ++++++++++- src/aleph/sdk/client/services/instance.py | 1 + src/aleph/sdk/client/services/pricing.py | 1 + src/aleph/sdk/query/filters.py | 37 ++++++++- src/aleph/sdk/query/responses.py | 34 ++++++++ src/aleph/sdk/types.py | 1 + tests/unit/services/pricing_aggregate.json | 39 ++++++---- tests/unit/test_balance.py | 39 ++++++++++ tests/unit/test_credits.py | 90 ++++++++++++++++++++++ 9 files changed, 271 insertions(+), 16 deletions(-) create mode 100644 tests/unit/test_balance.py create mode 100644 tests/unit/test_credits.py diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index 3b1bb635..f8eb674b 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -50,8 +50,15 @@ RemovedMessageError, ResourceNotFoundError, ) -from ..query.filters import 
MessageFilter, PostFilter -from ..query.responses import MessagesResponse, Post, PostsResponse, PriceResponse +from ..query.filters import BalanceFilter, MessageFilter, PostFilter +from ..query.responses import ( + BalanceResponse, + CreditsHistoryResponse, + MessagesResponse, + Post, + PostsResponse, + PriceResponse, +) from ..types import GenericMessage, StoredContent from ..utils import ( Writable, @@ -596,3 +603,37 @@ async def get_stored_content( if result else StoredContent(error=resp, filename=None, hash=None, url=None) ) + + async def get_credit_history( + self, + address: str, + page_size: int = 200, + page: int = 1, + ) -> CreditsHistoryResponse: + """Return List of credits balance for all addresses""" + + params = { + "page": str(page), + "pagination": str(page_size), + } + + async with self.http_session.get( + f"/api/v0/addresses/{address}/credit_history", params=params + ) as resp: + resp.raise_for_status() + result = await resp.json() + return CreditsHistoryResponse.model_validate(result) + + async def get_balances( + self, + address: str, + filter: Optional[BalanceFilter] = None, + ) -> BalanceResponse: + + async with self.http_session.get( + f"/api/v0/addresses/{address}/balance", + params=filter.as_http_params() if filter else None, + ) as resp: + resp.raise_for_status() + result = await resp.json() + return BalanceResponse.model_validate(result) diff --git a/src/aleph/sdk/client/services/instance.py b/src/aleph/sdk/client/services/instance.py index dbff386f..034ee4b9 100644 --- a/src/aleph/sdk/client/services/instance.py +++ b/src/aleph/sdk/client/services/instance.py @@ -77,6 +77,7 @@ async def get_instances(self, address: str) -> List[InstanceMessage]: message_filter=MessageFilter( message_types=[MessageType.instance], addresses=[address], + message_statuses=[MessageStatus.PROCESSED, MessageStatus.REMOVING], ), page_size=100, ) diff --git a/src/aleph/sdk/client/services/pricing.py b/src/aleph/sdk/client/services/pricing.py index 
acf7c214..9c19eb0e 100644 --- a/src/aleph/sdk/client/services/pricing.py +++ b/src/aleph/sdk/client/services/pricing.py @@ -40,6 +40,7 @@ class Price(BaseModel): payg: Optional[Decimal] = None holding: Optional[Decimal] = None fixed: Optional[Decimal] = None + credit: Optional[Decimal] = None class ComputeUnit(BaseModel): diff --git a/src/aleph/sdk/query/filters.py b/src/aleph/sdk/query/filters.py index 4caee5f5..18f8b3f7 100644 --- a/src/aleph/sdk/query/filters.py +++ b/src/aleph/sdk/query/filters.py @@ -2,7 +2,7 @@ from enum import Enum from typing import Dict, Iterable, Optional, Union -from aleph_message.models import MessageType +from aleph_message.models import Chain, MessageType from ..utils import _date_field_to_timestamp, enum_as_str, serialize_list @@ -56,6 +56,7 @@ class MessageFilter: def __init__( self, message_types: Optional[Iterable[MessageType]] = None, + message_statuses: Optional[Iterable[str]] = None, content_types: Optional[Iterable[str]] = None, content_keys: Optional[Iterable[str]] = None, refs: Optional[Iterable[str]] = None, @@ -82,6 +83,7 @@ def __init__( self.end_date = end_date self.sort_by = sort_by self.sort_order = sort_order + self.message_statuses = message_statuses def as_http_params(self) -> Dict[str, str]: """Convert the filters into a dict that can be used by an `aiohttp` client @@ -95,6 +97,7 @@ def as_http_params(self) -> Dict[str, str]: else None ), "contentTypes": serialize_list(self.content_types), + "message_statuses": serialize_list(self.message_statuses), "contentKeys": serialize_list(self.content_keys), "refs": serialize_list(self.refs), "addresses": serialize_list(self.addresses), @@ -193,3 +196,35 @@ def as_http_params(self) -> Dict[str, str]: result[key] = value return result + + +class BalanceFilter: + """ + A collection of filters that can be applied on Balance queries. 
+ """ + + chain: Optional[Chain] + + def __init__( + self, + chain: Optional[Chain] = None, + ): + self.chain = chain + + def as_http_params(self) -> Dict[str, str]: + """Convert the filters into a dict that can be used by an `aiohttp` client + as `params` to build the HTTP query string. + """ + + partial_result = {"chain": enum_as_str(self.chain)} + + # Ensure all values are strings. + result: Dict[str, str] = {} + + # Drop empty values + for key, value in partial_result.items(): + if value: + assert isinstance(value, str), f"Value must be a string: `{value}`" + result[key] = value + + return result diff --git a/src/aleph/sdk/query/responses.py b/src/aleph/sdk/query/responses.py index 277a1bea..3872a3a2 100644 --- a/src/aleph/sdk/query/responses.py +++ b/src/aleph/sdk/query/responses.py @@ -1,5 +1,7 @@ from __future__ import annotations +import datetime as dt +from decimal import Decimal from typing import Any, Dict, List, Optional, Union from aleph_message.models import ( @@ -79,3 +81,35 @@ class PriceResponse(BaseModel): required_tokens: float payment_type: str + + +class CreditsHistoryResponse(PaginationResponse): + """Response from an aleph.im node API on the path /api/v0/credits""" + + address: str + credit_balances: List[CreditHistoryResponseItem] + pagination_item: str = "credit_history" + + +class CreditHistoryResponseItem(BaseModel): + amount: int + ratio: Optional[Decimal] = None + tx_hash: Optional[str] = None + token: Optional[str] = None + chain: Optional[str] = None + provider: Optional[str] = None + origin: Optional[str] = None + origin_ref: Optional[str] = None + payment_method: Optional[str] = None + credit_ref: str + credit_index: int + expiration_date: Optional[dt.datetime] = None + message_timestamp: dt.datetime + + +class BalanceResponse(BaseModel): + address: str + balance: Decimal + details: Optional[Dict[str, Decimal]] = None + locked_amount: Decimal + credit_balance: int = 0 diff --git a/src/aleph/sdk/types.py b/src/aleph/sdk/types.py 
index 8ece7ede..e76aedbc 100644 --- a/src/aleph/sdk/types.py +++ b/src/aleph/sdk/types.py @@ -120,6 +120,7 @@ class TokenType(str, Enum): GAS = "GAS" ALEPH = "ALEPH" + CREDIT = "CREDIT" # Scheduler diff --git a/tests/unit/services/pricing_aggregate.json b/tests/unit/services/pricing_aggregate.json index 2da0dbb8..70f747ef 100644 --- a/tests/unit/services/pricing_aggregate.json +++ b/tests/unit/services/pricing_aggregate.json @@ -6,11 +6,13 @@ "price": { "storage": { "payg": "0.000000977", - "holding": "0.05" + "holding": "0.05", + "credit": "0.000000977" }, "compute_unit": { "payg": "0.011", - "holding": "200" + "holding": "200", + "credit": "0.011" } }, "tiers": [ @@ -48,7 +50,8 @@ "storage": { "price": { "storage": { - "holding": "0.333333333" + "holding": "0.333333333", + "credit": "0.333333333" } } }, @@ -56,11 +59,13 @@ "price": { "storage": { "payg": "0.000000977", - "holding": "0.05" + "holding": "0.05", + "credit": "0.000000977" }, "compute_unit": { "payg": "0.055", - "holding": "1000" + "holding": "1000", + "credit": "0.055" } }, "tiers": [ @@ -107,11 +112,13 @@ "price": { "storage": { "payg": "0.000000977", - "holding": "0.05" + "holding": "0.05", + "credit": "0.000000977" }, "compute_unit": { "payg": "0.055", - "holding": "1000" + "holding": "1000", + "credit": "0.055" } }, "tiers": [ @@ -149,10 +156,12 @@ "instance_gpu_premium": { "price": { "storage": { - "payg": "0.000000977" + "payg": "0.000000977", + "credit": "0.000000977" }, "compute_unit": { - "payg": "0.56" + "payg": "0.56", + "credit": "0.56" } }, "tiers": [ @@ -179,11 +188,13 @@ "price": { "storage": { "payg": "0.000000977", - "holding": "0.05" + "holding": "0.05", + "credit": "0.000000977" }, "compute_unit": { "payg": "0.11", - "holding": "2000" + "holding": "2000", + "credit": "0.11" } }, "tiers": [ @@ -221,10 +232,12 @@ "instance_gpu_standard": { "price": { "storage": { - "payg": "0.000000977" + "payg": "0.000000977", + "credit": "0.000000977" }, "compute_unit": { - "payg": "0.28" + "payg": 
"0.28", + "credit": "0.28" } }, "tiers": [ diff --git a/tests/unit/test_balance.py b/tests/unit/test_balance.py new file mode 100644 index 00000000..793baa7d --- /dev/null +++ b/tests/unit/test_balance.py @@ -0,0 +1,39 @@ +from unittest.mock import patch + +import pytest + +from aleph.sdk.query.responses import BalanceResponse +from tests.unit.conftest import make_mock_get_session + + +@pytest.mark.asyncio +async def test_get_balances(): + """ + Test that the get_balances method returns the correct BalanceResponse + for a specific address when called on the AlephHttpClient. + """ + address = "0xd463495a6FEaC9921FD0C3a595B81E7B2C02B24d" + + balance_data = { + "address": address, + "balance": 351.25, + "details": {"ETH": 100.5, "SOL": 250.75}, + "locked_amount": 50.0, + "credit_balance": 1000, + } + + mock_client = make_mock_get_session(balance_data) + + expected_url = f"/api/v0/addresses/{address}/balance" + # Adding type assertion to handle None case + assert mock_client._http_session is not None + with patch.object( + mock_client._http_session, "get", wraps=mock_client._http_session.get + ) as spy: + async with mock_client: + response = await mock_client.get_balances(address) + + # Verify the response + assert isinstance(response, BalanceResponse) + # Verify the balances command calls the correct URL + spy.assert_called_once_with(expected_url, params=None) diff --git a/tests/unit/test_credits.py b/tests/unit/test_credits.py new file mode 100644 index 00000000..6e4cbb90 --- /dev/null +++ b/tests/unit/test_credits.py @@ -0,0 +1,90 @@ +from unittest.mock import patch + +import pytest + +from aleph.sdk.query.responses import CreditsHistoryResponse +from tests.unit.conftest import make_mock_get_session + + +@pytest.mark.asyncio +async def test_get_credits_history(): + """ + Test credits history commands + """ + address = "0xd463495a6FEaC9921FD0C3a595B81E7B2C02B24d" + + # Mock data for credit history + credit_history_data = { + "address": address, + "credit_balances": [ 
+ { + "amount": 1000, + "ratio": 1.0, + "tx_hash": "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef", + "token": "ALEPH", + "chain": "ETH", + "provider": "gateway", + "origin": "purchase", + "origin_ref": "0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890", + "payment_method": "token", + "credit_ref": "init_credit_1", + "credit_index": 1, + "expiration_date": "2025-12-31T23:59:59Z", + "message_timestamp": "2023-01-01T12:00:00Z", + }, + { + "amount": -100, + "ratio": None, + "tx_hash": None, + "token": None, + "chain": None, + "provider": "node1.aleph.im", + "origin": "vm_usage", + "origin_ref": "vm_instance_123456", + "payment_method": None, + "credit_ref": "vm_consumption_1", + "credit_index": 2, + "expiration_date": None, + "message_timestamp": "2023-01-15T14:30:00Z", + }, + { + "amount": 500, + "ratio": 0.8, + "tx_hash": "0x9876543210fedcba9876543210fedcba9876543210fedcba9876543210fedcba", + "token": "ALEPH", + "chain": "ETH", + "provider": "gateway", + "origin": "purchase", + "origin_ref": "0xfedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210", + "payment_method": "token", + "credit_ref": "add_credit_1", + "credit_index": 3, + "expiration_date": "2026-06-30T23:59:59Z", + "message_timestamp": "2023-02-01T09:15:00Z", + }, + ], + "pagination_page": 1, + "pagination_total": 1, + "pagination_per_page": 200, + "pagination_item": "credit_history", + } + + mock_client = make_mock_get_session(credit_history_data) + + # Test the method with a specific address + expected_url = f"/api/v0/addresses/{address}/credit_history" + # Adding type assertion to handle None case + assert mock_client._http_session is not None + with patch.object( + mock_client._http_session, "get", wraps=mock_client._http_session.get + ) as spy: + async with mock_client: + response = await mock_client.get_credit_history(address) + + # Verify the response + assert isinstance(response, CreditsHistoryResponse) + # Verify the credits history commands call 
the correct url + spy.assert_called_once_with( + expected_url, params={"page": "1", "pagination": "200"} + ) + assert len(response.credit_balances) == 3 From 07c6bd0d3ceb19aed04d3c28e1fd2c90ac1fb3ec Mon Sep 17 00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Tue, 30 Sep 2025 13:30:12 +0200 Subject: [PATCH 104/122] Fix: `credit_balances` does not exist (#237) * fix: `credit_balances` does not exist anymore replaced by credit_history` * fix: docstring on get_credit_history --- src/aleph/sdk/client/http.py | 2 +- src/aleph/sdk/query/responses.py | 2 +- tests/unit/test_credits.py | 61 ++++++++++++-------------------- 3 files changed, 25 insertions(+), 40 deletions(-) diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index f8eb674b..0ce33a7a 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -610,7 +610,7 @@ async def get_credit_history( page_size: int = 200, page: int = 1, ) -> CreditsHistoryResponse: - """Return List of credits balance for all addresses""" + """Return List of credits history for a specific addresses""" params = { "page": str(page), diff --git a/src/aleph/sdk/query/responses.py b/src/aleph/sdk/query/responses.py index 3872a3a2..b5958d47 100644 --- a/src/aleph/sdk/query/responses.py +++ b/src/aleph/sdk/query/responses.py @@ -87,7 +87,7 @@ class CreditsHistoryResponse(PaginationResponse): """Response from an aleph.im node API on the path /api/v0/credits""" address: str - credit_balances: List[CreditHistoryResponseItem] + credit_history: List[CreditHistoryResponseItem] pagination_item: str = "credit_history" diff --git a/tests/unit/test_credits.py b/tests/unit/test_credits.py index 6e4cbb90..fb2e6d95 100644 --- a/tests/unit/test_credits.py +++ b/tests/unit/test_credits.py @@ -16,51 +16,36 @@ async def test_get_credits_history(): # Mock data for credit history credit_history_data = { "address": address, - "credit_balances": [ + "credit_history": [ { - "amount": 1000, - "ratio": 1.0, 
- "tx_hash": "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef", - "token": "ALEPH", - "chain": "ETH", - "provider": "gateway", - "origin": "purchase", - "origin_ref": "0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890", - "payment_method": "token", - "credit_ref": "init_credit_1", - "credit_index": 1, - "expiration_date": "2025-12-31T23:59:59Z", - "message_timestamp": "2023-01-01T12:00:00Z", - }, - { - "amount": -100, + "amount": -22, "ratio": None, "tx_hash": None, "token": None, "chain": None, - "provider": "node1.aleph.im", - "origin": "vm_usage", - "origin_ref": "vm_instance_123456", - "payment_method": None, - "credit_ref": "vm_consumption_1", - "credit_index": 2, + "provider": "ALEPH", + "origin": None, + "origin_ref": "212f4825dd30e01f3801cdff1bdf8cd4d1c14ce2d31d695aee429d2ad0dfcba1", + "payment_method": "credit_expense", + "credit_ref": "cd77a7983af168941fd011427c6198b146ccd6f85077e0b593a4e7239d45fb11", + "credit_index": 0, "expiration_date": None, - "message_timestamp": "2023-01-15T14:30:00Z", + "message_timestamp": "2025-09-30T06:57:26.106000Z", }, { - "amount": 500, - "ratio": 0.8, - "tx_hash": "0x9876543210fedcba9876543210fedcba9876543210fedcba9876543210fedcba", - "token": "ALEPH", - "chain": "ETH", - "provider": "gateway", - "origin": "purchase", - "origin_ref": "0xfedcba9876543210fedcba9876543210fedcba9876543210fedcba9876543210", - "payment_method": "token", - "credit_ref": "add_credit_1", - "credit_index": 3, - "expiration_date": "2026-06-30T23:59:59Z", - "message_timestamp": "2023-02-01T09:15:00Z", + "amount": -22, + "ratio": None, + "tx_hash": None, + "token": None, + "chain": None, + "provider": "ALEPH", + "origin": None, + "origin_ref": "36ceb85fb570fc87a6b906dc89df39129a971de96cbc56250553cfb8d49487e3", + "payment_method": "credit_expense", + "credit_ref": "5881c8f813ea186b25a9a20d9bea46e2082c4d61c2b9e7d53bf8a164dc892b73", + "credit_index": 0, + "expiration_date": None, + "message_timestamp": 
"2025-09-30T02:57:07.673000Z", }, ], "pagination_page": 1, @@ -87,4 +72,4 @@ async def test_get_credits_history(): spy.assert_called_once_with( expected_url, params={"page": "1", "pagination": "200"} ) - assert len(response.credit_balances) == 3 + assert len(response.credit_history) == 2 From 03756d09ce26a7d23ecc6a12b280ba1af51dbcc8 Mon Sep 17 00:00:00 2001 From: nesitor Date: Thu, 2 Oct 2025 12:52:53 +0200 Subject: [PATCH 105/122] Fix: Solved wrong derivation path on message signing from Ledger device. (#238) --- src/aleph/sdk/wallets/ledger/ethereum.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/aleph/sdk/wallets/ledger/ethereum.py b/src/aleph/sdk/wallets/ledger/ethereum.py index 5dc40f03..18712a0a 100644 --- a/src/aleph/sdk/wallets/ledger/ethereum.py +++ b/src/aleph/sdk/wallets/ledger/ethereum.py @@ -68,7 +68,9 @@ async def sign_message(self, message: Dict) -> Dict: # TODO: Check why the code without a wallet uses `encode_defunct`. msghash: bytes = get_verification_buffer(message) - sig: SignedMessage = sign_message(msghash, dongle=self._device) + sig: SignedMessage = sign_message( + msghash, dongle=self._device, sender_path=self._account.path + ) signature: HexStr = sig.signature @@ -77,7 +79,9 @@ async def sign_message(self, message: Dict) -> Dict: async def sign_raw(self, buffer: bytes) -> bytes: """Sign a raw buffer.""" - sig: SignedMessage = sign_message(buffer, dongle=self._device) + sig: SignedMessage = sign_message( + buffer, dongle=self._device, sender_path=self._account.path + ) signature: HexStr = sig.signature return bytes_from_hex(signature) From dbc40a0ff2a06eb1d997c0e3ccb0b99905d4aa6a Mon Sep 17 00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Wed, 8 Oct 2025 13:45:39 +0200 Subject: [PATCH 106/122] Fix: filter on crn list & port-forwarding (#239) * fix: allow None type for ports when they are not set / removed by front * fix: allow all node to be found * fix: get_program_price should now 
take priority on cost field if it exist * Feature: new field for PriceRespond `cost` instead of filling required_token with cost --- src/aleph/sdk/client/http.py | 9 ++++- src/aleph/sdk/client/services/crn.py | 3 +- src/aleph/sdk/query/responses.py | 3 +- src/aleph/sdk/types.py | 2 +- tests/unit/test_price.py | 52 ++++++++++++++++++++++++++++ 5 files changed, 64 insertions(+), 5 deletions(-) diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index 0ce33a7a..c1facba1 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -531,7 +531,10 @@ async def get_estimated_price( try: resp.raise_for_status() response_json = await resp.json() + cost = response_json.get("cost", None) + return PriceResponse( + cost=cost, required_tokens=response_json["required_tokens"], payment_type=response_json["payment_type"], ) @@ -543,8 +546,12 @@ async def get_program_price(self, item_hash: str) -> PriceResponse: try: resp.raise_for_status() response_json = await resp.json() + cost = response_json.get("cost", None) + required_tokens = response_json["required_tokens"] + return PriceResponse( - required_tokens=response_json["required_tokens"], + required_tokens=required_tokens, + cost=cost, payment_type=response_json["payment_type"], ) except aiohttp.ClientResponseError as e: diff --git a/src/aleph/sdk/client/services/crn.py b/src/aleph/sdk/client/services/crn.py index e8d57c8c..82cec51b 100644 --- a/src/aleph/sdk/client/services/crn.py +++ b/src/aleph/sdk/client/services/crn.py @@ -230,9 +230,8 @@ async def get_crns_list(self, only_active: bool = True) -> CrnList: dict The parsed JSON response from /crns.json. 
""" - # We want filter_inactive = (not only_active) # Convert bool to string for the query parameter - filter_inactive_str = str(not only_active).lower() + filter_inactive_str = str(only_active).lower() params = {"filter_inactive": filter_inactive_str} # Create a new session for external domain requests diff --git a/src/aleph/sdk/query/responses.py b/src/aleph/sdk/query/responses.py index b5958d47..6efade14 100644 --- a/src/aleph/sdk/query/responses.py +++ b/src/aleph/sdk/query/responses.py @@ -79,7 +79,8 @@ class MessagesResponse(PaginationResponse): class PriceResponse(BaseModel): """Response from an aleph.im node API on the path /api/v0/price/{item_hash}""" - required_tokens: float + required_tokens: Decimal + cost: Optional[str] = None payment_type: str diff --git a/src/aleph/sdk/types.py b/src/aleph/sdk/types.py index e76aedbc..b839a14b 100644 --- a/src/aleph/sdk/types.py +++ b/src/aleph/sdk/types.py @@ -309,7 +309,7 @@ class Ports(BaseModel): ports: Dict[int, PortFlags] -AllForwarders = RootModel[Dict[ItemHash, Ports]] +AllForwarders = RootModel[Dict[ItemHash, Optional[Ports]]] class DictLikeModel(BaseModel): diff --git a/tests/unit/test_price.py b/tests/unit/test_price.py index e60680f8..f2759193 100644 --- a/tests/unit/test_price.py +++ b/tests/unit/test_price.py @@ -1,3 +1,5 @@ +from decimal import Decimal + import pytest from aleph.sdk.exceptions import InvalidHashError @@ -21,6 +23,56 @@ async def test_get_program_price_valid(): assert response == expected +@pytest.mark.asyncio +async def test_get_program_price_cost_and_required_token(): + """ + Test that the get_program_price method returns the correct PriceResponse + when + 1 ) cost & required_token is here (priority to cost) who is a string that convert to decimal + 2 ) When only required_token is here who is a float that now would be to be convert to decimal + """ + # Case 1 + expected = { + "required_tokens": 0.001527777777777778, + "cost": "0.001527777777777777", + "payment_type": "credit", + } + + 
# Case 2 + expected_old = { + "required_tokens": 0.001527777777777778, + "payment_type": "credit", + } + + # Expected model using the cost field as the source of truth + expected_model = PriceResponse( + required_tokens=Decimal("0.001527777777777778"), + cost=expected["cost"], + payment_type=expected["payment_type"], + ) + + # Expected model for the old format + expected_model_old = PriceResponse( + required_tokens=Decimal(str(expected_old["required_tokens"])), + payment_type=expected_old["payment_type"], + ) + + mock_session = make_mock_get_session(expected) + mock_session_old = make_mock_get_session(expected_old) + + async with mock_session: + response = await mock_session.get_program_price("cacacacacacaca") + assert str(response.required_tokens) == str(expected_model.required_tokens) + assert response.cost == expected_model.cost + assert response.payment_type == expected_model.payment_type + + async with mock_session_old: + response = await mock_session_old.get_program_price("cacacacacacaca") + assert str(response.required_tokens) == str(expected_model_old.required_tokens) + assert response.cost == expected_model_old.cost + assert response.payment_type == expected_model_old.payment_type + + @pytest.mark.asyncio async def test_get_program_price_invalid(): """ From 681291df17f6ecc668142196dfaffd90e9b19548 Mon Sep 17 00:00:00 2001 From: nesitor Date: Wed, 8 Oct 2025 14:23:38 +0200 Subject: [PATCH 107/122] Upgrade `aleph_message` version to `1.0.5` (#240) * Fix: Upgrade `aleph_message` version to `1.0.5`. 
* Fix: Solve model_validator signature with the proper one --- pyproject.toml | 2 +- tests/unit/aleph_vm_authentication.py | 15 ++++++++------- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c148831c..3b2d3d16 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ dynamic = [ "version" ] dependencies = [ "aiohttp>=3.8.3", "aioresponses>=0.7.6", - "aleph-message>=1.0.4", + "aleph-message>=1.0.5", "aleph-superfluid>=0.3", "base58==2.1.1", # Needed now as default with _load_account changement "coincurve; python_version>='3.9'", diff --git a/tests/unit/aleph_vm_authentication.py b/tests/unit/aleph_vm_authentication.py index c1710c16..d25b5177 100644 --- a/tests/unit/aleph_vm_authentication.py +++ b/tests/unit/aleph_vm_authentication.py @@ -16,6 +16,7 @@ from jwcrypto import jwk from jwcrypto.jwa import JWA from pydantic import BaseModel, ValidationError, field_validator, model_validator +from typing_extensions import Self from aleph.sdk.utils import bytes_from_hex @@ -76,28 +77,28 @@ def payload_must_be_hex(cls, value: bytes) -> bytes: return bytes_from_hex(value.decode()) @model_validator(mode="after") # type: ignore - def check_expiry(cls, values: SignedPubKeyHeader) -> SignedPubKeyHeader: + def check_expiry(self) -> Self: """Check that the token has not expired""" - payload: bytes = values.payload + payload: bytes = self.payload content = SignedPubKeyPayload.model_validate_json(payload) if not is_token_still_valid(content.expires): msg = "Token expired" raise ValueError(msg) - return values + return self @model_validator(mode="after") # type: ignore - def check_signature(cls, values: SignedPubKeyHeader) -> SignedPubKeyHeader: - signature: bytes = values.signature - payload: bytes = values.payload + def check_signature(self) -> Self: + signature: bytes = self.signature + payload: bytes = self.payload content = SignedPubKeyPayload.model_validate_json(payload) if not verify_wallet_signature(signature, 
payload.hex(), content.address): msg = "Invalid signature" raise ValueError(msg) - return values + return self @property def content(self) -> SignedPubKeyPayload: From 5f43f19245273b409d8d058d07a7554373e740f6 Mon Sep 17 00:00:00 2001 From: "Alie.E" Date: Mon, 27 Oct 2025 15:36:34 +0100 Subject: [PATCH 108/122] Fix domain ownership check (erc20 addr checksum format to lowercase) (#241) --- src/aleph/sdk/domain.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/aleph/sdk/domain.py b/src/aleph/sdk/domain.py index 525e6cef..79a0c5d4 100644 --- a/src/aleph/sdk/domain.py +++ b/src/aleph/sdk/domain.py @@ -207,7 +207,10 @@ async def check_domain( if entries: if record_type == "txt": for entry in entries: - if hasattr(entry, "text") and entry.text == record_value: + if ( + hasattr(entry, "text") + and str(entry.text).lower() == str(record_value).lower() + ): status[dns_rule.name] = True break elif ( From 3ade431bc512709d53939aa8f3bac610fdb5c0a9 Mon Sep 17 00:00:00 2001 From: Olivier Desenfans Date: Mon, 27 Oct 2025 23:16:04 +0100 Subject: [PATCH 109/122] ci: fix tests on macOS 14 + Python 3.9 (#242) Avoid compiling pyobjc-core and requiring a compiler for this specific combination. We now use the precompiled pyobjc wheel (<=11.0) to avoid the issue. --- .github/workflows/pytest.yml | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/.github/workflows/pytest.yml b/.github/workflows/pytest.yml index 75bc8193..c2e08466 100644 --- a/.github/workflows/pytest.yml +++ b/.github/workflows/pytest.yml @@ -34,6 +34,14 @@ jobs: echo "DYLD_LIBRARY_PATH=$(brew --prefix libsodium)/lib" >> $GITHUB_ENV if: runner.os == 'macOS' + # Workaround to avoid building pyobjc-core on macOS14 + Python 3.9. Support for Python 3.9 will be dropped + # once we support a more recent version of Python on functions. 
+ - name: Avoid building pyobjc-core on macOS+Py3.9 + if: runner.os == 'macOS' && matrix.python-version == '3.9' + run: | + echo "pyobjc-core<12" > /tmp/constraints.txt + echo "PIP_CONSTRAINT=/tmp/constraints.txt" >> $GITHUB_ENV + - name: "Install Hatch" run: | python3 -m venv /tmp/venv From b79a11c55c8edae569f8de0895c0558cea8bb20e Mon Sep 17 00:00:00 2001 From: nesitor Date: Wed, 26 Nov 2025 16:34:16 +0100 Subject: [PATCH 110/122] Implement Ledger (#231) * Problem: Ledger wallet users cannot use Aleph to send transactions. Solution: Implement Ledger support in the SDK to allow using such wallets. * Fix: Solved linting and typing issues for code quality. * Fix: Solved issue calling Ledger for supervisor. * Fix: Avoid passing the private key bytes so messages are not signed automatically. * Fix: Solve enum values issue. * Fix: Solve enum values issue again. * Fix: Specified enum type to serialize. * Fix: Solved wrong signing address when a derivation_path is used. * fix: linting issue * fix: remove commented old code for ledger account loading * fix: `CHAIN` and `CURVE` on LedgerETHAccount aren't needed * Fix: Handle common errors when using Ledger (LedgerError / OSError) * fix: linting issue * fix: re-enable use_enum_values for MainConfiguration * Refactor: AccountType now has imported / hardware values, plus a new field and model validator to ensure backward compatibility * Feature: New HardwareAccount account protocol * Refactor: Split logic from ETHAccount into BaseEthAccount; EthAccount is the Account using a private key * Refactor: LedgerETHAccount uses BaseEthAccount instead of ETHAccount * Refactor: superfluid connectors to be compatible with both EthAccount and LedgerEthAccount * Refactor: account.py to be able to handle more Account types than AccountFromPrivateKey * Fix: make Account Protocol runtime-checkable to differentiate between protocols * fix: rename AccountLike to AccountTypes * fix: ensure provider is set for get_eth_balance * fix: on superfluid.py force rpc to be present or raise
ValueError * fix: allow AccountFromPrivateKey and HardwareAccount to be checkable on runtime * Update src/aleph/sdk/wallets/ledger/ethereum.py Co-authored-by: Olivier Desenfans * Fix: use Type2Transaction for ledger * fix: chainId and gasprice in _get_populated_transaction_request isn't needed * Feature: allow user to setup derivation_path for ledger account * Fix: avoid storing path as "None" on account config * fix: linting issue --------- Co-authored-by: Andres D. Molins Co-authored-by: 1yam Co-authored-by: 1yam <40899431+1yam@users.noreply.github.com> Co-authored-by: Olivier Desenfans --- pyproject.toml | 2 + src/aleph/sdk/account.py | 120 +++++++++++----- src/aleph/sdk/chains/ethereum.py | 169 +++++++++++++---------- src/aleph/sdk/conf.py | 80 ++++++++++- src/aleph/sdk/connectors/superfluid.py | 68 ++++++--- src/aleph/sdk/types.py | 34 ++++- src/aleph/sdk/wallets/ledger/ethereum.py | 90 +++++++++++- tests/unit/test_gas_estimation.py | 70 ++++++---- 8 files changed, 469 insertions(+), 164 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3b2d3d16..24012413 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,6 +38,8 @@ dependencies = [ "eth-abi>=5.0.1; python_version>='3.9'", "eth-typing>=5.0.1", "jwcrypto==1.5.6", + "ledgerblue>=0.1.48", + "ledgereth>=0.10", "pydantic>=2,<3", "pydantic-settings>=2", "pynacl==1.5", # Needed now as default with _load_account changement diff --git a/src/aleph/sdk/account.py b/src/aleph/sdk/account.py index 6af5e32c..83b6e1ea 100644 --- a/src/aleph/sdk/account.py +++ b/src/aleph/sdk/account.py @@ -1,24 +1,26 @@ -import asyncio import logging from pathlib import Path -from typing import Dict, Optional, Type, TypeVar +from typing import Dict, Literal, Optional, Type, TypeVar, Union, overload from aleph_message.models import Chain +from ledgereth.exceptions import LedgerError +from typing_extensions import TypeAlias from aleph.sdk.chains.common import get_fallback_private_key from aleph.sdk.chains.ethereum import 
ETHAccount from aleph.sdk.chains.evm import EVMAccount -from aleph.sdk.chains.remote import RemoteAccount from aleph.sdk.chains.solana import SOLAccount from aleph.sdk.chains.substrate import DOTAccount from aleph.sdk.chains.svm import SVMAccount -from aleph.sdk.conf import load_main_configuration, settings +from aleph.sdk.conf import AccountType, load_main_configuration, settings from aleph.sdk.evm_utils import get_chains_with_super_token -from aleph.sdk.types import AccountFromPrivateKey +from aleph.sdk.types import AccountFromPrivateKey, HardwareAccount +from aleph.sdk.wallets.ledger import LedgerETHAccount logger = logging.getLogger(__name__) T = TypeVar("T", bound=AccountFromPrivateKey) +AccountTypes: TypeAlias = Union["AccountFromPrivateKey", "HardwareAccount"] chain_account_map: Dict[Chain, Type[T]] = { # type: ignore Chain.ARBITRUM: EVMAccount, @@ -56,7 +58,7 @@ def load_chain_account_type(chain: Chain) -> Type[AccountFromPrivateKey]: def account_from_hex_string( private_key_str: str, - account_type: Optional[Type[T]], + account_type: Optional[Type[AccountFromPrivateKey]], chain: Optional[Chain] = None, ) -> AccountFromPrivateKey: if private_key_str.startswith("0x"): @@ -78,7 +80,7 @@ def account_from_hex_string( def account_from_file( private_key_path: Path, - account_type: Optional[Type[T]], + account_type: Optional[Type[AccountFromPrivateKey]], chain: Optional[Chain] = None, ) -> AccountFromPrivateKey: private_key = private_key_path.read_bytes() @@ -97,13 +99,60 @@ def account_from_file( return account +@overload +def _load_account( + private_key_str: str, + private_key_path: None = None, + account_type: Type[AccountFromPrivateKey] = ..., + chain: Optional[Chain] = None, +) -> AccountFromPrivateKey: ... + + +@overload +def _load_account( + private_key_str: Literal[None], + private_key_path: Path, + account_type: Type[AccountFromPrivateKey] = ..., + chain: Optional[Chain] = None, +) -> AccountFromPrivateKey: ... 
+ + +@overload +def _load_account( + private_key_str: Literal[None], + private_key_path: Literal[None], + account_type: Type[HardwareAccount], + chain: Optional[Chain] = None, +) -> HardwareAccount: ... + + +@overload def _load_account( private_key_str: Optional[str] = None, private_key_path: Optional[Path] = None, - account_type: Optional[Type[AccountFromPrivateKey]] = None, + account_type: Optional[Type[AccountTypes]] = None, chain: Optional[Chain] = None, -) -> AccountFromPrivateKey: - """Load an account from a private key string or file, or from the configuration file.""" +) -> AccountTypes: ... + + +def _load_account( + private_key_str: Optional[str] = None, + private_key_path: Optional[Path] = None, + account_type: Optional[Type[AccountTypes]] = None, + chain: Optional[Chain] = None, +) -> AccountTypes: + """Load an account from a private key string or file, or from the configuration file. + + This function can return different types of accounts based on the input: + - AccountFromPrivateKey: When a private key is provided (string or file) + - HardwareAccount: When config has AccountType.HARDWARE and a Ledger device is connected + + The function will attempt to load an account in the following order: + 1. From provided private key string + 2. From provided private key file + 3. From Ledger device (if config.type is HARDWARE) + 4. 
Generate a fallback private key + """ config = load_main_configuration(settings.CONFIG_FILE) default_chain = settings.DEFAULT_CHAIN @@ -129,27 +178,36 @@ def _load_account( # Loads private key from a string if private_key_str: - return account_from_hex_string(private_key_str, account_type, chain) + return account_from_hex_string(private_key_str, None, chain) + # Loads private key from a file elif private_key_path and private_key_path.is_file(): - return account_from_file(private_key_path, account_type, chain) - # For ledger keys - elif settings.REMOTE_CRYPTO_HOST: - logger.debug("Using remote account") - loop = asyncio.get_event_loop() - return loop.run_until_complete( - RemoteAccount.from_crypto_host( - host=settings.REMOTE_CRYPTO_HOST, - unix_socket=settings.REMOTE_CRYPTO_UNIX_SOCKET, - ) - ) + return account_from_file(private_key_path, account_type, chain) # type: ignore + elif config and config.address and config.type == AccountType.HARDWARE: + logger.debug("Using ledger account") + try: + ledger_account = None + if config.derivation_path: + ledger_account = LedgerETHAccount.from_path(config.derivation_path) + else: + ledger_account = LedgerETHAccount.from_address(config.address) + + if ledger_account: + # Connect provider to the chain + # Only valid for EVM chain sign we sign TX using device + # and then use Superfluid logic to publish it to BASE / AVAX + if chain: + ledger_account.connect_chain(chain) + return ledger_account + except LedgerError as e: + logger.warning(f"Ledger Error : {e.message}") + raise e + except OSError as e: + logger.warning("Please ensure Udev rules are set to use Ledger") + raise e + # Fallback: config.path if set, else generate a new private key - else: - new_private_key = get_fallback_private_key() - account = account_from_hex_string( - bytes.hex(new_private_key), account_type, chain - ) - logger.info( - f"Generated fallback private key with address {account.get_address()}" - ) - return account + new_private_key = 
get_fallback_private_key() + account = account_from_hex_string(bytes.hex(new_private_key), None, chain) + logger.info(f"Generated fallback private key with address {account.get_address()}") + return account diff --git a/src/aleph/sdk/chains/ethereum.py b/src/aleph/sdk/chains/ethereum.py index 02bebd8f..22601897 100644 --- a/src/aleph/sdk/chains/ethereum.py +++ b/src/aleph/sdk/chains/ethereum.py @@ -1,5 +1,6 @@ import asyncio import base64 +from abc import abstractmethod from decimal import Decimal from pathlib import Path from typing import Awaitable, Dict, Optional, Union @@ -36,65 +37,30 @@ from .common import BaseAccount, get_fallback_private_key, get_public_key -class ETHAccount(BaseAccount): - """Interact with an Ethereum address or key pair on EVM blockchains""" +class BaseEthAccount(BaseAccount): + """Base logic to interact with EVM blockchains""" CHAIN = "ETH" CURVE = "secp256k1" - _account: LocalAccount + _provider: Optional[Web3] chain: Optional[Chain] chain_id: Optional[int] rpc: Optional[str] superfluid_connector: Optional[Superfluid] - def __init__( - self, - private_key: bytes, - chain: Optional[Chain] = None, - ): - self.private_key = private_key - self._account: LocalAccount = Account.from_key(self.private_key) + def __init__(self, chain: Optional[Chain] = None): + self.chain = chain self.connect_chain(chain=chain) - @staticmethod - def from_mnemonic(mnemonic: str, chain: Optional[Chain] = None) -> "ETHAccount": - Account.enable_unaudited_hdwallet_features() - return ETHAccount( - private_key=Account.from_mnemonic(mnemonic=mnemonic).key, chain=chain - ) - - def export_private_key(self) -> str: - """Export the private key using standard format.""" - return f"0x{base64.b16encode(self.private_key).decode().lower()}" - - def get_address(self) -> str: - return self._account.address - - def get_public_key(self) -> str: - return "0x" + get_public_key(private_key=self._account.key).hex() - - async def sign_raw(self, buffer: bytes) -> bytes: - """Sign a raw 
buffer.""" - msghash = encode_defunct(text=buffer.decode("utf-8")) - sig = self._account.sign_message(msghash) - return sig["signature"] - - async def sign_message(self, message: Dict) -> Dict: + @abstractmethod + async def _sign_and_send_transaction(self, tx_params: TxParams) -> str: """ - Returns a signed message from an aleph.im message. - Args: - message: Message to sign - Returns: - Dict: Signed message + Sign and broadcast a transaction using the provided ETHAccount + @param tx_params - Transaction parameters + @returns - str - Transaction hash """ - signed_message = await super().sign_message(message) - - # Apply that fix as seems that sometimes the .hex() method doesn't add the 0x str at the beginning - if not str(signed_message["signature"]).startswith("0x"): - signed_message["signature"] = "0x" + signed_message["signature"] - - return signed_message + raise NotImplementedError def connect_chain(self, chain: Optional[Chain] = None): self.chain = chain @@ -150,36 +116,13 @@ def can_transact(self, tx: TxParams, block=True) -> bool: ) return valid - async def _sign_and_send_transaction(self, tx_params: TxParams) -> str: - """ - Sign and broadcast a transaction using the provided ETHAccount - @param tx_params - Transaction parameters - @returns - str - Transaction hash - """ - - def sign_and_send() -> TxReceipt: - if self._provider is None: - raise ValueError("Provider not connected") - signed_tx = self._provider.eth.account.sign_transaction( - tx_params, self._account.key - ) - - tx_hash = self._provider.eth.send_raw_transaction(signed_tx.raw_transaction) - tx_receipt = self._provider.eth.wait_for_transaction_receipt( - tx_hash, settings.TX_TIMEOUT + def get_eth_balance(self) -> Decimal: + if not self._provider: + raise ValueError( + "Provider not set. Please configure a provider before checking balance." 
) - return tx_receipt - loop = asyncio.get_running_loop() - tx_receipt = await loop.run_in_executor(None, sign_and_send) - return tx_receipt["transactionHash"].hex() - - def get_eth_balance(self) -> Decimal: - return Decimal( - self._provider.eth.get_balance(self._account.address) - if self._provider - else 0 - ) + return Decimal(self._provider.eth.get_balance(self.get_address())) def get_token_balance(self) -> Decimal: if self.chain and self._provider: @@ -247,6 +190,84 @@ def manage_flow( ) +class ETHAccount(BaseEthAccount): + """Interact with an Ethereum address or key pair on EVM blockchains""" + + _account: LocalAccount + + def __init__( + self, + private_key: bytes, + chain: Optional[Chain] = None, + ): + self.private_key = private_key + self._account = Account.from_key(self.private_key) + super().__init__(chain=chain) + + @staticmethod + def from_mnemonic(mnemonic: str, chain: Optional[Chain] = None) -> "ETHAccount": + Account.enable_unaudited_hdwallet_features() + return ETHAccount( + private_key=Account.from_mnemonic(mnemonic=mnemonic).key, chain=chain + ) + + def export_private_key(self) -> str: + """Export the private key using standard format.""" + return f"0x{base64.b16encode(self.private_key).decode().lower()}" + + def get_address(self) -> str: + return self._account.address + + def get_public_key(self) -> str: + return "0x" + get_public_key(private_key=self._account.key).hex() + + async def sign_raw(self, buffer: bytes) -> bytes: + """Sign a raw buffer.""" + msghash = encode_defunct(text=buffer.decode("utf-8")) + sig = self._account.sign_message(msghash) + return sig["signature"] + + async def sign_message(self, message: Dict) -> Dict: + """ + Returns a signed message from an aleph Cloud message. 
+ Args: + message: Message to sign + Returns: + Dict: Signed message + """ + signed_message = await super().sign_message(message) + + # Apply that fix as seems that sometimes the .hex() method doesn't add the 0x str at the beginning + if not str(signed_message["signature"]).startswith("0x"): + signed_message["signature"] = "0x" + signed_message["signature"] + + return signed_message + + async def _sign_and_send_transaction(self, tx_params: TxParams) -> str: + """ + Sign and broadcast a transaction using the provided ETHAccount + @param tx_params - Transaction parameters + @returns - str - Transaction hash + """ + + def sign_and_send() -> TxReceipt: + if self._provider is None: + raise ValueError("Provider not connected") + signed_tx = self._provider.eth.account.sign_transaction( + tx_params, self._account.key + ) + + tx_hash = self._provider.eth.send_raw_transaction(signed_tx.raw_transaction) + tx_receipt = self._provider.eth.wait_for_transaction_receipt( + tx_hash, settings.TX_TIMEOUT + ) + return tx_receipt + + loop = asyncio.get_running_loop() + tx_receipt = await loop.run_in_executor(None, sign_and_send) + return tx_receipt["transactionHash"].hex() + + def get_fallback_account( path: Optional[Path] = None, chain: Optional[Chain] = None ) -> ETHAccount: diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index 02e8ec85..ae79063a 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -1,13 +1,14 @@ import json import logging import os +from enum import Enum from pathlib import Path from shutil import which from typing import ClassVar, Dict, List, Optional, Union from aleph_message.models import Chain from aleph_message.models.execution.environment import HypervisorType -from pydantic import BaseModel, Field +from pydantic import BaseModel, Field, field_validator, model_validator from pydantic_settings import BaseSettings, SettingsConfigDict from aleph.sdk.types import ChainInfo @@ -286,16 +287,77 @@ class Settings(BaseSettings): ) +class 
AccountType(str, Enum): + IMPORTED: str = "imported" + HARDWARE: str = "hardware" + + class MainConfiguration(BaseModel): """ Intern Chain Management with Account. """ - path: Path + path: Optional[Path] = None + type: AccountType = AccountType.IMPORTED chain: Chain - + address: Optional[str] = None + derivation_path: Optional[str] = None model_config = SettingsConfigDict(use_enum_values=True) + @field_validator("type", mode="before") + def normalize_type(cls, v): + """Handle legacy 'internal'/'external' and accept both strings or enums.""" + if v is None: + return v + if isinstance(v, AccountType): + return v + v_str = str(v).lower().strip() + if v_str == "internal": + return AccountType.IMPORTED + elif v_str == "external": + return AccountType.HARDWARE + elif v_str in ("imported", "hardware"): + return AccountType(v_str) + raise ValueError(f"Unknown account type: {v}") + + @model_validator(mode="before") + def infer_type(cls, values: dict): + """ + Previously, the `type` field was optional to maintain backward compatibility + for users with older configurations (e.g., using a private key). + + We now enforce `type` as required, but still handle legacy cases where it may + be missing by inferring its value automatically. + + Inference logic: + - If `type` is explicitly set, it is left unchanged. + - If `type` is missing: + - If `path` is provided → assume `imported` + - If only `address` is provided → assume `hardware` (Ledger) + (This scenario should not normally occur, but is handled for safety.) 
+ - If both `path` and `address` are present → trust `path` (imported) + """ + + t = values.get("type") + path = values.get("path") + address = values.get("address") + + # If type already given , keep it + if t is not None: + return values + + # Infer if missing + if path: + values["type"] = AccountType.IMPORTED + elif address: + values["type"] = AccountType.HARDWARE + else: + raise ValueError( + "Cannot infer account type: please provide 'type', or 'path' (imported), or 'address' (hardware)." + ) + + return values + # Settings singleton settings = Settings() @@ -328,7 +390,9 @@ class MainConfiguration(BaseModel): with open(settings.CONFIG_FILE, "r", encoding="utf-8") as f: config_data = json.load(f) - if "path" in config_data: + if "path" in config_data and ( + "type" not in config_data or config_data["type"] == AccountType.IMPORTED + ): settings.PRIVATE_KEY_FILE = Path(config_data["path"]) except json.JSONDecodeError: pass @@ -351,7 +415,10 @@ def save_main_configuration(file_path: Path, data: MainConfiguration): """ with file_path.open("w") as file: data_serializable = data.model_dump() - data_serializable["path"] = str(data_serializable["path"]) + if ( + data_serializable["path"] is not None + ): # Avoid having path : "None" in config file + data_serializable["path"] = str(data_serializable["path"]) json.dump(data_serializable, file, indent=4) @@ -367,8 +434,7 @@ def load_main_configuration(file_path: Path) -> Optional[MainConfiguration]: try: with file_path.open("rb") as file: content = file.read() - data = json.loads(content.decode("utf-8")) - return MainConfiguration(**data) + return MainConfiguration.model_validate_json(content.decode("utf-8")) except UnicodeDecodeError as e: logger.error(f"Unable to decode {file_path} as UTF-8: {e}") except json.JSONDecodeError: diff --git a/src/aleph/sdk/connectors/superfluid.py b/src/aleph/sdk/connectors/superfluid.py index cd971b74..2d12080f 100644 --- a/src/aleph/sdk/connectors/superfluid.py +++ 
b/src/aleph/sdk/connectors/superfluid.py @@ -3,8 +3,8 @@ from decimal import Decimal from typing import TYPE_CHECKING, Optional -from eth_utils import to_normalized_address from superfluid import CFA_V1, Operation, Web3FlowInfo +from web3 import Web3 from web3.exceptions import ContractCustomError from aleph.sdk.evm_utils import ( @@ -17,7 +17,7 @@ from aleph.sdk.types import TokenType if TYPE_CHECKING: - from aleph.sdk.chains.ethereum import ETHAccount + from aleph.sdk.chains.ethereum import BaseEthAccount class Superfluid: @@ -25,32 +25,61 @@ class Superfluid: Wrapper around the Superfluid APIs in order to CRUD Superfluid flows between two accounts. """ - account: ETHAccount + account: BaseEthAccount normalized_address: str super_token: str cfaV1Instance: CFA_V1 MIN_4_HOURS = 60 * 60 * 4 - def __init__(self, account: ETHAccount): + def __init__(self, account: BaseEthAccount): self.account = account - self.normalized_address = to_normalized_address(account.get_address()) + self.normalized_address = Web3.to_checksum_address(account.get_address()) if account.chain: self.super_token = str(get_super_token_address(account.chain)) self.cfaV1Instance = CFA_V1(account.rpc, account.chain_id) + # Helpers Functions + def _get_populated_transaction_request(self, operation, rpc: str): + """ + Prepares the transaction to be signed by either imported / hardware wallets + @param operation - on chain operations + @param rpc - RPC URL + @param address - address from Ledger account + @returns - TxParams - The transaction object + """ + + call = ( + operation.forwarder_call + if operation.forwarder_call is not None + else operation.agreement_call + ) + populated_transaction = call.build_transaction( + {"from": self.normalized_address} + ) + + web3 = Web3(Web3.HTTPProvider(rpc)) + nonce = web3.eth.get_transaction_count(self.normalized_address) + + populated_transaction["nonce"] = nonce + return populated_transaction + def _simulate_create_tx_flow(self, flow: Decimal, block=True) -> 
bool: try: operation = self.cfaV1Instance.create_flow( sender=self.normalized_address, - receiver=to_normalized_address( + receiver=Web3.to_checksum_address( "0x0000000000000000000000000000000000000001" ), # Fake Address we do not sign/send this transactions super_token=self.super_token, flow_rate=int(to_wei_token(flow)), ) - - populated_transaction = operation._get_populated_transaction_request( - self.account.rpc, self.account._account.key + if not self.account.rpc: + raise ValueError( + f"RPC endpoint is required but not set for this chain {self.account.chain}." + ) + populated_transaction = self._get_populated_transaction_request( + operation=operation, + rpc=self.account.rpc, ) return self.account.can_transact(tx=populated_transaction, block=block) except ContractCustomError as e: @@ -66,12 +95,17 @@ def _simulate_create_tx_flow(self, flow: Decimal, block=True) -> bool: async def _execute_operation_with_account(self, operation: Operation) -> str: """ - Execute an operation using the provided ETHAccount + Execute an operation using the provided account @param operation - Operation instance from the library @returns - str - Transaction hash """ - populated_transaction = operation._get_populated_transaction_request( - self.account.rpc, self.account._account.key + if not self.account.rpc: + raise ValueError( + f"RPC endpoint is required but not set for this chain {self.account.chain}." 
+ ) + + populated_transaction = self._get_populated_transaction_request( + operation=operation, rpc=self.account.rpc ) self.account.can_transact(tx=populated_transaction) @@ -86,7 +120,7 @@ async def create_flow(self, receiver: str, flow: Decimal) -> str: return await self._execute_operation_with_account( operation=self.cfaV1Instance.create_flow( sender=self.normalized_address, - receiver=to_normalized_address(receiver), + receiver=Web3.to_checksum_address(receiver), super_token=self.super_token, flow_rate=int(to_wei_token(flow)), ), @@ -95,8 +129,8 @@ async def create_flow(self, receiver: str, flow: Decimal) -> str: async def get_flow(self, sender: str, receiver: str) -> Web3FlowInfo: """Fetch information about the Superfluid flow between two addresses.""" return self.cfaV1Instance.get_flow( - sender=to_normalized_address(sender), - receiver=to_normalized_address(receiver), + sender=Web3.to_checksum_address(sender), + receiver=Web3.to_checksum_address(receiver), super_token=self.super_token, ) @@ -105,7 +139,7 @@ async def delete_flow(self, receiver: str) -> str: return await self._execute_operation_with_account( operation=self.cfaV1Instance.delete_flow( sender=self.normalized_address, - receiver=to_normalized_address(receiver), + receiver=Web3.to_checksum_address(receiver), super_token=self.super_token, ), ) @@ -115,7 +149,7 @@ async def update_flow(self, receiver: str, flow: Decimal) -> str: return await self._execute_operation_with_account( operation=self.cfaV1Instance.update_flow( sender=self.normalized_address, - receiver=to_normalized_address(receiver), + receiver=Web3.to_checksum_address(receiver), super_token=self.super_token, flow_rate=int(to_wei_token(flow)), ), diff --git a/src/aleph/sdk/types.py b/src/aleph/sdk/types.py index b839a14b..8d952b18 100644 --- a/src/aleph/sdk/types.py +++ b/src/aleph/sdk/types.py @@ -23,8 +23,15 @@ TypeAdapter, field_validator, ) - -__all__ = ("StorageEnum", "Account", "AccountFromPrivateKey", "GenericMessage") +from 
typing_extensions import runtime_checkable + +__all__ = ( + "StorageEnum", + "Account", + "AccountFromPrivateKey", + "HardwareAccount", + "GenericMessage", +) from aleph_message.models import AlephMessage, Chain @@ -35,6 +42,7 @@ class StorageEnum(str, Enum): # Use a protocol to avoid importing crypto libraries +@runtime_checkable class Account(Protocol): CHAIN: str CURVE: str @@ -52,6 +60,7 @@ def get_address(self) -> str: ... def get_public_key(self) -> str: ... +@runtime_checkable class AccountFromPrivateKey(Account, Protocol): """Only accounts that are initialized from a private key string are supported.""" @@ -64,6 +73,27 @@ def export_private_key(self) -> str: ... def switch_chain(self, chain: Optional[str] = None) -> None: ... +@runtime_checkable +class HardwareAccount(Account, Protocol): + """Account using hardware wallet.""" + + @staticmethod + def from_address( + address: str, device: Optional[Any] = None + ) -> Optional["HardwareAccount"]: ... + + @staticmethod + def from_path(path: str, device: Optional[Any] = None) -> "HardwareAccount": ... + + def get_address(self) -> str: ... + + def switch_chain(self, chain: Optional[str] = None) -> None: ... + + async def sign_message(self, message: Dict) -> Dict: ... + + async def sign_raw(self, buffer: bytes) -> bytes: ... 
+ + GenericMessage = TypeVar("GenericMessage", bound=AlephMessage) diff --git a/src/aleph/sdk/wallets/ledger/ethereum.py b/src/aleph/sdk/wallets/ledger/ethereum.py index 18712a0a..a09958d6 100644 --- a/src/aleph/sdk/wallets/ledger/ethereum.py +++ b/src/aleph/sdk/wallets/ledger/ethereum.py @@ -1,35 +1,58 @@ from __future__ import annotations +import asyncio +import logging from typing import Dict, List, Optional +from aleph_message.models import Chain from eth_typing import HexStr from ledgerblue.Dongle import Dongle from ledgereth import find_account, get_account_by_path, get_accounts from ledgereth.comms import init_dongle from ledgereth.messages import sign_message from ledgereth.objects import LedgerAccount, SignedMessage +from ledgereth.transactions import Type2Transaction, sign_transaction +from web3.types import TxReceipt -from ...chains.common import BaseAccount, get_verification_buffer +from ...chains.common import get_verification_buffer +from ...chains.ethereum import BaseEthAccount from ...utils import bytes_from_hex +logger = logging.getLogger(__name__) -class LedgerETHAccount(BaseAccount): + +class LedgerETHAccount(BaseEthAccount): """Account using the Ethereum app on Ledger hardware wallets.""" - CHAIN = "ETH" - CURVE = "secp256k1" _account: LedgerAccount _device: Dongle - def __init__(self, account: LedgerAccount, device: Dongle): + def __init__( + self, account: LedgerAccount, device: Dongle, chain: Optional[Chain] = None + ): """Initialize an aleph.im account instance that relies on a LedgerHQ device and the Ethereum Ledger application for signatures. See the static methods `self.from_address(...)` and `self.from_path(...)` for an easier method of instantiation. 
""" + super().__init__(chain=None) + self._account = account self._device = device + if chain: + self.connect_chain(chain=chain) + + @staticmethod + def get_accounts( + device: Optional[Dongle] = None, count: int = 5 + ) -> List[LedgerAccount]: + """Initialize an aleph.im account from a LedgerHQ device from + a known wallet address. + """ + device = device or init_dongle() + accounts: List[LedgerAccount] = get_accounts(dongle=device, count=count) + return accounts @staticmethod def from_address( @@ -68,6 +91,9 @@ async def sign_message(self, message: Dict) -> Dict: # TODO: Check why the code without a wallet uses `encode_defunct`. msghash: bytes = get_verification_buffer(message) + logger.warning( + "Please Sign messages using ledger" + ) # allow to propagate it to cli sig: SignedMessage = sign_message( msghash, dongle=self._device, sender_path=self._account.path ) @@ -79,12 +105,66 @@ async def sign_message(self, message: Dict) -> Dict: async def sign_raw(self, buffer: bytes) -> bytes: """Sign a raw buffer.""" + logger.warning( + "Please sign the message on your Ledger device" + ) # allow to propagate it to cli sig: SignedMessage = sign_message( buffer, dongle=self._device, sender_path=self._account.path ) signature: HexStr = sig.signature return bytes_from_hex(signature) + async def _sign_and_send_transaction(self, tx_params: dict) -> str: + """ + Sign and broadcast a transaction using the Ledger hardware wallet. + Equivalent of the software _sign_and_send_transaction(). 
+ + @param tx_params: dict - Transaction parameters + @returns: str - Transaction hash + """ + if self._provider is None: + raise ValueError("Provider not connected") + + def sign_and_send() -> TxReceipt: + logger.warning( + "Please Sign messages using ledger" + ) # allow to propagate it to cli + + # Type2Transaction + tx = Type2Transaction( + chain_id=tx_params["chainId"], + nonce=tx_params["nonce"], + max_priority_fee_per_gas=tx_params["maxPriorityFeePerGas"], + max_fee_per_gas=tx_params["maxFeePerGas"], + gas_limit=tx_params["gas"], + destination=bytes.fromhex(tx_params["to"][2:]), + amount=tx_params["value"], + data=bytes.fromhex(tx_params["data"][2:]), + ) + signed_tx = sign_transaction( + tx=tx, + sender_path=self._account.path, + dongle=self._device, + ) + + provider = self._provider + if provider is None: + raise ValueError("Provider not connected") + + tx_hash = provider.eth.send_raw_transaction(signed_tx.rawTransaction) + + tx_receipt = provider.eth.wait_for_transaction_receipt( + tx_hash, + timeout=getattr(self, "TX_TIMEOUT", 120), # optional custom timeout + ) + + return tx_receipt + + loop = asyncio.get_running_loop() + tx_receipt = await loop.run_in_executor(None, sign_and_send) + + return tx_receipt["transactionHash"].hex() + def get_address(self) -> str: return self._account.address diff --git a/tests/unit/test_gas_estimation.py b/tests/unit/test_gas_estimation.py index abbd8c5c..7db391ad 100644 --- a/tests/unit/test_gas_estimation.py +++ b/tests/unit/test_gas_estimation.py @@ -36,8 +36,8 @@ def mock_superfluid(mock_eth_account): superfluid = Superfluid(mock_eth_account) superfluid.cfaV1Instance = MagicMock() superfluid.cfaV1Instance.create_flow = MagicMock() - superfluid.super_token = "0xsupertokenaddress" - superfluid.normalized_address = "0xsenderaddress" + superfluid.super_token = "0x0000000000000000000000000000000000000000" + superfluid.normalized_address = "0x0000000000000000000000000000000000000000" # Mock the operation operation = 
MagicMock() @@ -109,16 +109,20 @@ class TestSuperfluidFlowEstimation: async def test_simulate_create_tx_flow_success( self, mock_superfluid, mock_eth_account ): - # Patch the can_transact method to simulate a successful transaction - with patch.object(mock_eth_account, "can_transact", return_value=True): - result = mock_superfluid._simulate_create_tx_flow(Decimal("0.00000005")) - assert result is True - - # Verify the flow was correctly simulated but not executed - mock_superfluid.cfaV1Instance.create_flow.assert_called_once() - assert "0x0000000000000000000000000000000000000001" in str( - mock_superfluid.cfaV1Instance.create_flow.call_args - ) + # Patch both the _get_populated_transaction_request and can_transact methods + mock_tx = {"value": 0, "gas": 100000, "gasPrice": 20_000_000_000} + with patch.object( + mock_superfluid, "_get_populated_transaction_request", return_value=mock_tx + ): + with patch.object(mock_eth_account, "can_transact", return_value=True): + result = mock_superfluid._simulate_create_tx_flow(Decimal("0.00000005")) + assert result is True + + # Verify the flow was correctly simulated but not executed + mock_superfluid.cfaV1Instance.create_flow.assert_called_once() + assert "0x0000000000000000000000000000000000000001" in str( + mock_superfluid.cfaV1Instance.create_flow.call_args + ) @pytest.mark.asyncio async def test_simulate_create_tx_flow_contract_error( @@ -128,17 +132,22 @@ async def test_simulate_create_tx_flow_contract_error( error = ContractCustomError("Insufficient deposit") error.data = "0xea76c9b3" # This is the specific error code checked in the code - # Mock can_transact to throw the error - with patch.object(mock_eth_account, "can_transact", side_effect=error): - # Also mock get_super_token_balance for the error case - with patch.object( - mock_eth_account, "get_super_token_balance", return_value=0 - ): - # Should raise InsufficientFundsError for ALEPH token - with pytest.raises(InsufficientFundsError) as exc_info: - 
mock_superfluid._simulate_create_tx_flow(Decimal("0.00000005")) - - assert exc_info.value.token_type == TokenType.ALEPH + # Mock _get_populated_transaction_request and can_transact + mock_tx = {"value": 0, "gas": 100000, "gasPrice": 20_000_000_000} + with patch.object( + mock_superfluid, "_get_populated_transaction_request", return_value=mock_tx + ): + # Mock can_transact to throw the error + with patch.object(mock_eth_account, "can_transact", side_effect=error): + # Also mock get_super_token_balance for the error case + with patch.object( + mock_eth_account, "get_super_token_balance", return_value=0 + ): + # Should raise InsufficientFundsError for ALEPH token + with pytest.raises(InsufficientFundsError) as exc_info: + mock_superfluid._simulate_create_tx_flow(Decimal("0.00000005")) + + assert exc_info.value.token_type == TokenType.ALEPH @pytest.mark.asyncio async def test_simulate_create_tx_flow_other_error( @@ -148,11 +157,16 @@ async def test_simulate_create_tx_flow_other_error( error = ContractCustomError("Other error") error.data = "0xsomeothercode" - # Mock can_transact to throw the error - with patch.object(mock_eth_account, "can_transact", side_effect=error): - # Should return False for other errors - result = mock_superfluid._simulate_create_tx_flow(Decimal("0.00000005")) - assert result is False + # Mock _get_populated_transaction_request and can_transact + mock_tx = {"value": 0, "gas": 100000, "gasPrice": 20_000_000_000} + with patch.object( + mock_superfluid, "_get_populated_transaction_request", return_value=mock_tx + ): + # Mock can_transact to throw the error + with patch.object(mock_eth_account, "can_transact", side_effect=error): + # Should return False for other errors + result = mock_superfluid._simulate_create_tx_flow(Decimal("0.00000005")) + assert result is False @pytest.mark.asyncio async def test_can_start_flow_uses_simulation(self, mock_superfluid): From b4c1f90d94cf9e8233f544613606e191cb1d8811 Mon Sep 17 00:00:00 2001 From: 1yam 
<40899431+1yam@users.noreply.github.com> Date: Thu, 27 Nov 2025 13:00:54 +0100 Subject: [PATCH 111/122] Fix: CrnList is no longer a Dict, type check making failed the match of the crn-url with crn-hash (#266) --- src/aleph/sdk/client/services/instance.py | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/src/aleph/sdk/client/services/instance.py b/src/aleph/sdk/client/services/instance.py index 034ee4b9..9a2dcf20 100644 --- a/src/aleph/sdk/client/services/instance.py +++ b/src/aleph/sdk/client/services/instance.py @@ -4,6 +4,7 @@ from aleph_message.models import InstanceMessage, ItemHash, MessageType, PaymentType from aleph_message.status import MessageStatus +from aleph.sdk.client.services.crn import CrnList from aleph.sdk.query.filters import MessageFilter from aleph.sdk.query.responses import MessagesResponse @@ -47,7 +48,7 @@ async def get_name_of_executable(self, item_hash: ItemHash) -> Optional[str]: return None async def get_instance_allocation_info( - self, msg: InstanceMessage, crn_list: dict + self, msg: InstanceMessage, crn_list: CrnList ) -> Tuple[InstanceMessage, Union[InstanceManual, InstanceWithScheduler]]: vm_hash = msg.item_hash payment_type = safe_getattr(msg, "content.payment.type.value") @@ -62,12 +63,8 @@ async def get_instance_allocation_info( info = InstanceWithScheduler(source="scheduler", allocations=alloc) else: crn_hash = safe_getattr(msg, "content.requirements.node.node_hash") - if isinstance(crn_list, list): - node = next((n for n in crn_list if n.get("hash") == crn_hash), None) - url = sanitize_url(node.get("address")) if node else "" - else: - node = crn_list.get(crn_hash) - url = sanitize_url(node.get("address")) if node else "" + node = crn_list.find_crn_by_hash(crn_hash) + url = sanitize_url(node.address) if node else "" info = InstanceManual(source="manual", crn_url=url) return msg, info @@ -84,8 +81,7 @@ async def get_instances(self, address: str) -> List[InstanceMessage]: return resp.messages async 
def get_instances_allocations(self, messages_list, only_processed=True): - crn_list_response = await self._client.crn.get_crns_list() - crn_list = crn_list_response.get("crns", {}) + crn_list = await self._client.crn.get_crns_list(only_active=False) tasks = [] for msg in messages_list: From 545f4a88ea74faa0aadb323e3f62ba425af45773 Mon Sep 17 00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Thu, 27 Nov 2025 15:49:03 +0100 Subject: [PATCH 112/122] Feature: aleph settings services (#248) * Problem: Ledger wallet users cannot use Aleph to send transactions. Solution: Implement Ledger use on SDK to allow using them. * Fix: Solved linting and types issues for code quality. * Feature: aleph settings services * Feature: use settings aggregates instead of hardcoded crn version * Fix: we should give crn_version instead of fetching it here * fix: linting issue * Fix: crn_version should be optional and given * Apply suggestion from @nesitor Co-authored-by: nesitor * Fix: use settings.ALEPH_AGGREGATE_ADDRESS instead of hardcoded address for pricing and settings * fix: linting * fix: missing typing for ALEPH_AGGREGATE_ADDRESS in settings --------- Co-authored-by: Andres D. 
Molins Co-authored-by: nesitor --- src/aleph/sdk/client/http.py | 3 +- src/aleph/sdk/client/services/crn.py | 10 +- src/aleph/sdk/client/services/pricing.py | 5 +- src/aleph/sdk/client/services/settings.py | 40 +++++ src/aleph/sdk/conf.py | 2 + tests/unit/services/test_settings.py | 200 ++++++++++++++++++++++ 6 files changed, 249 insertions(+), 11 deletions(-) create mode 100644 src/aleph/sdk/client/services/settings.py create mode 100644 tests/unit/services/test_settings.py diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index c1facba1..fa660433 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -39,6 +39,7 @@ from aleph.sdk.client.services.port_forwarder import PortForwarder from aleph.sdk.client.services.pricing import Pricing from aleph.sdk.client.services.scheduler import Scheduler +from aleph.sdk.client.services.settings import Settings as NetworkSettingsService from aleph.sdk.client.services.voucher import Vouchers from ..conf import settings @@ -146,7 +147,7 @@ async def __aenter__(self): self.instance = Instance(self) self.pricing = Pricing(self) self.voucher = Vouchers(self) - + self.network_settings = NetworkSettingsService(self) return self async def __aexit__(self, exc_type, exc_val, exc_tb): diff --git a/src/aleph/sdk/client/services/crn.py b/src/aleph/sdk/client/services/crn.py index 82cec51b..fe6d538d 100644 --- a/src/aleph/sdk/client/services/crn.py +++ b/src/aleph/sdk/client/services/crn.py @@ -97,7 +97,7 @@ def find_gpu_on_network(self): def filter_crn( self, - latest_crn_version: bool = False, + crn_version: Optional[str] = None, ipv6: bool = False, stream_address: bool = False, confidential: bool = False, @@ -105,7 +105,7 @@ def filter_crn( ) -> list[CRN]: """Filter compute resource node list, unfiltered by default. Args: - latest_crn_version (bool): Filter by latest crn version. + crn_version (str): Filter by specific crn version. ipv6 (bool): Filter invalid IPv6 configuration. 
stream_address (bool): Filter invalid payment receiver address. confidential (bool): Filter by confidential computing support. @@ -113,15 +113,11 @@ def filter_crn( Returns: list[CRN]: List of compute resource nodes. (if no filter applied, return all) """ - # current_crn_version = await fetch_latest_crn_version() - # Relax current filter to allow use aleph-vm versions since 1.5.1. - # TODO: Allow to specify that option on settings aggregate on maybe on GitHub - current_crn_version = "1.5.1" filtered_crn: list[CRN] = [] for crn_ in self.crns: # Check crn version - if latest_crn_version and (crn_.version or "0.0.0") < current_crn_version: + if crn_version and (crn_.version or "0.0.0") < crn_version: continue # Filter with ipv6 check diff --git a/src/aleph/sdk/client/services/pricing.py b/src/aleph/sdk/client/services/pricing.py index 9c19eb0e..e2b51c50 100644 --- a/src/aleph/sdk/client/services/pricing.py +++ b/src/aleph/sdk/client/services/pricing.py @@ -4,6 +4,7 @@ from typing import TYPE_CHECKING, Dict, List, Optional, Union from aleph.sdk.client.services.base import BaseService +from aleph.sdk.conf import settings if TYPE_CHECKING: pass @@ -205,9 +206,7 @@ def __init__(self, client): async def get_pricing_aggregate( self, ) -> PricingModel: - result = await self.get_config( - address="0xFba561a84A537fCaa567bb7A2257e7142701ae2A" - ) + result = await self.get_config(address=settings.ALEPH_AGGREGATE_ADDRESS) return result.data[0] async def get_pricing_for_services( diff --git a/src/aleph/sdk/client/services/settings.py b/src/aleph/sdk/client/services/settings.py new file mode 100644 index 00000000..9f4de76b --- /dev/null +++ b/src/aleph/sdk/client/services/settings.py @@ -0,0 +1,40 @@ +from typing import List + +from pydantic import BaseModel + +from aleph.sdk.conf import settings + +from .base import BaseService + + +class NetworkAvailableGpu(BaseModel): + name: str + model: str + vendor: str + device_id: str + + +class NetworkSettingsModel(BaseModel): + 
compatible_gpus: List[NetworkAvailableGpu] + last_crn_version: str + community_wallet_address: str + community_wallet_timestamp: int + + +class Settings(BaseService[NetworkSettingsModel]): + """ + This Service handle logic around Pricing + """ + + aggregate_key = "settings" + model_cls = NetworkSettingsModel + + def __init__(self, client): + super().__init__(client=client) + + # Config from aggregate + async def get_settings_aggregate( + self, + ) -> NetworkSettingsModel: + result = await self.get_config(address=settings.ALEPH_AGGREGATE_ADDRESS) + return result.data[0] diff --git a/src/aleph/sdk/conf.py b/src/aleph/sdk/conf.py index ae79063a..ee91fc39 100644 --- a/src/aleph/sdk/conf.py +++ b/src/aleph/sdk/conf.py @@ -99,6 +99,8 @@ class Settings(BaseSettings): VOUCHER_SOL_REGISTRY: str = "https://api.claim.twentysix.cloud/v1/registry/sol" VOUCHER_ORIGIN_ADDRESS: str = "0xB34f25f2c935bCA437C061547eA12851d719dEFb" + ALEPH_AGGREGATE_ADDRESS: str = "0xFba561a84A537fCaa567bb7A2257e7142701ae2A" + # Web3Provider settings TOKEN_DECIMALS: ClassVar[int] = 18 TX_TIMEOUT: ClassVar[int] = 60 * 3 diff --git a/tests/unit/services/test_settings.py b/tests/unit/services/test_settings.py new file mode 100644 index 00000000..5b2efebb --- /dev/null +++ b/tests/unit/services/test_settings.py @@ -0,0 +1,200 @@ +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from aleph.sdk import AlephHttpClient +from aleph.sdk.client.services.settings import NetworkSettingsModel, Settings + + +@pytest.fixture +def mock_settings_aggregate_response(): + return { + "compatible_gpus": [ + { + "name": "AD102GL [L40S]", + "model": "L40S", + "vendor": "NVIDIA", + "device_id": "10de:26b9", + }, + { + "name": "GB202 [GeForce RTX 5090]", + "model": "RTX 5090", + "vendor": "NVIDIA", + "device_id": "10de:2685", + }, + { + "name": "GB202 [GeForce RTX 5090 D]", + "model": "RTX 5090", + "vendor": "NVIDIA", + "device_id": "10de:2687", + }, + { + "name": "AD102 [GeForce RTX 4090]", + "model": "RTX 
4090", + "vendor": "NVIDIA", + "device_id": "10de:2684", + }, + { + "name": "AD102 [GeForce RTX 4090 D]", + "model": "RTX 4090", + "vendor": "NVIDIA", + "device_id": "10de:2685", + }, + { + "name": "GA102 [GeForce RTX 3090]", + "model": "RTX 3090", + "vendor": "NVIDIA", + "device_id": "10de:2204", + }, + { + "name": "GA102 [GeForce RTX 3090 Ti]", + "model": "RTX 3090", + "vendor": "NVIDIA", + "device_id": "10de:2203", + }, + { + "name": "AD104GL [RTX 4000 SFF Ada Generation]", + "model": "RTX 4000 ADA", + "vendor": "NVIDIA", + "device_id": "10de:27b0", + }, + { + "name": "AD104GL [RTX 4000 Ada Generation]", + "model": "RTX 4000 ADA", + "vendor": "NVIDIA", + "device_id": "10de:27b2", + }, + { + "name": "GA102GL [RTX A5000]", + "model": "RTX A5000", + "vendor": "NVIDIA", + "device_id": "10de:2231", + }, + { + "name": "GA102GL [RTX A6000]", + "model": "RTX A6000", + "vendor": "NVIDIA", + "device_id": "10de:2230", + }, + { + "name": "GH100 [H100]", + "model": "H100", + "vendor": "NVIDIA", + "device_id": "10de:2336", + }, + { + "name": "GH100 [H100 NVSwitch]", + "model": "H100", + "vendor": "NVIDIA", + "device_id": "10de:22a3", + }, + { + "name": "GH100 [H100 CNX]", + "model": "H100", + "vendor": "NVIDIA", + "device_id": "10de:2313", + }, + { + "name": "GH100 [H100 SXM5 80GB]", + "model": "H100", + "vendor": "NVIDIA", + "device_id": "10de:2330", + }, + { + "name": "GH100 [H100 PCIe]", + "model": "H100", + "vendor": "NVIDIA", + "device_id": "10de:2331", + }, + { + "name": "GA100", + "model": "A100", + "vendor": "NVIDIA", + "device_id": "10de:2080", + }, + { + "name": "GA100", + "model": "A100", + "vendor": "NVIDIA", + "device_id": "10de:2081", + }, + { + "name": "GA100 [A100 SXM4 80GB]", + "model": "A100", + "vendor": "NVIDIA", + "device_id": "10de:20b2", + }, + { + "name": "GA100 [A100 PCIe 80GB]", + "model": "A100", + "vendor": "NVIDIA", + "device_id": "10de:20b5", + }, + { + "name": "GA100 [A100X]", + "model": "A100", + "vendor": "NVIDIA", + "device_id": "10de:20b8", 
+ }, + { + "name": "GH100 [H200 SXM 141GB]", + "model": "H200", + "vendor": "NVIDIA", + "device_id": "10de:2335", + }, + { + "name": "GH100 [H200 NVL]", + "model": "H200", + "vendor": "NVIDIA", + "device_id": "10de:233b", + }, + { + "name": "AD102GL [RTX 6000 ADA]", + "model": "RTX 6000 ADA", + "vendor": "NVIDIA", + "device_id": "10de:26b1", + }, + ], + "last_crn_version": "1.7.2", + "community_wallet_address": "0x5aBd3258C5492fD378EBC2e0017416E199e5Da56", + "community_wallet_timestamp": 1739996239, + } + + +@pytest.mark.asyncio +async def test_get_settings_aggregate( + make_mock_aiohttp_session, mock_settings_aggregate_response +): + client = AlephHttpClient(api_server="http://localhost") + + # Properly mock the fetch_aggregate method using monkeypatch + client._http_session = MagicMock() + monkeypatch = AsyncMock(return_value=mock_settings_aggregate_response) + setattr(client, "fetch_aggregate", monkeypatch) + + settings_service = Settings(client) + result = await settings_service.get_settings_aggregate() + + assert isinstance(result, NetworkSettingsModel) + assert len(result.compatible_gpus) == 24 # We have 24 GPUs in the mock data + + rtx4000_gpu = next( + gpu for gpu in result.compatible_gpus if gpu.device_id == "10de:27b0" + ) + assert rtx4000_gpu.name == "AD104GL [RTX 4000 SFF Ada Generation]" + assert rtx4000_gpu.model == "RTX 4000 ADA" + assert rtx4000_gpu.vendor == "NVIDIA" + + assert result.last_crn_version == "1.7.2" + assert ( + result.community_wallet_address == "0x5aBd3258C5492fD378EBC2e0017416E199e5Da56" + ) + assert result.community_wallet_timestamp == 1739996239 + + # Verify that fetch_aggregate was called with the correct parameters + assert monkeypatch.call_count == 1 + assert ( + monkeypatch.call_args.kwargs["address"] + == "0xFba561a84A537fCaa567bb7A2257e7142701ae2A" + ) + assert monkeypatch.call_args.kwargs["key"] == "settings" From 9ff376baa863e86247e6047e0b23db26a866b41e Mon Sep 17 00:00:00 2001 From: 1yam 
<40899431+1yam@users.noreply.github.com> Date: Mon, 22 Dec 2025 16:16:38 +0100 Subject: [PATCH 113/122] fix: remove macos13 from CI (#269) Macos 13 is being deprecated : https://github.com/actions/runner-images/issues/13046 --- .github/workflows/build-wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build-wheels.yml b/.github/workflows/build-wheels.yml index 440b53ca..96d828ad 100644 --- a/.github/workflows/build-wheels.yml +++ b/.github/workflows/build-wheels.yml @@ -13,7 +13,7 @@ jobs: strategy: fail-fast: false matrix: - os: [macos-13, macos-14, ubuntu-22.04, ubuntu-24.04] + os: [macos-14, ubuntu-22.04, ubuntu-24.04] runs-on: ${{ matrix.os }} steps: From 362ed497d40269971174299e9a745fd78f880b41 Mon Sep 17 00:00:00 2001 From: "Alie.E" Date: Tue, 6 Jan 2026 20:33:15 +0100 Subject: [PATCH 114/122] Fix: GPU lists being reset on each iter (#270) --- src/aleph/sdk/client/services/crn.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/aleph/sdk/client/services/crn.py b/src/aleph/sdk/client/services/crn.py index fe6d538d..bca54176 100644 --- a/src/aleph/sdk/client/services/crn.py +++ b/src/aleph/sdk/client/services/crn.py @@ -76,14 +76,14 @@ def find_gpu_on_network(self): continue # Extracts used GPU + compatible_gpu[crn_.address] = [] for gpu in crn_.get("compatible_gpus", []): - compatible_gpu[crn_.address] = [] compatible_gpu[crn_.address].append(GPU.model_validate(gpu)) gpu_count += 1 # Extracts available GPU + available_compatible_gpu[crn_.address] = [] for gpu in crn_.get("compatible_available_gpus", []): - available_compatible_gpu[crn_.address] = [] available_compatible_gpu[crn_.address].append(GPU.model_validate(gpu)) gpu_count += 1 available_gpu_count += 1 From 134994af0a993089e702575e08dce599ce53c33a Mon Sep 17 00:00:00 2001 From: Olivier Desenfans Date: Tue, 13 Jan 2026 10:20:55 +0100 Subject: [PATCH 115/122] feature: get_aggregate(s) methods (#273) Added two new methods to get 
aggregates, `get_aggregate()` and `get_aggregates()`. Their key feature is to not raise an implementation-dependent exception such as `ClientResponseError` when no aggregate is found but instead return `None` to the caller. This enables implementation-agnostic code (such as the upcoming authorization support) to detect nonexisting aggregates without having to catch implementation-specific exceptions in generic code. Marked `fetch_aggregate` and `fetch_aggregates` as deprecated, their names were inconsistent with the rest of the API anyway. --- src/aleph/sdk/client/abstract.py | 29 ++++++++++++++++++++++ src/aleph/sdk/client/http.py | 31 ++++++++++++++++++++++-- src/aleph/sdk/client/services/base.py | 2 +- tests/unit/services/test_base_service.py | 4 +-- tests/unit/services/test_settings.py | 2 +- tests/unit/test_asynchronous_get.py | 29 ++++++++++++++++++++++ 6 files changed, 91 insertions(+), 6 deletions(-) diff --git a/src/aleph/sdk/client/abstract.py b/src/aleph/sdk/client/abstract.py index 2816aa3d..fea2db3f 100644 --- a/src/aleph/sdk/client/abstract.py +++ b/src/aleph/sdk/client/abstract.py @@ -35,6 +35,7 @@ ) from aleph_message.models.execution.program import Encoding from aleph_message.status import MessageStatus +from typing_extensions import deprecated from aleph.sdk.conf import settings from aleph.sdk.types import Account @@ -49,7 +50,32 @@ class AlephClient(ABC): + async def get_aggregate(self, address: str, key: str) -> Optional[Dict[str, Dict]]: + """ + Get a value from the aggregate store by owner address and item key. + Returns None if no aggregate was found. + + :param address: Address of the owner of the aggregate + :param key: Key of the aggregate + """ + raise NotImplementedError("Did you mean to import `AlephHttpClient`?") + + async def get_aggregates( + self, address: str, keys: Optional[Iterable[str]] = None + ) -> Optional[Dict[str, Dict]]: + """ + Get key-value pairs from the aggregate store by owner address. 
+ Returns None if no aggregate was found. + + :param address: Address of the owner of the aggregate + :param keys: Keys of the aggregates to fetch (Default: all items) + """ + raise NotImplementedError("Did you mean to import `AlephHttpClient`?") + @abstractmethod + @deprecated( + "This method is deprecated and will be removed in upcoming versions. Use get_aggregate instead." + ) async def fetch_aggregate(self, address: str, key: str) -> Dict[str, Dict]: """ Fetch a value from the aggregate store by owner address and item key. @@ -60,6 +86,9 @@ async def fetch_aggregate(self, address: str, key: str) -> Dict[str, Dict]: raise NotImplementedError("Did you mean to import `AlephHttpClient`?") @abstractmethod + @deprecated( + "This method is deprecated and will be removed in upcoming versions. Use get_aggregates instead." + ) async def fetch_aggregates( self, address: str, keys: Optional[Iterable[str]] = None ) -> Dict[str, Dict]: diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index fa660433..2d4505b3 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -19,6 +19,7 @@ ) import aiohttp +from aiohttp import ClientResponseError from aiohttp.web import HTTPNotFound from aleph_message import parse_message from aleph_message.models import ( @@ -155,7 +156,7 @@ async def __aexit__(self, exc_type, exc_val, exc_tb): if self._http_session is not None: await self._http_session.close() - async def fetch_aggregate(self, address: str, key: str) -> Dict[str, Dict]: + async def _fetch_aggregate(self, address: str, key: str) -> Dict[str, Dict]: params: Dict[str, Any] = {"keys": key} async with self.http_session.get( @@ -167,7 +168,7 @@ async def fetch_aggregate(self, address: str, key: str) -> Dict[str, Dict]: final_result = data.get(key) return final_result - async def fetch_aggregates( + async def _fetch_aggregates( self, address: str, keys: Optional[Iterable[str]] = None ) -> Dict[str, Dict]: keys_str = ",".join(keys) if keys else 
"" @@ -184,6 +185,32 @@ async def fetch_aggregates( data = result.get("data", dict()) return data + async def get_aggregate(self, address: str, key: str) -> Optional[Dict[str, Dict]]: + try: + return await self.fetch_aggregate(address=address, key=key) + except ClientResponseError as e: + if e.status == 404: + return None + raise + + async def get_aggregates( + self, address: str, keys: Optional[Iterable[str]] = None + ) -> Optional[Dict[str, Dict]]: + try: + return await self.fetch_aggregates(address=address, keys=keys) + except ClientResponseError as e: + if e.status == 404: + return None + raise + + async def fetch_aggregate(self, address: str, key: str) -> Dict[str, Dict]: + return await self._fetch_aggregate(address=address, key=key) + + async def fetch_aggregates( + self, address: str, keys: Optional[Iterable[str]] = None + ) -> Dict[str, Dict]: + return await self._fetch_aggregates(address=address, keys=keys) + async def get_posts( self, page_size: int = 200, diff --git a/src/aleph/sdk/client/services/base.py b/src/aleph/sdk/client/services/base.py index 7459d7f6..77a9cc0b 100644 --- a/src/aleph/sdk/client/services/base.py +++ b/src/aleph/sdk/client/services/base.py @@ -29,7 +29,7 @@ def __init__(self, client: "AlephHttpClient"): async def get_config(self, address: str): - aggregate_data = await self._client.fetch_aggregate( + aggregate_data = await self._client.get_aggregate( address=address, key=self.aggregate_key ) diff --git a/tests/unit/services/test_base_service.py b/tests/unit/services/test_base_service.py index 6c07dd50..2c1304ec 100644 --- a/tests/unit/services/test_base_service.py +++ b/tests/unit/services/test_base_service.py @@ -21,7 +21,7 @@ class DummyService(BaseService[DummyModel]): async def test_get_config_with_data(): mock_client = AsyncMock() mock_data = {"foo": "hello", "bar": 123} - mock_client.fetch_aggregate.return_value = mock_data + mock_client.get_aggregate.return_value = mock_data service = DummyService(mock_client) @@ -37,7 +37,7 
@@ async def test_get_config_with_data(): @pytest.mark.asyncio async def test_get_config_with_no_data(): mock_client = AsyncMock() - mock_client.fetch_aggregate.return_value = None + mock_client.get_aggregate.return_value = None service = DummyService(mock_client) result = await service.get_config("0xSOME_ADDRESS") diff --git a/tests/unit/services/test_settings.py b/tests/unit/services/test_settings.py index 5b2efebb..9ac09d9e 100644 --- a/tests/unit/services/test_settings.py +++ b/tests/unit/services/test_settings.py @@ -170,7 +170,7 @@ async def test_get_settings_aggregate( # Properly mock the fetch_aggregate method using monkeypatch client._http_session = MagicMock() monkeypatch = AsyncMock(return_value=mock_settings_aggregate_response) - setattr(client, "fetch_aggregate", monkeypatch) + setattr(client, "get_aggregate", monkeypatch) settings_service = Settings(client) result = await settings_service.get_settings_aggregate() diff --git a/tests/unit/test_asynchronous_get.py b/tests/unit/test_asynchronous_get.py index 7cfb38f3..674becf7 100644 --- a/tests/unit/test_asynchronous_get.py +++ b/tests/unit/test_asynchronous_get.py @@ -23,6 +23,20 @@ async def test_fetch_aggregate(): assert response.keys() == {"nodes", "resource_nodes"} +@pytest.mark.asyncio +async def test_get_aggregate(): + mock_session = make_mock_get_session( + {"data": {"corechannel": {"nodes": [], "resource_nodes": []}}} + ) + async with mock_session: + response = await mock_session.get_aggregate( + address="0xa1B3bb7d2332383D96b7796B908fB7f7F3c2Be10", + key="corechannel", + ) + assert response is not None + assert response.keys() == {"nodes", "resource_nodes"} + + @pytest.mark.asyncio async def test_fetch_aggregates(): mock_session = make_mock_get_session( @@ -37,6 +51,21 @@ async def test_fetch_aggregates(): assert response["corechannel"].keys() == {"nodes", "resource_nodes"} +@pytest.mark.asyncio +async def test_get_aggregates(): + mock_session = make_mock_get_session( + {"data": {"corechannel": 
{"nodes": [], "resource_nodes": []}}} + ) + + async with mock_session: + response = await mock_session.get_aggregates( + address="0xa1B3bb7d2332383D96b7796B908fB7f7F3c2Be10" + ) + assert response is not None + assert response.keys() == {"corechannel"} + assert response["corechannel"].keys() == {"nodes", "resource_nodes"} + + @pytest.mark.asyncio async def test_get_posts(raw_posts_response): mock_session = make_mock_get_session(raw_posts_response(1)) From 7373c433bb7b1565dfd8bd91879c42bdb01392d0 Mon Sep 17 00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Wed, 14 Jan 2026 17:46:58 +0100 Subject: [PATCH 116/122] Feature: filter crn can with based on vm resources (#268) This pull request enhances the filtering capabilities of the CRN (Compute Resource Node) service by allowing users to filter nodes based on specific VM resource requirements (CPU, memory, disk). To support this, new data models for system usage and VM resources have been introduced, and the filtering logic has been updated accordingly. ### Filtering and API Enhancements * Added a new `VmResources` model to specify VM requirements (vCPUs, memory in MiB, disk in MiB), and updated the `filter_crn` method to accept this as an optional parameter. The method now filters nodes based on available resources, ensuring that only nodes meeting the specified criteria are returned. ### Data Model Additions * Introduced several new models (`CpuLoad`, `CoreFrequencies`, `CpuInfo`, `MemoryInfo`, `DiskInfo`, `UsagePeriod`, `SystemUsage`) to represent detailed system usage statistics for CRNs. These are now included in the `CRN` model as an optional `system_usage` field, enabling resource-based filtering. 
--- src/aleph/sdk/client/services/crn.py | 132 ++++++++++++++++++++++----- src/aleph/sdk/types.py | 7 ++ 2 files changed, 116 insertions(+), 23 deletions(-) diff --git a/src/aleph/sdk/client/services/crn.py b/src/aleph/sdk/client/services/crn.py index bca54176..19477cb4 100644 --- a/src/aleph/sdk/client/services/crn.py +++ b/src/aleph/sdk/client/services/crn.py @@ -1,9 +1,10 @@ +from datetime import datetime from typing import TYPE_CHECKING, Dict, List, Optional, Union import aiohttp from aiohttp.client_exceptions import ClientResponseError from aleph_message.models import ItemHash -from pydantic import BaseModel +from pydantic import BaseModel, NonNegativeInt, PositiveInt from aleph.sdk.conf import settings from aleph.sdk.exceptions import MethodNotAvailableOnCRN, VmNotFoundOnHost @@ -13,6 +14,7 @@ CrnV1List, CrnV2List, DictLikeModel, + VmResources, ) from aleph.sdk.utils import extract_valid_eth_address, sanitize_url @@ -20,15 +22,73 @@ from aleph.sdk.client.http import AlephHttpClient +class CpuLoad(BaseModel): + load1: float + load5: float + load15: float + + +class CoreFrequencies(BaseModel): + min: float + max: float + + +class CpuInfo(BaseModel): + count: PositiveInt + load_average: CpuLoad + core_frequencies: CoreFrequencies + + +class CpuProperties(BaseModel): + architecture: str + vendor: str + features: List[str] = [] + + +class MemoryInfo(BaseModel): + total_kB: PositiveInt + available_kB: NonNegativeInt + + +class DiskInfo(BaseModel): + total_kB: PositiveInt + available_kB: NonNegativeInt + + +class UsagePeriod(BaseModel): + start_timestamp: datetime + duration_seconds: NonNegativeInt + + +class Properties(BaseModel): + cpu: CpuProperties + + class GPU(BaseModel): vendor: str model: str device_name: str device_class: str pci_host: str + device_id: str compatible: bool +class GpuUsages(BaseModel): + devices: List[GPU] = [] + available_devices: List[GPU] = [] + + +class SystemUsage(BaseModel): + cpu: CpuInfo + mem: MemoryInfo + disk: DiskInfo + period: 
UsagePeriod + properties: Properties + gpu: GpuUsages + active: bool + + class NetworkGPUS(BaseModel): total_gpu_count: int available_gpu_count: int @@ -47,6 +107,7 @@ class CRN(DictLikeModel): gpu_support: Optional[bool] = False confidential_support: Optional[bool] = False qemu_support: Optional[bool] = False + system_usage: Optional[SystemUsage] = None version: Optional[str] = "0.0.0" payment_receiver_address: Optional[str] # Can be None if not configured @@ -71,20 +132,20 @@ def find_gpu_on_network(self): compatible_gpu: Dict[str, List[GPU]] = {} available_compatible_gpu: Dict[str, List[GPU]] = {} - for crn_ in self.crns: - if not crn_.gpu_support: + for crn in self.crns: + if not crn.gpu_support: continue # Extracts used GPU - compatible_gpu[crn_.address] = [] - for gpu in crn_.get("compatible_gpus", []): - compatible_gpu[crn_.address].append(GPU.model_validate(gpu)) + compatible_gpu[crn.address] = [] + for gpu in crn.get("compatible_gpus", []): + compatible_gpu[crn.address].append(GPU.model_validate(gpu)) gpu_count += 1 # Extracts available GPU - available_compatible_gpu[crn_.address] = [] - for gpu in crn_.get("compatible_available_gpus", []): - available_compatible_gpu[crn_.address].append(GPU.model_validate(gpu)) + available_compatible_gpu[crn.address] = [] + for gpu in crn.get("compatible_available_gpus", []): + available_compatible_gpu[crn.address].append(GPU.model_validate(gpu)) gpu_count += 1 available_gpu_count += 1 @@ -102,6 +163,7 @@ def filter_crn( stream_address: bool = False, confidential: bool = False, gpu: bool = False, + vm_resources: Optional[VmResources] = None, ) -> list[CRN]: """Filter compute resource node list, unfiltered by default. Args: @@ -110,51 +172,75 @@ def filter_crn( stream_address (bool): Filter invalid payment receiver address. confidential (bool): Filter by confidential computing support. gpu (bool): Filter by GPU support. + vm_resources (VmResources): Filter by VM need, vcpus, memory, disk. 
Returns: list[CRN]: List of compute resource nodes. (if no filter applied, return all) """ filtered_crn: list[CRN] = [] - for crn_ in self.crns: + for crn in self.crns: # Check crn version - if crn_version and (crn_.version or "0.0.0") < crn_version: + if crn_version and (crn.version or "0.0.0") < crn_version: continue # Filter with ipv6 check if ipv6: - ipv6_check = crn_.get("ipv6_check") + ipv6_check = crn.get("ipv6_check") + if not ipv6_check or not all(ipv6_check.values()): continue if stream_address and not extract_valid_eth_address( - crn_.payment_receiver_address or "" + crn.payment_receiver_address or "" ): continue # Confidential Filter - if confidential and not crn_.confidential_support: + if confidential and not crn.confidential_support: continue # Filter with GPU / Available GPU - available_gpu = crn_.get("compatible_available_gpus") - if gpu and (not crn_.gpu_support or not available_gpu): + available_gpu = crn.get("compatible_available_gpus") + if gpu and (not crn.gpu_support or not available_gpu): continue - filtered_crn.append(crn_) + # Filter VM resources + if vm_resources: + crn_usage = crn.system_usage + if not crn_usage: + continue + + # Check CPU count + if crn_usage.cpu.count < vm_resources.vcpus: + continue + + # Convert MiB to kB (1 MiB = 1024 kB) for proper comparison + memory_kb_required = vm_resources.memory * 1024 + disk_kb_required = vm_resources.disk_mib * 1024 + + # Check free memory + if crn_usage.mem.available_kB < memory_kb_required: + continue + + # Check free disk + if crn_usage.disk.available_kB < disk_kb_required: + continue + + filtered_crn.append(crn) return filtered_crn # Find CRN by address def find_crn_by_address(self, address: str) -> Optional[CRN]: - for crn_ in self.crns: - if crn_.address == sanitize_url(address): - return crn_ + for crn in self.crns: + if crn.address == sanitize_url(address): + return crn return None # Find CRN by hash def find_crn_by_hash(self, crn_hash: str) -> Optional[CRN]: - for crn_ in 
self.crns: - if crn_.hash == crn_hash: - return crn_ + for crn in self.crns: + if crn.hash == crn_hash: + return crn return None def find_crn( diff --git a/src/aleph/sdk/types.py b/src/aleph/sdk/types.py index 8d952b18..ed2524cb 100644 --- a/src/aleph/sdk/types.py +++ b/src/aleph/sdk/types.py @@ -19,6 +19,7 @@ BaseModel, ConfigDict, Field, + PositiveInt, RootModel, TypeAdapter, field_validator, @@ -399,3 +400,9 @@ class Voucher(BaseModel): image: str icon: str attributes: list[VoucherAttribute] + + +class VmResources(BaseModel): + vcpus: PositiveInt + memory: PositiveInt + disk_mib: PositiveInt From 34cd2f802043fda28da2aac12c63faa5d4139385 Mon Sep 17 00:00:00 2001 From: Olivier Desenfans Date: Wed, 14 Jan 2026 18:20:24 +0100 Subject: [PATCH 117/122] fix: pin substrate-interface to < 1.8.0 (#275) There appears to be a bug in the 1.8.0 version that was just pushed minutes ago by the Substrate team. This breaks our CI. Pinning to below 1.8 for now. --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 24012413..46adc715 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -81,7 +81,7 @@ optional-dependencies.solana = [ ] optional-dependencies.substrate = [ "py-sr25519-bindings", - "substrate-interface", + "substrate-interface<1.8", ] optional-dependencies.tezos = [ "pytezos-crypto==3.13.4.1", From 04ef0b0935f1c5080269c1a5f87906c9b5bc3042 Mon Sep 17 00:00:00 2001 From: 1yam <40899431+1yam@users.noreply.github.com> Date: Wed, 14 Jan 2026 19:15:13 +0100 Subject: [PATCH 118/122] feature: new method in AlephHttpClient `get_store_estimated_price` (#271) --- src/aleph/sdk/client/http.py | 55 ++++++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) diff --git a/src/aleph/sdk/client/http.py b/src/aleph/sdk/client/http.py index 2d4505b3..6fa6d4ac 100644 --- a/src/aleph/sdk/client/http.py +++ b/src/aleph/sdk/client/http.py @@ -524,6 +524,61 @@ async def watch_messages( elif msg.type == 
aiohttp.WSMsgType.ERROR: break + async def get_store_estimated_price( + self, + storage_size_mib: int, + ) -> PriceResponse: + """ + Get the estimated price for a store operation. + + :param storage_size_mib: size in mib you want to store + :return: Price response with cost information + """ + content = { + "address": "0xWeDoNotNeedARealAddress", + "time": time.time(), + "item_type": ItemType.storage, + "estimated_size_mib": storage_size_mib, + "item_hash": compute_sha256("dummy_value"), + } + + item_content: str = json.dumps( + content, + separators=(",", ":"), + default=extended_json_encoder, + ) + + message_dict = dict( + sender=content["address"], + chain=Chain.ETH, + type=MessageType.store, + content=content, + item_content=item_content, + time=time.time(), + channel=settings.DEFAULT_CHANNEL, + item_type=ItemType.inline, + item_hash=compute_sha256(item_content), + signature="0x" + "0" * 130, # Add a dummy signature to pass validation + ) + + message = parse_message(message_dict) + + async with self.http_session.post( + "/api/v0/price/estimate", json=dict(message=message) + ) as resp: + try: + resp.raise_for_status() + response_json = await resp.json() + cost = response_json.get("cost", None) + + return PriceResponse( + cost=cost, + required_tokens=response_json["required_tokens"], + payment_type=response_json["payment_type"], + ) + except aiohttp.ClientResponseError as e: + raise e + async def get_estimated_price( self, content: ExecutableContent, From 50ba6ed2425bed498fe3899472bce9ea892aeb8e Mon Sep 17 00:00:00 2001 From: Olivier Desenfans Date: Wed, 14 Jan 2026 19:15:36 +0100 Subject: [PATCH 119/122] feature: list, create and revoke authorizations (#272) Added methods to `AlephClient` to interact with the authorization system of Aleph Cloud. 
This enables to delegate specific operations to addresses other than your own, whether it is to enable other people to perform specific actions on your behalf or simply avoid signing messages frequently from your main token-holding wallet. --- src/aleph/sdk/client/abstract.py | 58 ++- src/aleph/sdk/types.py | 71 ++- tests/unit/services/test_authorizations.py | 562 +++++++++++++++++++++ 3 files changed, 688 insertions(+), 3 deletions(-) create mode 100644 tests/unit/services/test_authorizations.py diff --git a/src/aleph/sdk/client/abstract.py b/src/aleph/sdk/client/abstract.py index fea2db3f..894717a2 100644 --- a/src/aleph/sdk/client/abstract.py +++ b/src/aleph/sdk/client/abstract.py @@ -38,7 +38,7 @@ from typing_extensions import deprecated from aleph.sdk.conf import settings -from aleph.sdk.types import Account +from aleph.sdk.types import Account, Authorization, SecurityAggregateContent from aleph.sdk.utils import extended_json_encoder from ..query.filters import MessageFilter, PostFilter @@ -295,6 +295,30 @@ def get_program_price( """ raise NotImplementedError("Did you mean to import `AlephHttpClient`?") + async def get_authorizations(self, address: str) -> list[Authorization]: + """ + Retrieves all authorizations for a specific address. + """ + # TODO: update this implementation to use `get_aggregate()` once + # https://github.com/aleph-im/aleph-sdk-python/pull/273 is merged. + # There's currently no way to detect a nonexistent aggregate in generic code just yet. + # fetch_aggregate() throws an implementation-specific ClientResponseError in case of 404. 
+ import aiohttp + + try: + security_aggregate_dict = await self.fetch_aggregate( + address=address, key="security" + ) + except aiohttp.ClientResponseError as e: + if e.status == 404: + return [] + raise + + security_aggregate = SecurityAggregateContent.model_validate( + security_aggregate_dict + ) + return security_aggregate.authorizations + class AuthenticatedAlephClient(AlephClient): account: Account @@ -617,3 +641,35 @@ async def storage_push(self, content: Mapping) -> str: :param content: The dict-like content to upload """ raise NotImplementedError() + + async def update_all_authorizations(self, authorizations: list[Authorization]): + """ + Updates all authorizations for the current account. + Danger! This will replace all authorizations for the account. Use with care. + + :param authorizations: List of authorizations to set. These authorizations will replace the existing ones. + """ + security_aggregate = SecurityAggregateContent(authorizations=authorizations) + await self.create_aggregate( + key="security", content=security_aggregate.model_dump() + ) + + async def add_authorization(self, authorization: Authorization): + """ + Adds a specific authorization for the current account. + """ + authorizations = await self.get_authorizations(self.account.get_address()) + authorizations.append(authorization) + await self.update_all_authorizations(authorizations) + + async def revoke_all_authorizations(self, address: str): + """ + Revokes all authorizations for a specific address. 
+ """ + authorizations = await self.get_authorizations(self.account.get_address()) + filtered_authorizations = [ + authorization + for authorization in authorizations + if authorization.address != address + ] + await self.update_all_authorizations(filtered_authorizations) diff --git a/src/aleph/sdk/types.py b/src/aleph/sdk/types.py index ed2524cb..1bbe66d1 100644 --- a/src/aleph/sdk/types.py +++ b/src/aleph/sdk/types.py @@ -14,7 +14,7 @@ Union, ) -from aleph_message.models import ItemHash +from aleph_message.models import ItemHash, MessageType from pydantic import ( BaseModel, ConfigDict, @@ -24,9 +24,11 @@ TypeAdapter, field_validator, ) -from typing_extensions import runtime_checkable +from typing_extensions import Self, runtime_checkable __all__ = ( + "Authorization", + "AuthorizationBuilder", "StorageEnum", "Account", "AccountFromPrivateKey", @@ -406,3 +408,68 @@ class VmResources(BaseModel): vcpus: PositiveInt memory: PositiveInt disk_mib: PositiveInt + + +class Authorization(BaseModel): + """A single authorization entry for delegated access.""" + + address: str + chain: Optional[Chain] = None + channels: list[str] = [] + types: list[MessageType] = [] + post_types: list[str] = [] + aggregate_keys: list[str] = [] + + +class AuthorizationBuilder: + def __init__(self, address: str): + self._address: str = address + self._chain: Optional[Chain] = None + self._channels: list[str] = [] + self._message_types: list[MessageType] = [] + self._post_types: list[str] = [] + self._aggregate_keys: list[str] = [] + + def chain(self, chain: Chain) -> Self: + self._chain = chain + return self + + def channel(self, channel: str) -> Self: + self._channels.append(channel) + return self + + def message_type(self, message_type: MessageType) -> Self: + self._message_types.append(message_type) + return self + + def post_type(self, post_type: str) -> Self: + if MessageType.post not in self._message_types: + raise ValueError( + "Cannot set post_type without allowing POST message type 
first" + ) + self._post_types.append(post_type) + return self + + def aggregate_key(self, aggregate_key: str) -> Self: + if MessageType.aggregate not in self._message_types: + raise ValueError( + "Cannot set post_type without allowing AGGREGATE message type first" + ) + self._aggregate_keys.append(aggregate_key) + return self + + def build(self) -> Authorization: + return Authorization( + address=self._address, + chain=self._chain, + channels=self._channels, + types=self._message_types, + post_types=self._post_types, + aggregate_keys=self._aggregate_keys, + ) + + +class SecurityAggregateContent(BaseModel): + """Content schema for the 'security' aggregate.""" + + authorizations: list[Authorization] = [] diff --git a/tests/unit/services/test_authorizations.py b/tests/unit/services/test_authorizations.py new file mode 100644 index 00000000..7ab2b7ee --- /dev/null +++ b/tests/unit/services/test_authorizations.py @@ -0,0 +1,562 @@ +""" +Tests for authorization methods in AlephClient. +""" + +from typing import Any, Dict, Iterable, Optional, Tuple + +import pytest +from aleph_message.models import AggregateMessage, Chain, MessageType +from aleph_message.status import MessageStatus + +from aleph.sdk.client.abstract import AuthenticatedAlephClient +from aleph.sdk.types import ( + Account, + Authorization, + AuthorizationBuilder, + SecurityAggregateContent, +) + + +class FakeAccount: + """Minimal fake account for testing.""" + + CHAIN = "ETH" + CURVE = "secp256k1" + + def __init__(self, address: str = "0xTestAddress1234567890123456789012345678"): + self._address = address + + async def sign_message(self, message: Dict) -> Dict: + message["signature"] = "0x" + "ab" * 65 + return message + + async def sign_raw(self, buffer: bytes) -> bytes: + return b"fake_signature" + + def get_address(self) -> str: + return self._address + + def get_public_key(self) -> str: + return "0x" + "cd" * 33 + + +class MockAlephClient(AuthenticatedAlephClient): + """ + A fake authenticated client 
that maintains an in-memory aggregate store. + Aggregates are dictionaries that get merged/updated with each create_aggregate call. + """ + + def __init__(self, account: Optional[Account] = None): + self.account = account or FakeAccount() + # Storage: {address: {key: content}} + self._aggregates: Dict[str, Dict[str, Any]] = {} + + async def fetch_aggregate(self, address: str, key: str) -> Dict[str, Any]: + """Fetch a single aggregate by address and key.""" + if address not in self._aggregates: + return {"authorizations": []} + return self._aggregates[address].get(key, {"authorizations": []}) + + async def fetch_aggregates( + self, address: str, keys: Optional[Iterable[str]] = None + ) -> Dict[str, Dict]: + """Fetch multiple aggregates.""" + if address not in self._aggregates: + return {} + if keys is None: + return self._aggregates[address] + return {k: v for k, v in self._aggregates[address].items() if k in keys} + + async def create_aggregate( + self, + key: str, + content: Dict[str, Any], + address: Optional[str] = None, + channel: Optional[str] = None, + inline: bool = True, + sync: bool = False, + ) -> Tuple[AggregateMessage, MessageStatus]: + """ + Create/update an aggregate. Merges content into existing aggregate. 
+ """ + address = address or self.account.get_address() + + if address not in self._aggregates: + self._aggregates[address] = {} + + # Aggregates merge content (like a dict update) + if key in self._aggregates[address]: + self._aggregates[address][key].update(content) + else: + self._aggregates[address][key] = content + + # Return a minimal mock message + mock_message = AggregateMessage.model_validate( + { + "item_hash": "44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a", + "type": "AGGREGATE", + "chain": "ETH", + "sender": address, + "signature": "0x" + "ab" * 65, + "item_type": "inline", + "item_content": "{}", + "content": { + "key": key, + "address": address, + "content": content, + "time": 0, + }, + "time": 0, + "channel": channel or "TEST", + } + ) + return mock_message, MessageStatus.PROCESSED + + # Stub implementations for abstract methods we don't need + async def create_post(self, *args, **kwargs): + raise NotImplementedError + + async def create_store(self, *args, **kwargs): + raise NotImplementedError + + async def create_program(self, *args, **kwargs): + raise NotImplementedError + + async def create_instance(self, *args, **kwargs): + raise NotImplementedError + + async def forget(self, *args, **kwargs): + raise NotImplementedError + + async def submit(self, *args, **kwargs): + raise NotImplementedError + + async def get_posts(self, *args, **kwargs): + raise NotImplementedError + + async def download_file(self, *args, **kwargs): + raise NotImplementedError + + async def download_file_to_path(self, *args, **kwargs): + raise NotImplementedError + + async def get_messages(self, *args, **kwargs): + raise NotImplementedError + + async def get_message(self, *args, **kwargs): + raise NotImplementedError + + def watch_messages(self, *args, **kwargs): + raise NotImplementedError + + def get_estimated_price(self, *args, **kwargs): + raise NotImplementedError + + def get_program_price(self, *args, **kwargs): + raise NotImplementedError + + +# 
Fixtures +@pytest.fixture +def mock_client() -> MockAlephClient: + """Create a fresh fake client for each test.""" + return MockAlephClient() + + +@pytest.fixture +def mock_client_with_existing_auth() -> MockAlephClient: + """Create a fake client with pre-existing authorizations.""" + client = MockAlephClient() + client._aggregates[client.account.get_address()] = { + "security": { + "authorizations": [ + { + "address": "0xExistingAddress123456789012345678901234", + "chain": "ETH", + "channels": ["existing_channel"], + "types": ["POST"], + "post_types": [], + "aggregate_keys": [], + } + ] + } + } + return client + + +# Tests for get_authorizations +class TestGetAuthorizations: + @pytest.mark.asyncio + async def test_get_authorizations_empty(self, mock_client: MockAlephClient): + """When no authorizations exist, returns empty list.""" + authorizations = await mock_client.get_authorizations( + mock_client.account.get_address() + ) + assert authorizations == [] + + @pytest.mark.asyncio + async def test_get_authorizations_returns_existing( + self, mock_client_with_existing_auth: MockAlephClient + ): + """Returns existing authorizations from aggregate store.""" + authorizations = await mock_client_with_existing_auth.get_authorizations( + mock_client_with_existing_auth.account.get_address() + ) + + assert len(authorizations) == 1 + assert authorizations[0].address == "0xExistingAddress123456789012345678901234" + assert authorizations[0].chain == Chain.ETH + assert authorizations[0].channels == ["existing_channel"] + + +# Tests for update_all_authorizations +class TestUpdateAllAuthorizations: + @pytest.mark.asyncio + async def test_update_replaces_all_authorizations( + self, mock_client: MockAlephClient + ): + """update_all_authorizations replaces the entire authorization list.""" + auth1 = Authorization(address="0xAddress1111111111111111111111111111111111") + auth2 = Authorization(address="0xAddress2222222222222222222222222222222222") + + await 
mock_client.update_all_authorizations([auth1, auth2]) + + # Verify stored content + stored = mock_client._aggregates[mock_client.account.get_address()]["security"] + assert len(stored["authorizations"]) == 2 + + @pytest.mark.asyncio + async def test_update_with_empty_list_clears_authorizations( + self, mock_client_with_existing_auth: MockAlephClient + ): + """Passing an empty list removes all authorizations.""" + await mock_client_with_existing_auth.update_all_authorizations([]) + + authorizations = await mock_client_with_existing_auth.get_authorizations( + mock_client_with_existing_auth.account.get_address() + ) + assert authorizations == [] + + @pytest.mark.asyncio + async def test_update_preserves_authorization_fields( + self, mock_client: MockAlephClient + ): + """All authorization fields are preserved when storing.""" + auth = Authorization( + address="0xFullAuth111111111111111111111111111111111", + chain=Chain.ETH, + channels=["channel1", "channel2"], + types=[MessageType.post, MessageType.aggregate], + post_types=["blog", "comment"], + aggregate_keys=["settings"], + ) + + await mock_client.update_all_authorizations([auth]) + + authorizations = await mock_client.get_authorizations( + mock_client.account.get_address() + ) + assert len(authorizations) == 1 + retrieved = authorizations[0] + assert retrieved.address == auth.address + assert retrieved.chain == Chain.ETH + assert retrieved.channels == ["channel1", "channel2"] + assert MessageType.post in retrieved.types + assert "blog" in retrieved.post_types + + +# Tests for add_authorization +class TestAddAuthorization: + @pytest.mark.asyncio + async def test_add_to_empty(self, mock_client: MockAlephClient): + """Adding authorization when none exist.""" + auth = Authorization(address="0xNewAddress1111111111111111111111111111111") + + await mock_client.add_authorization(auth) + + authorizations = await mock_client.get_authorizations( + mock_client.account.get_address() + ) + assert len(authorizations) == 1 + 
assert ( + authorizations[0].address == "0xNewAddress1111111111111111111111111111111" + ) + + @pytest.mark.asyncio + async def test_add_appends_to_existing( + self, mock_client_with_existing_auth: MockAlephClient + ): + """Adding authorization appends to existing list.""" + new_auth = Authorization( + address="0xNewAddress2222222222222222222222222222222", + channels=["new_channel"], + ) + + await mock_client_with_existing_auth.add_authorization(new_auth) + + authorizations = await mock_client_with_existing_auth.get_authorizations( + mock_client_with_existing_auth.account.get_address() + ) + assert len(authorizations) == 2 + addresses = [a.address for a in authorizations] + assert "0xExistingAddress123456789012345678901234" in addresses + assert "0xNewAddress2222222222222222222222222222222" in addresses + + @pytest.mark.asyncio + async def test_add_multiple_authorizations_sequentially( + self, mock_client: MockAlephClient + ): + """Adding multiple authorizations one by one.""" + auth1 = Authorization(address="0xFirst11111111111111111111111111111111111") + auth2 = Authorization(address="0xSecond2222222222222222222222222222222222") + auth3 = Authorization(address="0xThird33333333333333333333333333333333333") + + await mock_client.add_authorization(auth1) + await mock_client.add_authorization(auth2) + await mock_client.add_authorization(auth3) + + authorizations = await mock_client.get_authorizations( + mock_client.account.get_address() + ) + assert len(authorizations) == 3 + + +# Tests for revoke_all_authorizations +class TestRevokeAllAuthorizations: + @pytest.mark.asyncio + async def test_revoke_removes_matching_address( + self, mock_client_with_existing_auth: MockAlephClient + ): + """Revoking removes all authorizations for the specified address.""" + await mock_client_with_existing_auth.revoke_all_authorizations( + "0xExistingAddress123456789012345678901234" + ) + + authorizations = await mock_client_with_existing_auth.get_authorizations( + 
mock_client_with_existing_auth.account.get_address() + ) + assert len(authorizations) == 0 + + @pytest.mark.asyncio + async def test_revoke_keeps_other_addresses(self, mock_client: MockAlephClient): + """Revoking only removes authorizations for the specified address.""" + auth1 = Authorization(address="0xToRevoke111111111111111111111111111111111") + auth2 = Authorization(address="0xToKeep22222222222222222222222222222222222") + auth3 = Authorization( + address="0xToRevoke111111111111111111111111111111111" + ) # Duplicate + + await mock_client.update_all_authorizations([auth1, auth2, auth3]) + + await mock_client.revoke_all_authorizations( + "0xToRevoke111111111111111111111111111111111" + ) + + authorizations = await mock_client.get_authorizations( + mock_client.account.get_address() + ) + assert len(authorizations) == 1 + assert ( + authorizations[0].address == "0xToKeep22222222222222222222222222222222222" + ) + + @pytest.mark.asyncio + async def test_revoke_nonexistent_address_is_noop( + self, mock_client: MockAlephClient + ): + """Revoking an address that doesn't exist does nothing.""" + auth = Authorization(address="0xExisting1111111111111111111111111111111111") + await mock_client.add_authorization(auth) + + await mock_client.revoke_all_authorizations( + "0xNonExistent22222222222222222222222222222" + ) + + authorizations = await mock_client.get_authorizations( + mock_client.account.get_address() + ) + assert len(authorizations) == 1 + + @pytest.mark.asyncio + async def test_revoke_from_empty_is_noop(self, mock_client: MockAlephClient): + """Revoking when no authorizations exist doesn't error.""" + await mock_client.revoke_all_authorizations( + "0xAnyAddress111111111111111111111111111111111" + ) + + authorizations = await mock_client.get_authorizations( + mock_client.account.get_address() + ) + assert authorizations == [] + + +# Integration tests - full workflows +class TestAuthorizationWorkflows: + @pytest.mark.asyncio + async def test_full_lifecycle(self, 
mock_client: MockAlephClient): + """Test complete authorization lifecycle: add, verify, revoke.""" + delegate_address = "0xDelegate111111111111111111111111111111111" + + # Initially empty + authorizations = await mock_client.get_authorizations( + mock_client.account.get_address() + ) + assert len(authorizations) == 0 + + # Add authorization + auth = Authorization( + address=delegate_address, + channels=["MY_APP"], + types=[MessageType.post], + ) + await mock_client.add_authorization(auth) + + # Verify it exists + authorizations = await mock_client.get_authorizations( + mock_client.account.get_address() + ) + assert len(authorizations) == 1 + assert authorizations[0].address == delegate_address + assert "MY_APP" in authorizations[0].channels + + # Revoke + await mock_client.revoke_all_authorizations(delegate_address) + + # Verify it's gone + authorizations = await mock_client.get_authorizations( + mock_client.account.get_address() + ) + assert len(authorizations) == 0 + + @pytest.mark.asyncio + async def test_multiple_delegates_workflow(self, mock_client: MockAlephClient): + """Test managing authorizations for multiple delegate addresses.""" + delegate1 = "0xDelegate1111111111111111111111111111111111" + delegate2 = "0xDelegate2222222222222222222222222222222222" + + # Add two delegates + await mock_client.add_authorization( + Authorization(address=delegate1, channels=["channel_a"]) + ) + await mock_client.add_authorization( + Authorization(address=delegate2, channels=["channel_b"]) + ) + + authorizations = await mock_client.get_authorizations( + mock_client.account.get_address() + ) + assert len(authorizations) == 2 + + # Revoke first delegate + await mock_client.revoke_all_authorizations(delegate1) + + authorizations = await mock_client.get_authorizations( + mock_client.account.get_address() + ) + assert len(authorizations) == 1 + assert authorizations[0].address == delegate2 + + @pytest.mark.asyncio + async def test_replace_all_authorizations(self, mock_client: 
MockAlephClient): + """Test replacing all authorizations at once.""" + # Add initial authorizations + await mock_client.add_authorization( + Authorization(address="0xOld111111111111111111111111111111111111111") + ) + await mock_client.add_authorization( + Authorization(address="0xOld222222222222222222222222222222222222222") + ) + + # Replace with new set + new_auths = [ + Authorization(address="0xNew111111111111111111111111111111111111111"), + Authorization(address="0xNew222222222222222222222222222222222222222"), + Authorization(address="0xNew333333333333333333333333333333333333333"), + ] + await mock_client.update_all_authorizations(new_auths) + + authorizations = await mock_client.get_authorizations( + mock_client.account.get_address() + ) + assert len(authorizations) == 3 + addresses = {a.address for a in authorizations} + assert "0xOld111111111111111111111111111111111111111" not in addresses + assert "0xNew111111111111111111111111111111111111111" in addresses + + +# Model tests +class TestAuthorizationModel: + def test_minimal_authorization(self): + """Authorization can be created with just an address.""" + auth = Authorization(address="0x1234567890123456789012345678901234567890") + assert auth.address == "0x1234567890123456789012345678901234567890" + assert auth.chain is None + assert auth.channels == [] + assert auth.types == [] + + def test_full_authorization(self): + """Authorization with all fields set.""" + auth = Authorization( + address="0x1234567890123456789012345678901234567890", + chain=Chain.ETH, + channels=["ch1", "ch2"], + types=[MessageType.post, MessageType.store], + post_types=["blog"], + aggregate_keys=["settings"], + ) + assert auth.chain == Chain.ETH + assert len(auth.channels) == 2 + assert len(auth.types) == 2 + + def test_security_aggregate_serialization(self): + """SecurityAggregateContent serializes correctly.""" + auth = Authorization( + address="0x1234567890123456789012345678901234567890", + channels=["test"], + ) + content = 
SecurityAggregateContent(authorizations=[auth]) + dumped = content.model_dump() + + assert "authorizations" in dumped + assert len(dumped["authorizations"]) == 1 + assert dumped["authorizations"][0]["address"] == auth.address + + +class TestAuthorizationBuilder: + def test_authorization_builder_only_address(self): + """Test the AuthorizationBuilder.""" + auth = AuthorizationBuilder( + address="0x1234567890123456789012345678901234567890" + ).build() + assert auth.address == "0x1234567890123456789012345678901234567890" + assert auth.chain is None + assert auth.channels == [] + assert auth.types == [] + assert auth.post_types == [] + assert auth.aggregate_keys == [] + + def test_authorization_builder(self): + """Test the AuthorizationBuilder with a detailed configuration.""" + sample_authorization = Authorization( + address="0xFullAuth111111111111111111111111111111111", + chain=Chain.ETH, + channels=["channel1", "channel2"], + types=[MessageType.post, MessageType.aggregate], + post_types=["blog", "comment"], + aggregate_keys=["settings"], + ) + + auth = AuthorizationBuilder(address=sample_authorization.address).chain( + sample_authorization.chain + ) + for channel in sample_authorization.channels: + auth = auth.channel(channel) + for message_type in sample_authorization.types: + auth = auth.message_type(message_type) + for post_type in sample_authorization.post_types: + auth = auth.post_type(post_type) + for aggregate_key in sample_authorization.aggregate_keys: + auth = auth.aggregate_key(aggregate_key) + auth = auth.build() + + assert auth == sample_authorization From 2c115f31356a698d7bfe6e2165188d7b5623664a Mon Sep 17 00:00:00 2001 From: Angel Date: Tue, 17 Feb 2026 10:23:33 +0100 Subject: [PATCH 120/122] feat: support credit payment in store messages (#278) --- pyproject.toml | 3 +- src/aleph/sdk/client/abstract.py | 2 + src/aleph/sdk/client/authenticated_http.py | 11 +++- tests/unit/test_asynchronous.py | 69 ++++++++++++++++++++++ 4 files changed, 83 
insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 46adc715..2bf21974 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ dynamic = [ "version" ] dependencies = [ "aiohttp>=3.8.3", "aioresponses>=0.7.6", - "aleph-message>=1.0.5", + "aleph-message>=1.1", "aleph-superfluid>=0.3", "base58==2.1.1", # Needed now as default with _load_account changement "coincurve; python_version>='3.9'", @@ -115,6 +115,7 @@ include = [ python = [ "3.9", "3.10", "3.11" ] [tool.hatch.envs.testing] +python = "3.13" features = [ "cosmos", "dns", diff --git a/src/aleph/sdk/client/abstract.py b/src/aleph/sdk/client/abstract.py index 894717a2..3daa198e 100644 --- a/src/aleph/sdk/client/abstract.py +++ b/src/aleph/sdk/client/abstract.py @@ -388,6 +388,7 @@ async def create_store( extra_fields: Optional[dict] = None, channel: Optional[str] = settings.DEFAULT_CHANNEL, sync: bool = False, + payment: Optional[Payment] = None, ) -> Tuple[AlephMessage, MessageStatus]: """ Create a STORE message to store a file on the aleph.im network. @@ -404,6 +405,7 @@ async def create_store( :param extra_fields: Extra fields to add to the STORE message (Default: None) :param channel: Channel to post the message to (Default: "TEST") :param sync: If true, waits for the message to be processed by the API server (Default: False) + :param payment: Payment method used to pay for storage (Default: hold on ETH) """ raise NotImplementedError( "Did you mean to import `AuthenticatedAlephHttpClient`?" 
diff --git a/src/aleph/sdk/client/authenticated_http.py b/src/aleph/sdk/client/authenticated_http.py index 4528a5b7..11aa08f0 100644 --- a/src/aleph/sdk/client/authenticated_http.py +++ b/src/aleph/sdk/client/authenticated_http.py @@ -12,6 +12,7 @@ AggregateContent, AggregateMessage, AlephMessage, + Chain, ForgetContent, ForgetMessage, InstanceMessage, @@ -24,7 +25,7 @@ StoreContent, StoreMessage, ) -from aleph_message.models.execution.base import Encoding, Payment +from aleph_message.models.execution.base import Encoding, Payment, PaymentType from aleph_message.models.execution.environment import ( HostRequirements, HypervisorType, @@ -350,8 +351,12 @@ async def create_store( extra_fields: Optional[dict] = None, channel: Optional[str] = settings.DEFAULT_CHANNEL, sync: bool = False, + payment: Optional[Payment] = None, ) -> Tuple[StoreMessage, MessageStatus]: address = address or settings.ADDRESS_TO_USE or self.account.get_address() + payment = payment or Payment( + chain=Chain.ETH, type=PaymentType.hold, receiver=None + ) extra_fields = extra_fields or {} @@ -374,6 +379,7 @@ async def create_store( extra_fields=extra_fields, channel=channel, sync=sync, + payment=payment, ) elif storage_engine == StorageEnum.ipfs: # We do not support authenticated upload for IPFS yet. 
Use the legacy method @@ -397,6 +403,7 @@ async def create_store( "item_type": storage_engine, "item_hash": file_hash, "time": time.time(), + "payment": payment, } if extra_fields is not None: values.update(extra_fields) @@ -660,6 +667,7 @@ async def _upload_file_native( extra_fields: Optional[dict] = None, channel: Optional[str] = settings.DEFAULT_CHANNEL, sync: bool = False, + payment: Optional[Payment] = None, ) -> Tuple[StoreMessage, MessageStatus]: file_hash = hashlib.sha256(file_content).hexdigest() if magic and guess_mime_type: @@ -674,6 +682,7 @@ async def _upload_file_native( item_hash=ItemHash(file_hash), mime_type=mime_type, # type: ignore time=time.time(), + payment=payment, **(extra_fields or {}), ) message, _ = await self._storage_push_file_with_message( diff --git a/tests/unit/test_asynchronous.py b/tests/unit/test_asynchronous.py index e2647590..1221a9b0 100644 --- a/tests/unit/test_asynchronous.py +++ b/tests/unit/test_asynchronous.py @@ -293,3 +293,72 @@ async def test_create_instance_insufficient_funds_error( receiver=None, ), ) + + +@pytest.mark.asyncio +async def test_create_instance_with_credit_payment(mock_session_with_post_success): + """Test that an instance can be created with credit payment.""" + async with mock_session_with_post_success as session: + instance_message, message_status = await session.create_instance( + rootfs="cafecafecafecafecafecafecafecafecafecafecafecafecafecafecafecafe", + rootfs_size=1, + channel="TEST", + metadata={"tags": ["test"]}, + payment=Payment( + chain=Chain.ETH, + receiver=None, + type=PaymentType.credit, + ), + ) + + assert instance_message.content.payment.type == PaymentType.credit + assert instance_message.content.payment.chain == Chain.ETH + assert instance_message.content.payment.receiver is None + + assert mock_session_with_post_success.http_session.post.assert_called_once + assert isinstance(instance_message, InstanceMessage) + + +@pytest.mark.asyncio +async def 
test_create_store_with_credit_payment(mock_session_with_post_success): + """Test that a store message can be created with credit payment.""" + mock_ipfs_push_file = AsyncMock() + mock_ipfs_push_file.return_value = "QmRTV3h1jLcACW4FRfdisokkQAk4E4qDhUzGpgdrd4JAFy" + + mock_session_with_post_success.ipfs_push_file = mock_ipfs_push_file + + async with mock_session_with_post_success as session: + store_message, message_status = await session.create_store( + file_content=b"HELLO", + channel="TEST", + storage_engine=StorageEnum.ipfs, + payment=Payment( + chain=Chain.ETH, + receiver=None, + type=PaymentType.credit, + ), + ) + + assert store_message.content.payment.type == PaymentType.credit + assert store_message.content.payment.chain == Chain.ETH + assert isinstance(store_message, StoreMessage) + + +@pytest.mark.asyncio +async def test_create_store_default_payment(mock_session_with_post_success): + """Test that a store message defaults to hold payment on ETH.""" + mock_ipfs_push_file = AsyncMock() + mock_ipfs_push_file.return_value = "QmRTV3h1jLcACW4FRfdisokkQAk4E4qDhUzGpgdrd4JAFy" + + mock_session_with_post_success.ipfs_push_file = mock_ipfs_push_file + + async with mock_session_with_post_success as session: + store_message, message_status = await session.create_store( + file_content=b"HELLO", + channel="TEST", + storage_engine=StorageEnum.ipfs, + ) + + assert store_message.content.payment.type == PaymentType.hold + assert store_message.content.payment.chain == Chain.ETH + assert isinstance(store_message, StoreMessage) From 0c31a48621274456f3e3abeef48c745e4cb904a4 Mon Sep 17 00:00:00 2001 From: "Alie.E" Date: Tue, 17 Feb 2026 12:46:30 +0100 Subject: [PATCH 121/122] add dns ALIAS record support (#277) * Add ALIAS record support for custom domain * add tldextract for root domain detection that support multilevel TLDs --- pyproject.toml | 1 + src/aleph/sdk/domain.py | 53 +++++++++++++++++++++++++++++++---------- 2 files changed, 42 insertions(+), 12 deletions(-) diff --git 
a/pyproject.toml b/pyproject.toml index 2bf21974..ad051b58 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -56,6 +56,7 @@ optional-dependencies.cosmos = [ ] optional-dependencies.dns = [ "aiodns", + "tldextract", ] optional-dependencies.docs = [ "sphinxcontrib-plantuml", diff --git a/src/aleph/sdk/domain.py b/src/aleph/sdk/domain.py index 79a0c5d4..70a53b08 100644 --- a/src/aleph/sdk/domain.py +++ b/src/aleph/sdk/domain.py @@ -5,6 +5,7 @@ from urllib.parse import urlparse import aiodns +import tldextract from pydantic import BaseModel, HttpUrl from .conf import settings @@ -198,6 +199,13 @@ async def check_domain( record_type = dns_rule.dns["type"] record_value = dns_rule.dns["value"] + if record_type == "alias": + # ALIAS records cannot be reliably validated via DNS since the + # provider resolves them to A records asynchronously. Consider + # the rule as valid and trust the user's configuration. + status[dns_rule.name] = True + continue + try: entries = await resolver.query(record_name, record_type.upper()) except aiodns.error.DNSError: @@ -249,19 +257,35 @@ def get_required_dns_rules( elif target == TargetType.INSTANCE: cname_value = f"{hostname}.{settings.DNS_INSTANCE_DOMAIN}" - # cname rule - dns_rules.append( - DNSRule( - name="cname", - dns={ - "type": "cname", - "name": hostname, - "value": cname_value, - }, - info=f"Create a CNAME record for {hostname} with value {cname_value}", - on_error=f"CNAME record not found: {hostname}", + # cname or alias rule + if self.is_root_domain(hostname): + record_type = "alias" + dns_rules.append( + DNSRule( + name=record_type, + dns={ + "type": record_type, + "name": hostname, + "value": cname_value, + }, + info=f"Create an ALIAS record for {hostname} with value {cname_value}", + on_error=f"ALIAS record not found: {hostname}", + ) + ) + else: + record_type = "cname" + dns_rules.append( + DNSRule( + name=record_type, + dns={ + "type": record_type, + "name": hostname, + "value": cname_value, + }, + info=f"Create a CNAME 
record for {hostname} with value {cname_value}", + on_error=f"CNAME record not found: {hostname}", + ) ) - ) if target == TargetType.IPFS: # ipfs rule @@ -294,3 +318,8 @@ def get_required_dns_rules( ) return dns_rules + + @staticmethod + def is_root_domain(hostname: Hostname) -> bool: + extracted = tldextract.extract(hostname) + return bool(extracted.domain) and not extracted.subdomain From de022f60dbd230c6ed6877eb6d3df9dd31122fae Mon Sep 17 00:00:00 2001 From: Olivier Desenfans Date: Mon, 23 Feb 2026 11:20:19 +0100 Subject: [PATCH 122/122] internal: release pipeline (#280) --- .github/workflows/publish.yml | 48 +++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 .github/workflows/publish.yml diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 00000000..5bbaeb4a --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,48 @@ +--- +name: Publish to PyPI + +on: + push: + tags: + - "*" + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.12" + + - name: Install build dependencies + run: pip install hatch hatch-vcs + + - name: Build package + run: hatch build + + - name: Upload artifacts + uses: actions/upload-artifact@v4 + with: + name: dist + path: dist/ + + publish: + needs: build + runs-on: ubuntu-latest + environment: pypi + permissions: + id-token: write + steps: + - name: Download artifacts + uses: actions/download-artifact@v4 + with: + name: dist + path: dist/ + + - name: Publish to PyPI + uses: pypa/gh-action-pypi-publish@release/v1