diff --git a/aptos_sdk/package_publisher.py b/aptos_sdk/package_publisher.py
index ac7fcad..a92de63 100644
--- a/aptos_sdk/package_publisher.py
+++ b/aptos_sdk/package_publisher.py
@@ -3,22 +3,23 @@
 import os
 from enum import Enum
-from typing import List, Optional
+from typing import List, Optional, Tuple

 import tomli

 from .account import Account
 from .account_address import AccountAddress
+from .aptos_cli_wrapper import AptosCLIWrapper
 from .async_client import RestClient
 from .bcs import Serializer
 from .transactions import EntryFunction, TransactionArgument, TransactionPayload

 # Maximum amount of publishing data, this gives us buffer for BCS overheads
-MAX_TRANSACTION_SIZE: int = 62000
+MAX_CHUNK_SIZE: int = 60000

 # The location of the large package publisher
 MODULE_ADDRESS: AccountAddress = AccountAddress.from_str(
-    "0xfa3911d7715238b2e3bd5b26b6a35e11ffa16cff318bc11471e84eccee8bd291"
+    "0x0e1ca3011bdd07246d4d16d909dbb2d6953a86c4735d5acf5865d962c630cce7"
 )

 # Domain separator for the code object address derivation
@@ -31,6 +32,216 @@ class PublishMode(Enum):
     OBJECT_UPGRADE = "OBJECT_UPGRADE"


+class CompileHelper:
+    """Helper methods for compiling Move packages."""
+
+    @staticmethod
+    async def derive_object_address(
+        client: RestClient, publisher_address: AccountAddress, required_txns: int = 1
+    ) -> AccountAddress:
+        sequence_number = await client.account_sequence_number(publisher_address)
+        return CompileHelper._create_object_deployment_address(
+            publisher_address, sequence_number + required_txns
+        )
+
+    @staticmethod
+    def _create_object_deployment_address(
+        creator_address: AccountAddress, creator_sequence_number: int
+    ) -> AccountAddress:
+        ser = Serializer()
+        ser.to_bytes(OBJECT_CODE_DEPLOYMENT_DOMAIN_SEPARATOR)
+        ser.u64(creator_sequence_number)
+        seed = ser.output()
+
+        return AccountAddress.for_named_object(creator_address, seed)
+
+
+class PublishHelper:
+    """Helper methods for publishing Move packages."""
+
+    @staticmethod
+    def load_package_artifacts(package_dir: str) -> Tuple[bytes, List[bytes]]:
+        with open(os.path.join(package_dir, "Move.toml"), "rb") as f:
+            data = tomli.load(f)
+        package = data["package"]["name"]
+
+        package_build_dir = os.path.join(package_dir, "build", package)
+        module_directory = os.path.join(package_build_dir, "bytecode_modules")
+        module_paths = os.listdir(module_directory)
+        modules = []
+        for module_path in module_paths:
+            module_path = os.path.join(module_directory, module_path)
+            # Skip anything that is not a regular .mv bytecode file.
+            if not os.path.isfile(module_path) or not module_path.endswith(".mv"):
+                continue
+            with open(module_path, "rb") as f:
+                module = f.read()
+                modules.append(module)
+
+        metadata_path = os.path.join(package_build_dir, "package-metadata.bcs")
+        with open(metadata_path, "rb") as f:
+            metadata = f.read()
+
+        return metadata, modules
+
+    @staticmethod
+    def is_large_package(
+        package_metadata: bytes,
+        modules: List[bytes],
+    ) -> bool:
+        total_size = len(package_metadata)
+        for module in modules:
+            total_size += len(module)
+
+        return total_size >= MAX_CHUNK_SIZE
+
+    @staticmethod
+    def prepare_chunked_payloads(
+        package_metadata: bytes,
+        modules: List[bytes],
+        large_packages_module_address: AccountAddress,
+        publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY,
+        code_object_address: Optional[AccountAddress] = None,
+    ) -> List[TransactionPayload]:
+
+        # Chunk the metadata and insert it into payloads. The last chunk may be small enough
+        # to be placed with other data. This may also be the only chunk.
+        payloads = []
+        metadata_chunks = PublishHelper.create_chunks(package_metadata)
+        for metadata_chunk in metadata_chunks[:-1]:
+            payloads.append(
+                PublishHelper.create_large_package_staging_payload(
+                    large_packages_module_address, metadata_chunk, [], []
+                )
+            )
+
+        metadata_chunk = metadata_chunks[-1]
+        taken_size = len(metadata_chunk)
+        modules_indices: List[int] = []
+        data_chunks: List[bytes] = []
+
+        # Chunk each module and place them into a payload when adding more would exceed the
+        # maximum transaction size.
+        for idx, module in enumerate(modules):
+            chunked_module = PublishHelper.create_chunks(module)
+            for chunk in chunked_module:
+                if taken_size + len(chunk) > MAX_CHUNK_SIZE:
+                    payloads.append(
+                        PublishHelper.create_large_package_staging_payload(
+                            large_packages_module_address,
+                            metadata_chunk,
+                            modules_indices,
+                            data_chunks,
+                        )
+                    )
+                    metadata_chunk = b""
+                    modules_indices = []
+                    data_chunks = []
+                    taken_size = 0
+                if idx not in modules_indices:
+                    modules_indices.append(idx)
+                data_chunks.append(chunk)
+                taken_size += len(chunk)
+
+        # The last transaction will stage any leftover data from the chunking process.
+        # It will then assemble all staged code chunks and publish the package within the
+        # large_packages Move module.
+        payloads.append(
+            PublishHelper.create_large_package_publishing_payload(
+                large_packages_module_address,
+                metadata_chunk,
+                modules_indices,
+                data_chunks,
+                publish_mode,
+                code_object_address,
+            )
+        )
+
+        return payloads
+
+    @staticmethod
+    def create_chunks(data: bytes) -> List[bytes]:
+        chunks: List[bytes] = []
+        read_data = 0
+        while read_data < len(data):
+            start_read_data = read_data
+            read_data = min(read_data + MAX_CHUNK_SIZE, len(data))
+            taken_data = data[start_read_data:read_data]
+            chunks.append(taken_data)
+        return chunks
+
+    @staticmethod
+    def create_large_package_staging_payload(
+        module_address: AccountAddress,
+        chunked_package_metadata: bytes,
+        modules_indices: List[int],
+        chunked_modules: List[bytes],
+    ) -> TransactionPayload:
+        transaction_arguments = [
+            TransactionArgument(chunked_package_metadata, Serializer.to_bytes),
+            TransactionArgument(
+                modules_indices, Serializer.sequence_serializer(Serializer.u16)
+            ),
+            TransactionArgument(
+                chunked_modules, Serializer.sequence_serializer(Serializer.to_bytes)
+            ),
+        ]
+
+        payload = EntryFunction.natural(
+            f"{module_address}::large_packages",
+            "stage_code_chunk",
+            [],
+            transaction_arguments,
+        )
+
+        return TransactionPayload(payload)
+
+    @staticmethod
+    def create_large_package_publishing_payload(
+        module_address: AccountAddress,
+        chunked_package_metadata: bytes,
+        modules_indices: List[int],
+        chunked_modules: List[bytes],
+        publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY,
+        code_object_address: Optional[AccountAddress] = None,
+    ) -> TransactionPayload:
+        transaction_arguments = [
+            TransactionArgument(chunked_package_metadata, Serializer.to_bytes),
+            TransactionArgument(
+                modules_indices, Serializer.sequence_serializer(Serializer.u16)
+            ),
+            TransactionArgument(
+                chunked_modules, Serializer.sequence_serializer(Serializer.to_bytes)
+            ),
+        ]
+
+        # Add code_object_address argument if the publishing mode is OBJECT_UPGRADE
+        if publish_mode == PublishMode.OBJECT_UPGRADE:
+            if code_object_address is None:
+                raise ValueError(
+                    "code_object_address must be provided for OBJECT_UPGRADE mode"
+                )
+            transaction_arguments.append(
+                TransactionArgument(code_object_address, Serializer.struct)
+            )
+
+        if publish_mode == PublishMode.ACCOUNT_DEPLOY:
+            function_name = "stage_code_chunk_and_publish_to_account"
+        elif publish_mode == PublishMode.OBJECT_DEPLOY:
+            function_name = "stage_code_chunk_and_publish_to_object"
+        elif publish_mode == PublishMode.OBJECT_UPGRADE:
+            function_name = "stage_code_chunk_and_upgrade_object_code"
+        else:
+            raise ValueError(f"Unexpected publish mode: {publish_mode}")
+
+        payload = EntryFunction.natural(
+            f"{module_address}::large_packages",
+            function_name,
+            [],
+            transaction_arguments,
+        )
+
+        return TransactionPayload(payload)
+
+
 class PackagePublisher:
     """A wrapper around publishing packages."""

@@ -110,38 +321,106 @@ async def upgrade_package_object(
         )
         return await self.client.submit_bcs_transaction(signed_transaction)

-    async def publish_package_in_path(
+    async def compile_and_publish_move_package(
         self,
         sender: Account,
         package_dir: str,
-        large_package_address: AccountAddress = MODULE_ADDRESS,
+        module_name: str,
+        large_packages_module_address: AccountAddress = MODULE_ADDRESS,
         publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY,
-        code_object: Optional[AccountAddress] = None,
+        code_object_address: Optional[AccountAddress] = None,
     ) -> List[str]:
-        with open(os.path.join(package_dir, "Move.toml"), "rb") as f:
-            data = tomli.load(f)
-        package = data["package"]["name"]
+        """
+        Compiles and publishes a Move package, handling both regular and large packages, as well as account and object
+        deployments.

-        package_build_dir = os.path.join(package_dir, "build", package)
-        module_directory = os.path.join(package_build_dir, "bytecode_modules")
-        module_paths = os.listdir(module_directory)
-        modules = []
-        for module_path in module_paths:
-            module_path = os.path.join(module_directory, module_path)
-            if not os.path.isfile(module_path) and not module_path.endswith(".mv"):
-                continue
-            with open(module_path, "rb") as f:
-                module = f.read()
-                modules.append(module)
+        This method abstracts away complexities such as object address derivation, chunked publishing, and preliminary
+        compilation if needed.

-        metadata_path = os.path.join(package_build_dir, "package-metadata.bcs")
-        with open(metadata_path, "rb") as f:
-            metadata = f.read()
+        Note: This method requires the local Aptos CLI for compilation and will not work without it.
+        """
+        AptosCLIWrapper.assert_cli_exists()
+
+        # Determine the account or object address for publishing the package.
+        if publish_mode == PublishMode.ACCOUNT_DEPLOY:
+            deploy_address = sender.address()
+
+        elif publish_mode == PublishMode.OBJECT_DEPLOY:
+            # Calculate the number of transactions needed for the chunked publish to predict the code object address.
+            # Start by assuming a single transaction for a preliminary build to estimate the artifact size.
+            deploy_address = await CompileHelper.derive_object_address(
+                self.client, sender.address()
+            )
+
+            AptosCLIWrapper.compile_package(package_dir, {module_name: deploy_address})
+            metadata, modules = PublishHelper.load_package_artifacts(package_dir)
+
+            # If the package size requires chunked publishing, recalculate the deploy address.
+            if PublishHelper.is_large_package(metadata, modules):
+                # Number of transactions required for the chunked publish.
+                required_txns = len(
+                    PublishHelper.prepare_chunked_payloads(
+                        metadata,
+                        modules,
+                        large_packages_module_address,
+                        PublishMode.OBJECT_DEPLOY,
+                    )
+                )
+
+                if required_txns > 1:
+                    deploy_address = await CompileHelper.derive_object_address(
+                        self.client, sender.address(), required_txns
+                    )
+
+        elif publish_mode == PublishMode.OBJECT_UPGRADE:
+            if code_object_address is None:
+                raise ValueError(
+                    "code_object_address must be provided for OBJECT_UPGRADE mode"
+                )
+            deploy_address = code_object_address
+
+        else:
+            raise ValueError(f"Unexpected publish mode: {publish_mode}")
+
+        # Compile the package with the correct deployment address.
+        AptosCLIWrapper.compile_package(package_dir, {module_name: deploy_address})
+
+        print(f"Deploying {module_name} to {deploy_address}...")
+
+        return await self.publish_package_in_path(
+            sender,
+            package_dir,
+            large_packages_module_address,
+            publish_mode,
+            code_object_address,
+        )
+
+    async def publish_package_in_path(
+        self,
+        sender: Account,
+        package_dir: str,
+        large_packages_module_address: AccountAddress = MODULE_ADDRESS,
+        publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY,
+        code_object_address: Optional[AccountAddress] = None,
+    ) -> List[str]:
+        """
+        Publishes a Move package from the specified directory under the sender's account.
+
+        The package in `package_dir` must be pre-compiled, as this method loads the artifacts from that directory.
+        This method handles both regular and large packages. If the package size exceeds `MAX_CHUNK_SIZE`,
+        chunked publishing is triggered, allowing the deployment to be split across multiple transactions.
+        """
+        metadata, modules = PublishHelper.load_package_artifacts(package_dir)

         # If the package size is larger than a single transaction limit, use chunked publish.
-        if self.is_large_package(metadata, modules):
+        if PublishHelper.is_large_package(metadata, modules):
             return await self.chunked_package_publish(
-                sender, metadata, modules, large_package_address, publish_mode
+                sender,
+                metadata,
+                modules,
+                large_packages_module_address,
+                publish_mode,
+                code_object_address,
             )

         # If the deployment can fit into a single transaction, use the normal package publisher
@@ -150,161 +429,54 @@ async def publish_package_in_path(
         elif publish_mode == PublishMode.OBJECT_DEPLOY:
             txn_hash = await self.publish_package_to_object(sender, metadata, modules)
         elif publish_mode == PublishMode.OBJECT_UPGRADE:
-            if code_object is None:
-                raise ValueError("code_object must be provided for OBJECT_UPGRADE mode")
+            if code_object_address is None:
+                raise ValueError(
+                    "code_object_address must be provided for OBJECT_UPGRADE mode"
+                )
             txn_hash = await self.upgrade_package_object(
-                sender, metadata, modules, code_object
+                sender, metadata, modules, code_object_address
             )
         else:
             raise ValueError(f"Unexpected publish mode: {publish_mode}")

         return [txn_hash]

-    async def derive_object_address(
-        self, publisher_address: AccountAddress
-    ) -> AccountAddress:
-        sequence_number = await self.client.account_sequence_number(publisher_address)
-        return self.create_object_deployment_address(
-            publisher_address, sequence_number + 1
-        )
-
-    @staticmethod
-    def create_object_deployment_address(
-        creator_address: AccountAddress, creator_sequence_number: int
-    ) -> AccountAddress:
-        ser = Serializer()
-        ser.to_bytes(OBJECT_CODE_DEPLOYMENT_DOMAIN_SEPARATOR)
-        ser.u64(creator_sequence_number)
-        seed = ser.output()
-
-        return AccountAddress.for_named_object(creator_address, seed)
-
     async def chunked_package_publish(
         self,
         sender: Account,
         package_metadata: bytes,
         modules: List[bytes],
-        large_package_address: AccountAddress = MODULE_ADDRESS,
+        large_packages_module_address: AccountAddress = MODULE_ADDRESS,
         publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY,
+        code_object_address: Optional[AccountAddress] = None,
     ) -> List[str]:
         """
         Chunks the package_metadata and modules across as many transactions as necessary. Each
         transaction has a base cost and the maximum size is currently 64K, so this chunks
-        them into 62K + the base transaction size. This should be sufficient for reasonably
+        them into 60K + the base transaction size. This should be sufficient for reasonably
         optimistic transaction batching. The batching tries to place as much data as possible
         in each transaction before moving on to the next transaction.
         """

         # Chunk the metadata and insert it into payloads. The last chunk may be small enough
         # to be placed with other data. This may also be the only chunk.
-        payloads = []
-        metadata_chunks = PackagePublisher.create_chunks(package_metadata)
-        for metadata_chunk in metadata_chunks[:-1]:
-            payloads.append(
-                PackagePublisher.create_large_package_publishing_payload(
-                    large_package_address, metadata_chunk, [], [], False
-                )
-            )
-
-        metadata_chunk = metadata_chunks[-1]
-        taken_size = len(metadata_chunk)
-        modules_indices: List[int] = []
-        data_chunks: List[bytes] = []
-
-        # Chunk each module and place them into a payload when adding more would exceed the
-        # maximum transaction size.
-        for idx, module in enumerate(modules):
-            chunked_module = PackagePublisher.create_chunks(module)
-            for chunk in chunked_module:
-                if taken_size + len(chunk) > MAX_TRANSACTION_SIZE:
-                    payloads.append(
-                        PackagePublisher.create_large_package_publishing_payload(
-                            large_package_address,
-                            metadata_chunk,
-                            modules_indices,
-                            data_chunks,
-                            False,
-                        )
-                    )
-                    metadata_chunk = b""
-                    modules_indices = []
-                    data_chunks = []
-                    taken_size = 0
-                if idx not in modules_indices:
-                    modules_indices.append(idx)
-                data_chunks.append(chunk)
-                taken_size += len(chunk)
-
-        # There will almost certainly be left over data from the chunking, so pass the last
-        # chunk for the sake of publishing.
-        payloads.append(
-            PackagePublisher.create_large_package_publishing_payload(
-                large_package_address,
-                metadata_chunk,
-                modules_indices,
-                data_chunks,
-                True,
-            )
+        payloads = PublishHelper.prepare_chunked_payloads(
+            package_metadata,
+            modules,
+            large_packages_module_address,
+            publish_mode,
+            code_object_address,
         )

         # Submit and wait for each transaction, including publishing.
         txn_hashes = []
-        for payload in payloads:
-            print("Submitting transaction...")
+        for idx, payload in enumerate(payloads):
+            print(f"Submitting transaction... ({idx + 1}/{len(payloads)})")
             signed_txn = await self.client.create_bcs_signed_transaction(
                 sender, payload
             )
             txn_hash = await self.client.submit_bcs_transaction(signed_txn)
             await self.client.wait_for_transaction(txn_hash)
             txn_hashes.append(txn_hash)
+        print("Done.")

         return txn_hashes
-
-    @staticmethod
-    def create_large_package_publishing_payload(
-        module_address: AccountAddress,
-        chunked_package_metadata: bytes,
-        modules_indices: List[int],
-        chunked_modules: List[bytes],
-        publish: bool,
-    ) -> TransactionPayload:
-        transaction_arguments = [
-            TransactionArgument(chunked_package_metadata, Serializer.to_bytes),
-            TransactionArgument(
-                modules_indices, Serializer.sequence_serializer(Serializer.u16)
-            ),
-            TransactionArgument(
-                chunked_modules, Serializer.sequence_serializer(Serializer.to_bytes)
-            ),
-            TransactionArgument(publish, Serializer.bool),
-        ]
-
-        payload = EntryFunction.natural(
-            f"{module_address}::large_packages",
-            "stage_code",
-            [],
-            transaction_arguments,
-        )
-
-        return TransactionPayload(payload)
-
-    @staticmethod
-    def is_large_package(
-        package_metadata: bytes,
-        modules: List[bytes],
-    ) -> bool:
-        total_size = len(package_metadata)
-        for module in modules:
-            total_size += len(module)
-
-        return total_size >= MAX_TRANSACTION_SIZE
-
-    @staticmethod
-    def create_chunks(data: bytes) -> List[bytes]:
-        chunks: List[bytes] = []
-        read_data = 0
-        while read_data < len(data):
-            start_read_data = read_data
-            read_data = min(read_data + MAX_TRANSACTION_SIZE, len(data))
-            taken_data = data[start_read_data:read_data]
-            chunks.append(taken_data)
-        return chunks
diff --git a/examples/integration_test.py b/examples/integration_test.py
index 6335540..61358d0 100644
--- a/examples/integration_test.py
+++ b/examples/integration_test.py
@@ -56,9 +56,6 @@ async def test_hello_blockchain(self):
         await hello_blockchain.main(contract_address)

     async def test_large_package_publisher(self):
-        # TODO -- this is currently broken, out of gas
-        return
-
         from . import large_package_publisher

         large_packages_dir = os.path.join(
diff --git a/examples/large_package_publisher.py b/examples/large_package_publisher.py
index 7cda9ed..3649819 100644
--- a/examples/large_package_publisher.py
+++ b/examples/large_package_publisher.py
@@ -1,13 +1,16 @@
 # Copyright © Aptos Foundation
 # SPDX-License-Identifier: Apache-2.0
+
 """
-This example depends on the MoonCoin.move module having already been published to the destination blockchain.
-One method to do so is to use the CLI:
- * Acquire the Aptos CLI, see https://aptos.dev/cli-tools/aptos-cli-tool/install-aptos-cli
- * `python -m examples.your-coin ~/aptos-core/aptos-move/move-examples/moon_coin`.
- * Open another terminal and `aptos move compile --package-dir ~/aptos-core/aptos-move/move-examples/moon_coin --save-metadata --named-addresses MoonCoin=`.
- * Return to the first terminal and press enter.
+This example demonstrates publishing large Move packages which cannot fit in a single transaction, using the
+highest-level method `compile_and_publish_move_package` from the `PackagePublisher` class. This method handles all
+necessary steps for compiling and publishing both regular and large packages.
+
+Note: This method requires the presence of the Aptos CLI in `APTOS_CLI_PATH`. As an alternative, if you want finer
+control over the process or do not want to rely on the CLI, you may use `publish_package_in_path`, which is
+demonstrated in the `object_code_deployment.py` example.
 """
+
 import asyncio
 import os
 import sys
@@ -15,14 +18,17 @@
 import aptos_sdk.cli as aptos_sdk_cli
 from aptos_sdk.account import Account
 from aptos_sdk.account_address import AccountAddress
-from aptos_sdk.aptos_cli_wrapper import AptosCLIWrapper
 from aptos_sdk.async_client import ClientConfig, FaucetClient, RestClient
-from aptos_sdk.package_publisher import MODULE_ADDRESS, PackagePublisher
+from aptos_sdk.package_publisher import MODULE_ADDRESS, PackagePublisher, PublishMode

 from .common import APTOS_CORE_PATH, FAUCET_AUTH_TOKEN, FAUCET_URL, NODE_URL


 async def publish_large_packages(large_packages_dir) -> AccountAddress:
+    """
+    Publish the `large_packages.move` module under Alice's account for localnet tests.
+    This module is not yet part of the Aptos framework, so it must be manually published for testing.
+    """
     rest_client = RestClient(NODE_URL)
     faucet_client = FaucetClient(FAUCET_URL, rest_client, FAUCET_AUTH_TOKEN)

@@ -36,13 +42,14 @@ async def publish_large_packages(large_packages_dir) -> AccountAddress:

 async def main(
     large_package_example_dir,
-    large_packages_account: AccountAddress = MODULE_ADDRESS,
+    large_packages_module_address: AccountAddress = MODULE_ADDRESS,
 ):
     client_config = ClientConfig()
     client_config.transaction_wait_in_seconds = 120
     client_config.max_gas_amount = 1_000_000
     rest_client = RestClient(NODE_URL, client_config)
     faucet_client = FaucetClient(FAUCET_URL, rest_client, FAUCET_AUTH_TOKEN)
+    publisher = PackagePublisher(rest_client)

     alice = Account.generate()
     req0 = faucet_client.fund_account(alice.address(), 1_000_000_000)
@@ -52,18 +59,37 @@ async def main(
     alice_balance = await rest_client.account_balance(alice.address())
     print(f"Alice: {alice.address()} {alice_balance}")

-    if AptosCLIWrapper.does_cli_exist():
-        AptosCLIWrapper.compile_package(
-            large_package_example_dir, {"large_package_example": alice.address()}
-        )
-    else:
-        input("\nUpdate the module with Alice's address, compile, and press Enter.")
+    # Name of the Move module for the package to be published, whose artifacts exceed MAX_CHUNK_SIZE
+    module_name = "large_package_example"

-    publisher = PackagePublisher(rest_client)
-    await publisher.publish_package_in_path(
-        alice, large_package_example_dir, large_packages_account
+    # -- Example 1. Account deployment
+    print("=== Publishing large package to account ===")
+
+    account_deploy_txn_hash = await publisher.compile_and_publish_move_package(
+        alice, large_package_example_dir, module_name, large_packages_module_address
     )
+    print(f"Tx submitted: {account_deploy_txn_hash[0]}")
+    await rest_client.wait_for_transaction(account_deploy_txn_hash[0])
+    print("Transaction completed.")
+
+    # -- Example 2. Object code deployment
+    print("=== Publishing large package to object ===")
+
+    object_deploy_txn_hash = await publisher.compile_and_publish_move_package(
+        alice,
+        large_package_example_dir,
+        module_name,
+        large_packages_module_address,
+        PublishMode.OBJECT_DEPLOY,
+    )
+
+    print(f"The last tx submitted: {object_deploy_txn_hash[-1]}")
+    await rest_client.wait_for_transaction(object_deploy_txn_hash[-1])
+    print("Transaction completed.")
+
+    await rest_client.close()
+

 if __name__ == "__main__":
     if len(sys.argv) == 2:
diff --git a/examples/object_code_deployment.py b/examples/object_code_deployment.py
index df8bae4..1fe59b3 100644
--- a/examples/object_code_deployment.py
+++ b/examples/object_code_deployment.py
@@ -1,6 +1,15 @@
 # Copyright © Aptos Foundation
 # SPDX-License-Identifier: Apache-2.0

+"""
+This example demonstrates publishing Move packages using the `publish_package_in_path` method from the
+`PackagePublisher` class. This method provides more control over the package publishing process, directly loading
+artifacts from a pre-compiled package directory and handling both account and object deployment.
+
+Note: For a higher-level abstraction that handles compilation and deployment automatically, you may use
+`compile_and_publish_move_package`, as demonstrated in the `large_package_publisher.py` example.
+"""
+
 import asyncio
 import os
 import sys
@@ -8,7 +17,12 @@
 from aptos_sdk.account import Account
 from aptos_sdk.aptos_cli_wrapper import AptosCLIWrapper
 from aptos_sdk.async_client import FaucetClient, RestClient
-from aptos_sdk.package_publisher import MODULE_ADDRESS, PackagePublisher, PublishMode
+from aptos_sdk.package_publisher import (
+    MODULE_ADDRESS,
+    CompileHelper,
+    PackagePublisher,
+    PublishMode,
+)

 from .common import APTOS_CORE_PATH, FAUCET_AUTH_TOKEN, FAUCET_URL, NODE_URL

@@ -29,7 +43,9 @@ async def main(package_dir):
     print(f"Alice: {alice_balance}")

     # The object address is derived from publisher's address and sequence number.
-    code_object_address = await package_publisher.derive_object_address(alice.address())
+    code_object_address = await CompileHelper.derive_object_address(
+        rest_client, alice.address()
+    )

     module_name = "hello_blockchain"
     print("\nCompiling package...")
@@ -57,7 +73,7 @@
         package_dir,
         MODULE_ADDRESS,
         publish_mode=PublishMode.OBJECT_UPGRADE,
-        code_object=code_object_address,
+        code_object_address=code_object_address,
     )
     print(f"Tx submitted: {upgrade_txn_hash[0]}")
     await rest_client.wait_for_transaction(upgrade_txn_hash[0])