From 18efe239cceffb29de1a4d1a622516f3feb7bdd4 Mon Sep 17 00:00:00 2001 From: Junha Park <0xjunha@gmail.com> Date: Thu, 10 Oct 2024 13:44:27 +0900 Subject: [PATCH 1/6] Add support for publishing large packages to PackagePublisher - Updated `large_packages` Move module address - Downsized chunk size to 60KB - Made methods in PackagePublisher reusable in examples - Added object deployment and upgrade options for chunked publishing - Updated `large_package_publisher` example --- aptos_sdk/package_publisher.py | 185 ++++++++++++++++++++-------- examples/integration_test.py | 3 - examples/large_package_publisher.py | 103 +++++++++++++--- examples/object_code_deployment.py | 2 +- 4 files changed, 223 insertions(+), 70 deletions(-) diff --git a/aptos_sdk/package_publisher.py b/aptos_sdk/package_publisher.py index ac7fcad..8dbf7cb 100644 --- a/aptos_sdk/package_publisher.py +++ b/aptos_sdk/package_publisher.py @@ -3,7 +3,7 @@ import os from enum import Enum -from typing import List, Optional +from typing import List, Optional, Tuple import tomli @@ -14,11 +14,11 @@ from .transactions import EntryFunction, TransactionArgument, TransactionPayload # Maximum amount of publishing data, this gives us buffer for BCS overheads -MAX_TRANSACTION_SIZE: int = 62000 +MAX_CHUNK_SIZE: int = 60000 # The location of the large package publisher MODULE_ADDRESS: AccountAddress = AccountAddress.from_str( - "0xfa3911d7715238b2e3bd5b26b6a35e11ffa16cff318bc11471e84eccee8bd291" + "0xa29df848eebfe5d981f708c2a5b06d31af2be53bbd8ddc94c8523f4b903f7adb" ) # Domain separator for the code object address derivation @@ -116,8 +116,41 @@ async def publish_package_in_path( package_dir: str, large_package_address: AccountAddress = MODULE_ADDRESS, publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY, - code_object: Optional[AccountAddress] = None, + code_object_address: Optional[AccountAddress] = None, ) -> List[str]: + metadata, modules = PackagePublisher.load_package_artifacts(package_dir) + + # If the 
package size is larger than a single transaction limit, use chunked publish. + if self.is_large_package(metadata, modules): + return await self.chunked_package_publish( + sender, + metadata, + modules, + large_package_address, + publish_mode, + code_object_address, + ) + + # If the deployment can fit into a single transaction, use the normal package publisher + if publish_mode == PublishMode.ACCOUNT_DEPLOY: + txn_hash = await self.publish_package(sender, metadata, modules) + elif publish_mode == PublishMode.OBJECT_DEPLOY: + txn_hash = await self.publish_package_to_object(sender, metadata, modules) + elif publish_mode == PublishMode.OBJECT_UPGRADE: + if code_object_address is None: + raise ValueError( + "code_object_address must be provided for OBJECT_UPGRADE mode" + ) + txn_hash = await self.upgrade_package_object( + sender, metadata, modules, code_object_address + ) + else: + raise ValueError(f"Unexpected publish mode: {publish_mode}") + + return [txn_hash] + + @staticmethod + def load_package_artifacts(package_dir: str) -> Tuple[bytes, List[bytes]]: with open(os.path.join(package_dir, "Move.toml"), "rb") as f: data = tomli.load(f) package = data["package"]["name"] @@ -138,34 +171,14 @@ async def publish_package_in_path( with open(metadata_path, "rb") as f: metadata = f.read() - # If the package size is larger than a single transaction limit, use chunked publish. 
- if self.is_large_package(metadata, modules): - return await self.chunked_package_publish( - sender, metadata, modules, large_package_address, publish_mode - ) - - # If the deployment can fit into a single transaction, use the normal package publisher - if publish_mode == PublishMode.ACCOUNT_DEPLOY: - txn_hash = await self.publish_package(sender, metadata, modules) - elif publish_mode == PublishMode.OBJECT_DEPLOY: - txn_hash = await self.publish_package_to_object(sender, metadata, modules) - elif publish_mode == PublishMode.OBJECT_UPGRADE: - if code_object is None: - raise ValueError("code_object must be provided for OBJECT_UPGRADE mode") - txn_hash = await self.upgrade_package_object( - sender, metadata, modules, code_object - ) - else: - raise ValueError(f"Unexpected publish mode: {publish_mode}") - - return [txn_hash] + return metadata, modules async def derive_object_address( - self, publisher_address: AccountAddress + self, publisher_address: AccountAddress, required_txns: int = 1 ) -> AccountAddress: sequence_number = await self.client.account_sequence_number(publisher_address) return self.create_object_deployment_address( - publisher_address, sequence_number + 1 + publisher_address, sequence_number + required_txns ) @staticmethod @@ -186,23 +199,56 @@ async def chunked_package_publish( modules: List[bytes], large_package_address: AccountAddress = MODULE_ADDRESS, publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY, + code_object_address: Optional[AccountAddress] = None, ) -> List[str]: """ Chunks the package_metadata and modules across as many transactions as necessary. Each transaction has a base cost and the maximum size is currently 64K, so this chunks - them into 62K + the base transaction size. This should be sufficient for reasonably + them into 60K + the base transaction size. This should be sufficient for reasonably optimistic transaction batching. 
The batching tries to place as much data in a transaction before moving to the chunk to the next transaction. """ + # Chunk the metadata and insert it into payloads. The last chunk may be small enough + # to be placed with other data. This may also be the only chunk. + payloads = PackagePublisher.prepare_chunked_payloads( + package_metadata, + modules, + large_package_address, + publish_mode, + code_object_address, + ) + + # Submit and wait for each transaction, including publishing. + txn_hashes = [] + for idx, payload in enumerate(payloads): + print(f"Submitting transaction...({idx + 1}/{len(payloads)})") + signed_txn = await self.client.create_bcs_signed_transaction( + sender, payload + ) + txn_hash = await self.client.submit_bcs_transaction(signed_txn) + await self.client.wait_for_transaction(txn_hash) + txn_hashes.append(txn_hash) + print("Done.") + return txn_hashes + + @staticmethod + def prepare_chunked_payloads( + package_metadata: bytes, + modules: List[bytes], + large_package_address: AccountAddress, + publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY, + code_object_address: Optional[AccountAddress] = None, + ) -> List[TransactionPayload]: + # Chunk the metadata and insert it into payloads. The last chunk may be small enough # to be placed with other data. This may also be the only chunk. 
payloads = [] metadata_chunks = PackagePublisher.create_chunks(package_metadata) for metadata_chunk in metadata_chunks[:-1]: payloads.append( - PackagePublisher.create_large_package_publishing_payload( - large_package_address, metadata_chunk, [], [], False + PackagePublisher.create_large_package_staging_payload( + large_package_address, metadata_chunk, [], [] ) ) @@ -216,14 +262,13 @@ async def chunked_package_publish( for idx, module in enumerate(modules): chunked_module = PackagePublisher.create_chunks(module) for chunk in chunked_module: - if taken_size + len(chunk) > MAX_TRANSACTION_SIZE: + if taken_size + len(chunk) > MAX_CHUNK_SIZE: payloads.append( - PackagePublisher.create_large_package_publishing_payload( + PackagePublisher.create_large_package_staging_payload( large_package_address, metadata_chunk, modules_indices, data_chunks, - False, ) ) metadata_chunk = b"" @@ -235,29 +280,46 @@ async def chunked_package_publish( data_chunks.append(chunk) taken_size += len(chunk) - # There will almost certainly be left over data from the chunking, so pass the last - # chunk for the sake of publishing. + # The last transaction will stage any leftover data from the chunking process. + # It will then assemble all staged code chunks and publish it within the large_packages Move module. payloads.append( PackagePublisher.create_large_package_publishing_payload( large_package_address, metadata_chunk, modules_indices, data_chunks, - True, + publish_mode, + code_object_address, ) ) - # Submit and wait for each transaction, including publishing. 
- txn_hashes = [] - for payload in payloads: - print("Submitting transaction...") - signed_txn = await self.client.create_bcs_signed_transaction( - sender, payload - ) - txn_hash = await self.client.submit_bcs_transaction(signed_txn) - await self.client.wait_for_transaction(txn_hash) - txn_hashes.append(txn_hash) - return txn_hashes + return payloads + + @staticmethod + def create_large_package_staging_payload( + module_address: AccountAddress, + chunked_package_metadata: bytes, + modules_indices: List[int], + chunked_modules: List[bytes], + ) -> TransactionPayload: + transaction_arguments = [ + TransactionArgument(chunked_package_metadata, Serializer.to_bytes), + TransactionArgument( + modules_indices, Serializer.sequence_serializer(Serializer.u16) + ), + TransactionArgument( + chunked_modules, Serializer.sequence_serializer(Serializer.to_bytes) + ), + ] + + payload = EntryFunction.natural( + f"{module_address}::large_packages", + "stage_code_chunk", + [], + transaction_arguments, + ) + + return TransactionPayload(payload) @staticmethod def create_large_package_publishing_payload( @@ -265,7 +327,8 @@ def create_large_package_publishing_payload( chunked_package_metadata: bytes, modules_indices: List[int], chunked_modules: List[bytes], - publish: bool, + publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY, + code_object_address: Optional[AccountAddress] = None, ) -> TransactionPayload: transaction_arguments = [ TransactionArgument(chunked_package_metadata, Serializer.to_bytes), @@ -275,12 +338,30 @@ def create_large_package_publishing_payload( TransactionArgument( chunked_modules, Serializer.sequence_serializer(Serializer.to_bytes) ), - TransactionArgument(publish, Serializer.bool), ] + # Add code_object_address argument if the publishing mode is OBJECT_UPGRADE + if publish_mode == PublishMode.OBJECT_UPGRADE: + if code_object_address is None: + raise ValueError( + "code_object_address must be provided for OBJECT_UPGRADE mode" + ) + transaction_arguments.append( + 
TransactionArgument(code_object_address, Serializer.struct) + ) + + if publish_mode == PublishMode.ACCOUNT_DEPLOY: + function_name = "stage_code_chunk_and_publish_to_account" + elif publish_mode == PublishMode.OBJECT_DEPLOY: + function_name = "stage_code_chunk_and_publish_to_object" + elif publish_mode == PublishMode.OBJECT_UPGRADE: + function_name = "stage_code_chunk_and_upgrade_object_code" + else: + raise ValueError(f"Unexpected publish mode: {publish_mode}") + payload = EntryFunction.natural( f"{module_address}::large_packages", - "stage_code", + function_name, [], transaction_arguments, ) @@ -296,7 +377,7 @@ def is_large_package( for module in modules: total_size += len(module) - return total_size >= MAX_TRANSACTION_SIZE + return total_size >= MAX_CHUNK_SIZE @staticmethod def create_chunks(data: bytes) -> List[bytes]: @@ -304,7 +385,7 @@ def create_chunks(data: bytes) -> List[bytes]: read_data = 0 while read_data < len(data): start_read_data = read_data - read_data = min(read_data + MAX_TRANSACTION_SIZE, len(data)) + read_data = min(read_data + MAX_CHUNK_SIZE, len(data)) taken_data = data[start_read_data:read_data] chunks.append(taken_data) return chunks diff --git a/examples/integration_test.py b/examples/integration_test.py index 6335540..61358d0 100644 --- a/examples/integration_test.py +++ b/examples/integration_test.py @@ -56,9 +56,6 @@ async def test_hello_blockchain(self): await hello_blockchain.main(contract_address) async def test_large_package_publisher(self): - # TODO -- this is currently broken, out of gas - return - from . 
import large_package_publisher large_packages_dir = os.path.join( diff --git a/examples/large_package_publisher.py b/examples/large_package_publisher.py index 4ddc542..264b891 100644 --- a/examples/large_package_publisher.py +++ b/examples/large_package_publisher.py @@ -1,13 +1,5 @@ # Copyright © Aptos Foundation # SPDX-License-Identifier: Apache-2.0 -""" -This example depends on the MoonCoin.move module having already been published to the destination blockchain. -One method to do so is to use the CLI: - * Acquire the Aptos CLI, see https://aptos.dev/cli-tools/aptos-cli-tool/install-aptos-cli - * `python -m examples.your-coin ~/aptos-core/aptos-move/move-examples/moon_coin`. - * Open another terminal and `aptos move compile --package-dir ~/aptos-core/aptos-move/move-examples/moon_coin --save-metadata --named-addresses MoonCoin=`. - * Return to the first terminal and press enter. -""" import asyncio import os import sys @@ -17,7 +9,7 @@ from aptos_sdk.account_address import AccountAddress from aptos_sdk.aptos_cli_wrapper import AptosCLIWrapper from aptos_sdk.async_client import ClientConfig, FaucetClient, RestClient -from aptos_sdk.package_publisher import MODULE_ADDRESS, PackagePublisher +from aptos_sdk.package_publisher import MODULE_ADDRESS, PackagePublisher, PublishMode from .common import APTOS_CORE_PATH, FAUCET_URL, NODE_URL @@ -36,13 +28,14 @@ async def publish_large_packages(large_packages_dir) -> AccountAddress: async def main( large_package_example_dir, - large_packages_account: AccountAddress = MODULE_ADDRESS, + large_packages_module_address: AccountAddress = MODULE_ADDRESS, ): client_config = ClientConfig() client_config.transaction_wait_in_seconds = 120 client_config.max_gas_amount = 1_000_000 rest_client = RestClient(NODE_URL, client_config) faucet_client = FaucetClient(FAUCET_URL, rest_client) + publisher = PackagePublisher(rest_client) alice = Account.generate() req0 = faucet_client.fund_account(alice.address(), 1_000_000_000) @@ -52,18 +45,100 @@ 
async def main( alice_balance = await rest_client.account_balance(alice.address()) print(f"Alice: {alice.address()} {alice_balance}") + # Name of the move module for the package to be published, containing artifacts larger than the MAX_CHUNK_SIZE + module_name = "large_package_example" + + # Example 1. Account deployment + print("=== Publishing large package to account ===") + if AptosCLIWrapper.does_cli_exist(): AptosCLIWrapper.compile_package( - large_package_example_dir, {"large_package_example": alice.address()} + large_package_example_dir, {module_name: alice.address()} ) else: input("\nUpdate the module with Alice's address, compile, and press Enter.") - publisher = PackagePublisher(rest_client) - await publisher.publish_package_in_path( - alice, large_package_example_dir, large_packages_account + account_deploy_txn_hash = await publisher.publish_package_in_path( + alice, large_package_example_dir, large_packages_module_address + ) + + print(f"Tx submitted: {account_deploy_txn_hash[0]}") + await rest_client.wait_for_transaction(account_deploy_txn_hash[0]) + print(f"Package deployed to account {alice.address()}") + + # Example 2. Object code deployment + # Note: Here we assume that we already know we should use the chunked publish mode, so we run a preliminary build. + print("=== Publishing large package to object ===") + + # Calculate the number of transactions needed for the chunked publish to predict the code object address. + # Start by deriving the address assuming a single transaction for a preliminary build to estimate artifact size. 
+ code_object_address = await publisher.derive_object_address(alice.address()) + + print("\nCompiling package as a preliminary build...") + if AptosCLIWrapper.does_cli_exist(): + AptosCLIWrapper.compile_package( + large_package_example_dir, {module_name: code_object_address} + ) + else: + print(f"Address of the object to be created: {code_object_address}") + input( + "\nUpdate the module with the derived code object address, compile, and press enter." + ) + + metadata, modules = publisher.load_package_artifacts(large_package_example_dir) + + # Number of transactions required for the chunked publish. + required_txns = len( + publisher.prepare_chunked_payloads( + metadata, + modules, + large_packages_module_address, + PublishMode.OBJECT_DEPLOY, + ) ) + if required_txns > 1: + code_object_address = await publisher.derive_object_address( + alice.address(), required_txns + ) + print("\nCompiling the package with updated object address...") + if AptosCLIWrapper.does_cli_exist(): + AptosCLIWrapper.compile_package( + large_package_example_dir, {module_name: code_object_address} + ) + else: + print(f"Address of the object to be created: {code_object_address}") + input( + "\nUpdate the module with the derived code object address, compile, and press enter." + ) + + object_deploy_txn_hash = await publisher.publish_package_in_path( + alice, + large_package_example_dir, + large_packages_module_address, + PublishMode.OBJECT_DEPLOY, + ) + + print(f"The last tx submitted: {object_deploy_txn_hash[-1]}") + await rest_client.wait_for_transaction(object_deploy_txn_hash[-1]) + print(f"Package deployed to object {code_object_address}") + + # Example 3. 
Object code upgrade + print("=== Upgrading large package object ===") + + object_upgrade_txn_hash = await publisher.publish_package_in_path( + alice, + large_package_example_dir, + large_packages_module_address, + PublishMode.OBJECT_UPGRADE, + code_object_address, + ) + + print(f"The last tx submitted: {object_upgrade_txn_hash[-1]}") + await rest_client.wait_for_transaction(object_upgrade_txn_hash[-1]) + print(f"Package in object {code_object_address} upgraded") + await rest_client.close() + if __name__ == "__main__": if len(sys.argv) == 2: diff --git a/examples/object_code_deployment.py b/examples/object_code_deployment.py index c6c467e..7165bbf 100644 --- a/examples/object_code_deployment.py +++ b/examples/object_code_deployment.py @@ -57,7 +57,7 @@ async def main(package_dir): package_dir, MODULE_ADDRESS, publish_mode=PublishMode.OBJECT_UPGRADE, - code_object=code_object_address, + code_object_address=code_object_address, ) print(f"Tx submitted: {upgrade_txn_hash[0]}") await rest_client.wait_for_transaction(upgrade_txn_hash[0]) From 728f6430dca010cd59bac0e307c171cd8e558e8f Mon Sep 17 00:00:00 2001 From: Junha Park <0xjunha@gmail.com> Date: Thu, 10 Oct 2024 20:25:09 +0900 Subject: [PATCH 2/6] Update large_packages.move module address --- aptos_sdk/package_publisher.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aptos_sdk/package_publisher.py b/aptos_sdk/package_publisher.py index 8dbf7cb..ee1e817 100644 --- a/aptos_sdk/package_publisher.py +++ b/aptos_sdk/package_publisher.py @@ -18,7 +18,7 @@ # The location of the large package publisher MODULE_ADDRESS: AccountAddress = AccountAddress.from_str( - "0xa29df848eebfe5d981f708c2a5b06d31af2be53bbd8ddc94c8523f4b903f7adb" + "0x0e1ca3011bdd07246d4d16d909dbb2d6953a86c4735d5acf5865d962c630cce7" ) # Domain separator for the code object address derivation From e5c59af0758ae8a3253807a73a5896c322daddb9 Mon Sep 17 00:00:00 2001 From: Junha Park <0xjunha@gmail.com> Date: Fri, 25 Oct 2024 18:41:31 +0900 
Subject: [PATCH 3/6] Add function docs --- aptos_sdk/package_publisher.py | 7 +++++++ examples/large_package_publisher.py | 4 ++++ 2 files changed, 11 insertions(+) diff --git a/aptos_sdk/package_publisher.py b/aptos_sdk/package_publisher.py index ee1e817..4524ac6 100644 --- a/aptos_sdk/package_publisher.py +++ b/aptos_sdk/package_publisher.py @@ -118,6 +118,13 @@ async def publish_package_in_path( publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY, code_object_address: Optional[AccountAddress] = None, ) -> List[str]: + """ + Publishes a Move package from the specified directory under the sender's account. + + The package in `package_dir` must be pre-compiled, as this method loads the artifacts from that directory. + This method handles both regular and large packages. If the package size exceeds `MAX_CHUNK_SIZE`, + chunked publishing is triggered, allowing the deployment to be split across multiple transactions. + """ metadata, modules = PackagePublisher.load_package_artifacts(package_dir) # If the package size is larger than a single transaction limit, use chunked publish. diff --git a/examples/large_package_publisher.py b/examples/large_package_publisher.py index 264b891..68a156c 100644 --- a/examples/large_package_publisher.py +++ b/examples/large_package_publisher.py @@ -15,6 +15,10 @@ async def publish_large_packages(large_packages_dir) -> AccountAddress: + """ + Publish the `large_packages.move` module under Alice's account for localnet tests. + This module is not yet part of the Aptos framework, so it must be manually published for testing. 
+ """ rest_client = RestClient(NODE_URL) faucet_client = FaucetClient(FAUCET_URL, rest_client) From 5dd16446623267f3d2189ba4f23323647c27c8ca Mon Sep 17 00:00:00 2001 From: Junha Park <0xjunha@gmail.com> Date: Fri, 25 Oct 2024 19:43:57 +0900 Subject: [PATCH 4/6] Refactor `package_publisher.py`, introducing helper classes --- aptos_sdk/package_publisher.py | 397 ++++++++++++++-------------- examples/large_package_publisher.py | 20 +- examples/object_code_deployment.py | 11 +- 3 files changed, 226 insertions(+), 202 deletions(-) diff --git a/aptos_sdk/package_publisher.py b/aptos_sdk/package_publisher.py index 4524ac6..8a6a52b 100644 --- a/aptos_sdk/package_publisher.py +++ b/aptos_sdk/package_publisher.py @@ -31,130 +31,32 @@ class PublishMode(Enum): OBJECT_UPGRADE = "OBJECT_UPGRADE" -class PackagePublisher: - """A wrapper around publishing packages.""" - - client: RestClient - - def __init__(self, client: RestClient): - self.client = client - - async def publish_package( - self, sender: Account, package_metadata: bytes, modules: List[bytes] - ) -> str: - transaction_arguments = [ - TransactionArgument(package_metadata, Serializer.to_bytes), - TransactionArgument( - modules, Serializer.sequence_serializer(Serializer.to_bytes) - ), - ] - - payload = EntryFunction.natural( - "0x1::code", - "publish_package_txn", - [], - transaction_arguments, - ) +class CompileHelper: + """Helper methods for compiling Move packages.""" - signed_transaction = await self.client.create_bcs_signed_transaction( - sender, TransactionPayload(payload) - ) - return await self.client.submit_bcs_transaction(signed_transaction) - - async def publish_package_to_object( - self, sender: Account, package_metadata: bytes, modules: List[bytes] - ) -> str: - transaction_arguments = [ - TransactionArgument(package_metadata, Serializer.to_bytes), - TransactionArgument( - modules, Serializer.sequence_serializer(Serializer.to_bytes) - ), - ] - - payload = EntryFunction.natural( - 
"0x1::object_code_deployment", - "publish", - [], - transaction_arguments, - ) - - signed_transaction = await self.client.create_bcs_signed_transaction( - sender, TransactionPayload(payload) - ) - return await self.client.submit_bcs_transaction(signed_transaction) - - async def upgrade_package_object( - self, - sender: Account, - package_metadata: bytes, - modules: List[bytes], - object_address: AccountAddress, - ) -> str: - transaction_arguments = [ - TransactionArgument(package_metadata, Serializer.to_bytes), - TransactionArgument( - modules, Serializer.sequence_serializer(Serializer.to_bytes) - ), - TransactionArgument(object_address, Serializer.struct), - ] - - payload = EntryFunction.natural( - "0x1::object_code_deployment", - "upgrade", - [], - transaction_arguments, - ) - - signed_transaction = await self.client.create_bcs_signed_transaction( - sender, TransactionPayload(payload) + @staticmethod + async def derive_object_address( + client: RestClient, publisher_address: AccountAddress, required_txns: int = 1 + ) -> AccountAddress: + sequence_number = await client.account_sequence_number(publisher_address) + return CompileHelper._create_object_deployment_address( + publisher_address, sequence_number + required_txns ) - return await self.client.submit_bcs_transaction(signed_transaction) - async def publish_package_in_path( - self, - sender: Account, - package_dir: str, - large_package_address: AccountAddress = MODULE_ADDRESS, - publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY, - code_object_address: Optional[AccountAddress] = None, - ) -> List[str]: - """ - Publishes a Move package from the specified directory under the sender's account. - - The package in `package_dir` must be pre-compiled, as this method loads the artifacts from that directory. - This method handles both regular and large packages. If the package size exceeds `MAX_CHUNK_SIZE`, - chunked publishing is triggered, allowing the deployment to be split across multiple transactions. 
- """ - metadata, modules = PackagePublisher.load_package_artifacts(package_dir) + @staticmethod + def _create_object_deployment_address( + creator_address: AccountAddress, creator_sequence_number: int + ) -> AccountAddress: + ser = Serializer() + ser.to_bytes(OBJECT_CODE_DEPLOYMENT_DOMAIN_SEPARATOR) + ser.u64(creator_sequence_number) + seed = ser.output() - # If the package size is larger than a single transaction limit, use chunked publish. - if self.is_large_package(metadata, modules): - return await self.chunked_package_publish( - sender, - metadata, - modules, - large_package_address, - publish_mode, - code_object_address, - ) + return AccountAddress.for_named_object(creator_address, seed) - # If the deployment can fit into a single transaction, use the normal package publisher - if publish_mode == PublishMode.ACCOUNT_DEPLOY: - txn_hash = await self.publish_package(sender, metadata, modules) - elif publish_mode == PublishMode.OBJECT_DEPLOY: - txn_hash = await self.publish_package_to_object(sender, metadata, modules) - elif publish_mode == PublishMode.OBJECT_UPGRADE: - if code_object_address is None: - raise ValueError( - "code_object_address must be provided for OBJECT_UPGRADE mode" - ) - txn_hash = await self.upgrade_package_object( - sender, metadata, modules, code_object_address - ) - else: - raise ValueError(f"Unexpected publish mode: {publish_mode}") - return [txn_hash] +class PublishHelper: + """Helper methods for publishing Move packages.""" @staticmethod def load_package_artifacts(package_dir: str) -> Tuple[bytes, List[bytes]]: @@ -180,64 +82,16 @@ def load_package_artifacts(package_dir: str) -> Tuple[bytes, List[bytes]]: return metadata, modules - async def derive_object_address( - self, publisher_address: AccountAddress, required_txns: int = 1 - ) -> AccountAddress: - sequence_number = await self.client.account_sequence_number(publisher_address) - return self.create_object_deployment_address( - publisher_address, sequence_number + required_txns - ) - 
@staticmethod - def create_object_deployment_address( - creator_address: AccountAddress, creator_sequence_number: int - ) -> AccountAddress: - ser = Serializer() - ser.to_bytes(OBJECT_CODE_DEPLOYMENT_DOMAIN_SEPARATOR) - ser.u64(creator_sequence_number) - seed = ser.output() - - return AccountAddress.for_named_object(creator_address, seed) - - async def chunked_package_publish( - self, - sender: Account, + def is_large_package( package_metadata: bytes, modules: List[bytes], - large_package_address: AccountAddress = MODULE_ADDRESS, - publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY, - code_object_address: Optional[AccountAddress] = None, - ) -> List[str]: - """ - Chunks the package_metadata and modules across as many transactions as necessary. - Each transaction has a base cost and the maximum size is currently 64K, so this chunks - them into 60K + the base transaction size. This should be sufficient for reasonably - optimistic transaction batching. The batching tries to place as much data in a transaction - before moving to the chunk to the next transaction. - """ - - # Chunk the metadata and insert it into payloads. The last chunk may be small enough - # to be placed with other data. This may also be the only chunk. - payloads = PackagePublisher.prepare_chunked_payloads( - package_metadata, - modules, - large_package_address, - publish_mode, - code_object_address, - ) + ) -> bool: + total_size = len(package_metadata) + for module in modules: + total_size += len(module) - # Submit and wait for each transaction, including publishing. 
- txn_hashes = [] - for idx, payload in enumerate(payloads): - print(f"Submitting transaction...({idx + 1}/{len(payloads)})") - signed_txn = await self.client.create_bcs_signed_transaction( - sender, payload - ) - txn_hash = await self.client.submit_bcs_transaction(signed_txn) - await self.client.wait_for_transaction(txn_hash) - txn_hashes.append(txn_hash) - print("Done.") - return txn_hashes + return total_size >= MAX_CHUNK_SIZE @staticmethod def prepare_chunked_payloads( @@ -251,10 +105,10 @@ def prepare_chunked_payloads( # Chunk the metadata and insert it into payloads. The last chunk may be small enough # to be placed with other data. This may also be the only chunk. payloads = [] - metadata_chunks = PackagePublisher.create_chunks(package_metadata) + metadata_chunks = PublishHelper.create_chunks(package_metadata) for metadata_chunk in metadata_chunks[:-1]: payloads.append( - PackagePublisher.create_large_package_staging_payload( + PublishHelper.create_large_package_staging_payload( large_package_address, metadata_chunk, [], [] ) ) @@ -267,11 +121,11 @@ def prepare_chunked_payloads( # Chunk each module and place them into a payload when adding more would exceed the # maximum transaction size. for idx, module in enumerate(modules): - chunked_module = PackagePublisher.create_chunks(module) + chunked_module = PublishHelper.create_chunks(module) for chunk in chunked_module: if taken_size + len(chunk) > MAX_CHUNK_SIZE: payloads.append( - PackagePublisher.create_large_package_staging_payload( + PublishHelper.create_large_package_staging_payload( large_package_address, metadata_chunk, modules_indices, @@ -290,7 +144,7 @@ def prepare_chunked_payloads( # The last transaction will stage any leftover data from the chunking process. # It will then assemble all staged code chunks and publish it within the large_packages Move module. 
payloads.append( - PackagePublisher.create_large_package_publishing_payload( + PublishHelper.create_large_package_publishing_payload( large_package_address, metadata_chunk, modules_indices, @@ -302,6 +156,17 @@ def prepare_chunked_payloads( return payloads + @staticmethod + def create_chunks(data: bytes) -> List[bytes]: + chunks: List[bytes] = [] + read_data = 0 + while read_data < len(data): + start_read_data = read_data + read_data = min(read_data + MAX_CHUNK_SIZE, len(data)) + taken_data = data[start_read_data:read_data] + chunks.append(taken_data) + return chunks + @staticmethod def create_large_package_staging_payload( module_address: AccountAddress, @@ -375,24 +240,168 @@ def create_large_package_publishing_payload( return TransactionPayload(payload) - @staticmethod - def is_large_package( + +class PackagePublisher: + """A wrapper around publishing packages.""" + + client: RestClient + + def __init__(self, client: RestClient): + self.client = client + + async def publish_package( + self, sender: Account, package_metadata: bytes, modules: List[bytes] + ) -> str: + transaction_arguments = [ + TransactionArgument(package_metadata, Serializer.to_bytes), + TransactionArgument( + modules, Serializer.sequence_serializer(Serializer.to_bytes) + ), + ] + + payload = EntryFunction.natural( + "0x1::code", + "publish_package_txn", + [], + transaction_arguments, + ) + + signed_transaction = await self.client.create_bcs_signed_transaction( + sender, TransactionPayload(payload) + ) + return await self.client.submit_bcs_transaction(signed_transaction) + + async def publish_package_to_object( + self, sender: Account, package_metadata: bytes, modules: List[bytes] + ) -> str: + transaction_arguments = [ + TransactionArgument(package_metadata, Serializer.to_bytes), + TransactionArgument( + modules, Serializer.sequence_serializer(Serializer.to_bytes) + ), + ] + + payload = EntryFunction.natural( + "0x1::object_code_deployment", + "publish", + [], + transaction_arguments, + ) + + 
signed_transaction = await self.client.create_bcs_signed_transaction( + sender, TransactionPayload(payload) + ) + return await self.client.submit_bcs_transaction(signed_transaction) + + async def upgrade_package_object( + self, + sender: Account, package_metadata: bytes, modules: List[bytes], - ) -> bool: - total_size = len(package_metadata) - for module in modules: - total_size += len(module) + object_address: AccountAddress, + ) -> str: + transaction_arguments = [ + TransactionArgument(package_metadata, Serializer.to_bytes), + TransactionArgument( + modules, Serializer.sequence_serializer(Serializer.to_bytes) + ), + TransactionArgument(object_address, Serializer.struct), + ] - return total_size >= MAX_CHUNK_SIZE + payload = EntryFunction.natural( + "0x1::object_code_deployment", + "upgrade", + [], + transaction_arguments, + ) - @staticmethod - def create_chunks(data: bytes) -> List[bytes]: - chunks: List[bytes] = [] - read_data = 0 - while read_data < len(data): - start_read_data = read_data - read_data = min(read_data + MAX_CHUNK_SIZE, len(data)) - taken_data = data[start_read_data:read_data] - chunks.append(taken_data) - return chunks + signed_transaction = await self.client.create_bcs_signed_transaction( + sender, TransactionPayload(payload) + ) + return await self.client.submit_bcs_transaction(signed_transaction) + + async def publish_package_in_path( + self, + sender: Account, + package_dir: str, + large_package_address: AccountAddress = MODULE_ADDRESS, + publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY, + code_object_address: Optional[AccountAddress] = None, + ) -> List[str]: + """ + Publishes a Move package from the specified directory under the sender's account. + + The package in `package_dir` must be pre-compiled, as this method loads the artifacts from that directory. + This method handles both regular and large packages. 
If the package size exceeds `MAX_CHUNK_SIZE`, + chunked publishing is triggered, allowing the deployment to be split across multiple transactions. + """ + metadata, modules = PublishHelper.load_package_artifacts(package_dir) + + # If the package size is larger than a single transaction limit, use chunked publish. + if PublishHelper.is_large_package(metadata, modules): + return await self.chunked_package_publish( + sender, + metadata, + modules, + large_package_address, + publish_mode, + code_object_address, + ) + + # If the deployment can fit into a single transaction, use the normal package publisher + if publish_mode == PublishMode.ACCOUNT_DEPLOY: + txn_hash = await self.publish_package(sender, metadata, modules) + elif publish_mode == PublishMode.OBJECT_DEPLOY: + txn_hash = await self.publish_package_to_object(sender, metadata, modules) + elif publish_mode == PublishMode.OBJECT_UPGRADE: + if code_object_address is None: + raise ValueError( + "code_object_address must be provided for OBJECT_UPGRADE mode" + ) + txn_hash = await self.upgrade_package_object( + sender, metadata, modules, code_object_address + ) + else: + raise ValueError(f"Unexpected publish mode: {publish_mode}") + + return [txn_hash] + + async def chunked_package_publish( + self, + sender: Account, + package_metadata: bytes, + modules: List[bytes], + large_package_address: AccountAddress = MODULE_ADDRESS, + publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY, + code_object_address: Optional[AccountAddress] = None, + ) -> List[str]: + """ + Chunks the package_metadata and modules across as many transactions as necessary. + Each transaction has a base cost and the maximum size is currently 64K, so this chunks + them into 60K + the base transaction size. This should be sufficient for reasonably + optimistic transaction batching. The batching tries to place as much data in a transaction + before moving on to the next transaction. + """ + + # Chunk the metadata and insert it into payloads.
The last chunk may be small enough + # to be placed with other data. This may also be the only chunk. + payloads = PublishHelper.prepare_chunked_payloads( + package_metadata, + modules, + large_package_address, + publish_mode, + code_object_address, + ) + + # Submit and wait for each transaction, including publishing. + txn_hashes = [] + for idx, payload in enumerate(payloads): + print(f"Submitting transaction...({idx + 1}/{len(payloads)})") + signed_txn = await self.client.create_bcs_signed_transaction( + sender, payload + ) + txn_hash = await self.client.submit_bcs_transaction(signed_txn) + await self.client.wait_for_transaction(txn_hash) + txn_hashes.append(txn_hash) + print("Done.") + return txn_hashes diff --git a/examples/large_package_publisher.py b/examples/large_package_publisher.py index 68a156c..5f225e4 100644 --- a/examples/large_package_publisher.py +++ b/examples/large_package_publisher.py @@ -9,7 +9,13 @@ from aptos_sdk.account_address import AccountAddress from aptos_sdk.aptos_cli_wrapper import AptosCLIWrapper from aptos_sdk.async_client import ClientConfig, FaucetClient, RestClient -from aptos_sdk.package_publisher import MODULE_ADDRESS, PackagePublisher, PublishMode +from aptos_sdk.package_publisher import ( + MODULE_ADDRESS, + CompileHelper, + PackagePublisher, + PublishHelper, + PublishMode, +) from .common import APTOS_CORE_PATH, FAUCET_URL, NODE_URL @@ -76,7 +82,9 @@ async def main( # Calculate the number of transactions needed for the chunked publish to predict the code object address. # Start by deriving the address assuming a single transaction for a preliminary build to estimate artifact size. 
- code_object_address = await publisher.derive_object_address(alice.address()) + code_object_address = await CompileHelper.derive_object_address( + rest_client, alice.address() + ) print("\nCompiling package as a preliminary build...") if AptosCLIWrapper.does_cli_exist(): @@ -89,11 +97,11 @@ async def main( "\nUpdate the module with the derived code object address, compile, and press enter." ) - metadata, modules = publisher.load_package_artifacts(large_package_example_dir) + metadata, modules = PublishHelper.load_package_artifacts(large_package_example_dir) # Number of transactions required for the chunked publish. required_txns = len( - publisher.prepare_chunked_payloads( + PublishHelper.prepare_chunked_payloads( metadata, modules, large_packages_module_address, @@ -102,8 +110,8 @@ async def main( ) if required_txns > 1: - code_object_address = await publisher.derive_object_address( - alice.address(), required_txns + code_object_address = await CompileHelper.derive_object_address( + rest_client, alice.address(), required_txns ) print("\nCompiling the package with updated object address...") if AptosCLIWrapper.does_cli_exist(): diff --git a/examples/object_code_deployment.py b/examples/object_code_deployment.py index 7165bbf..0989611 100644 --- a/examples/object_code_deployment.py +++ b/examples/object_code_deployment.py @@ -8,7 +8,12 @@ from aptos_sdk.account import Account from aptos_sdk.aptos_cli_wrapper import AptosCLIWrapper from aptos_sdk.async_client import FaucetClient, RestClient -from aptos_sdk.package_publisher import MODULE_ADDRESS, PackagePublisher, PublishMode +from aptos_sdk.package_publisher import ( + MODULE_ADDRESS, + CompileHelper, + PackagePublisher, + PublishMode, +) from .common import APTOS_CORE_PATH, FAUCET_URL, NODE_URL @@ -29,7 +34,9 @@ async def main(package_dir): print(f"Alice: {alice_balance}") # The object address is derived from publisher's address and sequence number. 
- code_object_address = await package_publisher.derive_object_address(alice.address()) + code_object_address = await CompileHelper.derive_object_address( + rest_client, alice.address() + ) module_name = "hello_blockchain" print("\nCompiling package...") From e02c98992dfcbbcd9d5174d0871cdc01b6267870 Mon Sep 17 00:00:00 2001 From: Junha Park <0xjunha@gmail.com> Date: Fri, 25 Oct 2024 20:34:46 +0900 Subject: [PATCH 5/6] Add `publish_move_package` method to `PackagePublisher` to encapsulate compiling and publishing processes --- aptos_sdk/package_publisher.py | 89 +++++++++++++++++++++++++++++++--- 1 file changed, 81 insertions(+), 8 deletions(-) diff --git a/aptos_sdk/package_publisher.py b/aptos_sdk/package_publisher.py index 8a6a52b..5a0b38c 100644 --- a/aptos_sdk/package_publisher.py +++ b/aptos_sdk/package_publisher.py @@ -9,6 +9,7 @@ from .account import Account from .account_address import AccountAddress +from .aptos_cli_wrapper import AptosCLIWrapper from .async_client import RestClient from .bcs import Serializer from .transactions import EntryFunction, TransactionArgument, TransactionPayload @@ -97,7 +98,7 @@ def is_large_package( def prepare_chunked_payloads( package_metadata: bytes, modules: List[bytes], - large_package_address: AccountAddress, + large_packages_module_address: AccountAddress, publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY, code_object_address: Optional[AccountAddress] = None, ) -> List[TransactionPayload]: @@ -109,7 +110,7 @@ def prepare_chunked_payloads( for metadata_chunk in metadata_chunks[:-1]: payloads.append( PublishHelper.create_large_package_staging_payload( - large_package_address, metadata_chunk, [], [] + large_packages_module_address, metadata_chunk, [], [] ) ) @@ -126,7 +127,7 @@ def prepare_chunked_payloads( if taken_size + len(chunk) > MAX_CHUNK_SIZE: payloads.append( PublishHelper.create_large_package_staging_payload( - large_package_address, + large_packages_module_address, metadata_chunk, modules_indices, 
data_chunks, @@ -145,7 +146,7 @@ def prepare_chunked_payloads( # It will then assemble all staged code chunks and publish it within the large_packages Move module. payloads.append( PublishHelper.create_large_package_publishing_payload( - large_package_address, + large_packages_module_address, metadata_chunk, modules_indices, data_chunks, @@ -320,11 +321,83 @@ async def upgrade_package_object( ) return await self.client.submit_bcs_transaction(signed_transaction) + async def publish_move_package( + self, + sender: Account, + package_dir: str, + module_name: str, + large_packages_module_address: AccountAddress = MODULE_ADDRESS, + publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY, + code_object_address: Optional[AccountAddress] = None, + ) -> List[str]: + """ + Compiles and publishes a Move package, handling both regular and large packages, as well as account and object + deployments. + + This method abstracts away complexities such as object address derivation, chunked publishing, and preliminary + compilation if needed. + + Note: This method requires the local Aptos CLI for compilation and will not work without it. + """ + + if not AptosCLIWrapper.does_cli_exist(): + raise Exception("AptosCLI does not exist.") + + # Determine the account or object address for publishing the package. + if publish_mode == PublishMode.ACCOUNT_DEPLOY: + deploy_address = sender.address() + + elif publish_mode == PublishMode.OBJECT_DEPLOY: + deploy_address = await CompileHelper.derive_object_address( + self.client, sender.address() + ) + + # Compile the package as a preliminary build to check if chunking is required. + AptosCLIWrapper.compile_package(package_dir, {module_name: deploy_address}) + metadata, modules = PublishHelper.load_package_artifacts(package_dir) + + # If the package size requires chunked publishing, recalculate the deploy address. 
+ if PublishHelper.is_large_package(metadata, modules): + required_txns = len( + PublishHelper.prepare_chunked_payloads( + metadata, + modules, + large_packages_module_address, + PublishMode.OBJECT_DEPLOY, + ) + ) + + if required_txns > 1: + deploy_address = await CompileHelper.derive_object_address( + self.client, sender.address(), required_txns + ) + + elif publish_mode == PublishMode.OBJECT_UPGRADE: + if code_object_address is None: + raise ValueError( + "code_object_address must be provided for OBJECT_UPGRADE mode" + ) + deploy_address = code_object_address + + else: + raise ValueError(f"Unexpected publish mode: {publish_mode}") + + # Compile the package with the correct deployment address. + AptosCLIWrapper.compile_package(package_dir, {module_name: deploy_address}) + + return await self.publish_package_in_path( + sender, + package_dir, + large_packages_module_address, + publish_mode, + code_object_address, + ) + async def publish_package_in_path( self, sender: Account, package_dir: str, - large_package_address: AccountAddress = MODULE_ADDRESS, + large_packages_module_address: AccountAddress = MODULE_ADDRESS, publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY, code_object_address: Optional[AccountAddress] = None, ) -> List[str]: @@ -343,7 +416,7 @@ async def publish_package_in_path( sender, metadata, modules, - large_package_address, + large_packages_module_address, publish_mode, code_object_address, ) @@ -371,7 +444,7 @@ async def chunked_package_publish( sender: Account, package_metadata: bytes, modules: List[bytes], - large_package_address: AccountAddress = MODULE_ADDRESS, + large_packages_module_address: AccountAddress = MODULE_ADDRESS, publish_mode: PublishMode = PublishMode.ACCOUNT_DEPLOY, code_object_address: Optional[AccountAddress] = None, ) -> List[str]: @@ -388,7 +461,7 @@ async def chunked_package_publish( payloads = PublishHelper.prepare_chunked_payloads( package_metadata, modules, - large_package_address, + large_packages_module_address, 
publish_mode, code_object_address, ) From 3ab7f8def0314b9f17cbb8c9e362e8e02bcf2818 Mon Sep 17 00:00:00 2001 From: Junha Park <0xjunha@gmail.com> Date: Fri, 25 Oct 2024 21:09:11 +0900 Subject: [PATCH 6/6] Use `compile_and_publish_move_package` method in `large_package_publisher` example --- aptos_sdk/package_publisher.py | 12 ++-- examples/large_package_publisher.py | 101 ++++++---------------------- examples/object_code_deployment.py | 9 +++ 3 files changed, 36 insertions(+), 86 deletions(-) diff --git a/aptos_sdk/package_publisher.py b/aptos_sdk/package_publisher.py index 5a0b38c..a92de63 100644 --- a/aptos_sdk/package_publisher.py +++ b/aptos_sdk/package_publisher.py @@ -321,7 +321,7 @@ async def upgrade_package_object( ) return await self.client.submit_bcs_transaction(signed_transaction) - async def publish_move_package( + async def compile_and_publish_move_package( self, sender: Account, package_dir: str, @@ -339,25 +339,25 @@ async def publish_move_package( Note: This method requires the local Aptos CLI for compilation and will not work without it. """ - - if not AptosCLIWrapper.does_cli_exist(): - raise Exception("AptosCLI does not exist.") + AptosCLIWrapper.assert_cli_exists() # Determine the account or object address for publishing the package. if publish_mode == PublishMode.ACCOUNT_DEPLOY: deploy_address = sender.address() elif publish_mode == PublishMode.OBJECT_DEPLOY: + # Calculate the number of transactions needed for the chunked publish to predict the code object address. + # Start by assuming a single transaction for a preliminary build to estimate the artifact size. deploy_address = await CompileHelper.derive_object_address( self.client, sender.address() ) - # Compile the package as a preliminary build to check if chunking is required. 
AptosCLIWrapper.compile_package(package_dir, {module_name: deploy_address}) metadata, modules = PublishHelper.load_package_artifacts(package_dir) # If the package size requires chunked publishing, recalculate the deploy address. if PublishHelper.is_large_package(metadata, modules): + # Number of transactions required for the chunked publish. required_txns = len( PublishHelper.prepare_chunked_payloads( metadata, @@ -385,6 +385,8 @@ async def publish_move_package( # Compile the package with the correct deployment address. AptosCLIWrapper.compile_package(package_dir, {module_name: deploy_address}) + print(f"Deploying {module_name} to {deploy_address}...") + return await self.publish_package_in_path( sender, package_dir, diff --git a/examples/large_package_publisher.py b/examples/large_package_publisher.py index 5f225e4..d228a20 100644 --- a/examples/large_package_publisher.py +++ b/examples/large_package_publisher.py @@ -1,5 +1,16 @@ # Copyright © Aptos Foundation # SPDX-License-Identifier: Apache-2.0 + +""" +This example demonstrates publishing large Move packages which cannot fit in a single transaction, using the most +abstract method `compile_and_publish_move_package` from the `PackagePublisher` class. This method handles all necessary +steps for compiling and publishing both regular and large packages. + +Note: This method requires the presence of the Aptos CLI in `APTOS_CLI_PATH`. As an alternative, if you want finer +control over the process or do not want to rely on the CLI, you may use `publish_package_in_path`, which is +demonstrated in the `object_code_deployment.py` example. 
+""" + import asyncio import os import sys @@ -7,15 +18,8 @@ import aptos_sdk.cli as aptos_sdk_cli from aptos_sdk.account import Account from aptos_sdk.account_address import AccountAddress -from aptos_sdk.aptos_cli_wrapper import AptosCLIWrapper from aptos_sdk.async_client import ClientConfig, FaucetClient, RestClient -from aptos_sdk.package_publisher import ( - MODULE_ADDRESS, - CompileHelper, - PackagePublisher, - PublishHelper, - PublishMode, -) +from aptos_sdk.package_publisher import MODULE_ADDRESS, PackagePublisher, PublishMode from .common import APTOS_CORE_PATH, FAUCET_URL, NODE_URL @@ -58,97 +62,32 @@ async def main( # Name of the move module for the package to be published, containing artifacts larger than the MAX_CHUNK_SIZE module_name = "large_package_example" - # Example 1. Account deployment + # -- Example 1. Account deployment print("=== Publishing large package to account ===") - if AptosCLIWrapper.does_cli_exist(): - AptosCLIWrapper.compile_package( - large_package_example_dir, {module_name: alice.address()} - ) - else: - input("\nUpdate the module with Alice's address, compile, and press Enter.") - - account_deploy_txn_hash = await publisher.publish_package_in_path( - alice, large_package_example_dir, large_packages_module_address + account_deploy_txn_hash = await publisher.compile_and_publish_move_package( + alice, large_package_example_dir, module_name, large_packages_module_address ) print(f"Tx submitted: {account_deploy_txn_hash[0]}") await rest_client.wait_for_transaction(account_deploy_txn_hash[0]) - print(f"Package deployed to account {alice.address()}") + print("Transaction completed.") - # Example 2. Object code deployment - # Note: Here we assume that we already know we should use the chunked publish mode, so we run a preliminary build. + # ----- Example 2. Object code deployment print("=== Publishing large package to object ===") - # Calculate the number of transactions needed for the chunked publish to predict the code object address. 
- # Start by deriving the address assuming a single transaction for a preliminary build to estimate artifact size. - code_object_address = await CompileHelper.derive_object_address( - rest_client, alice.address() - ) - - print("\nCompiling package as a preliminary build...") - if AptosCLIWrapper.does_cli_exist(): - AptosCLIWrapper.compile_package( - large_package_example_dir, {module_name: code_object_address} - ) - else: - print(f"Address of the object to be created: {code_object_address}") - input( - "\nUpdate the module with the derived code object address, compile, and press enter." - ) - - metadata, modules = PublishHelper.load_package_artifacts(large_package_example_dir) - - # Number of transactions required for the chunked publish. - required_txns = len( - PublishHelper.prepare_chunked_payloads( - metadata, - modules, - large_packages_module_address, - PublishMode.OBJECT_DEPLOY, - ) - ) - - if required_txns > 1: - code_object_address = await CompileHelper.derive_object_address( - rest_client, alice.address(), required_txns - ) - print("\nCompiling the package with updated object address...") - if AptosCLIWrapper.does_cli_exist(): - AptosCLIWrapper.compile_package( - large_package_example_dir, {module_name: code_object_address} - ) - else: - print(f"Address of the object to be created: {code_object_address}") - input( - "\nUpdate the module with the derived code object address, compile, and press enter." - ) - - object_deploy_txn_hash = await publisher.publish_package_in_path( + object_deploy_txn_hash = await publisher.compile_and_publish_move_package( alice, large_package_example_dir, + module_name, large_packages_module_address, PublishMode.OBJECT_DEPLOY, ) print(f"The last tx submitted: {object_deploy_txn_hash[-1]}") await rest_client.wait_for_transaction(object_deploy_txn_hash[-1]) - print(f"Package deployed to object {code_object_address}") - - # Example 3. 
Object code upgrade - print("=== Upgrading large package object ===") - - object_upgrade_txn_hash = await publisher.publish_package_in_path( - alice, - large_package_example_dir, - large_packages_module_address, - PublishMode.OBJECT_UPGRADE, - code_object_address, - ) + print("Transaction completed.") - print(f"The last tx submitted: {object_upgrade_txn_hash[-1]}") - await rest_client.wait_for_transaction(object_upgrade_txn_hash[-1]) - print(f"Package in object {code_object_address} upgraded") await rest_client.close() diff --git a/examples/object_code_deployment.py b/examples/object_code_deployment.py index 0989611..010c915 100644 --- a/examples/object_code_deployment.py +++ b/examples/object_code_deployment.py @@ -1,6 +1,15 @@ # Copyright © Aptos Foundation # SPDX-License-Identifier: Apache-2.0 +""" +This example demonstrates publishing Move packages using the `publish_package_in_path` method from the +`PackagePublisher` class. This method provides more control over the package publishing process, directly loading +artifacts from a pre-compiled package directory and handling both Account and Object deployment. + +Note: For a higher-level abstraction that handles compilation and deployment automatically, you may use +`compile_and_publish_move_package`, as demonstrated in the `large_package_publisher.py` example. +""" + import asyncio import os import sys