Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
24 changes: 12 additions & 12 deletions hathorlib/headers/nano_header.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,14 +22,15 @@
from hathorlib.headers.types import VertexHeaderId
from hathorlib.nanocontracts import DeprecatedNanoContract
from hathorlib.nanocontracts.types import NCActionType
from hathorlib.utils import int_to_bytes, unpack, unpack_len
from hathorlib.utils import decode_unsigned, encode_unsigned, int_to_bytes, unpack, unpack_len

if TYPE_CHECKING:
from hathorlib.base_transaction import BaseTransaction

NC_VERSION = 1
NC_INITIALIZE_METHOD = 'initialize'
ADDRESS_LEN_BYTES = 25
ADDRESS_SEQNUM_SIZE: int = 8 # bytes
_NC_SCRIPT_LEN_MAX_BYTES: int = 2


@dataclass(frozen=True)
Expand All @@ -43,6 +44,9 @@ class NanoHeaderAction:
class NanoHeader(VertexBaseHeader):
tx: BaseTransaction

# Sequence number for the caller.
nc_seqnum: int

# nc_id equals to the blueprint_id when a Nano Contract is being created.
# nc_id equals to the nanocontract_id when a method is being called.
nc_id: bytes
Expand All @@ -59,8 +63,6 @@ class NanoHeader(VertexBaseHeader):
nc_address: bytes
nc_script: bytes

nc_version: int = NC_VERSION

@classmethod
def _deserialize_action(cls, buf: bytes) -> tuple[NanoHeaderAction, bytes]:
from hathorlib.base_transaction import bytes_to_output_value
Expand All @@ -78,11 +80,9 @@ def _deserialize_action(cls, buf: bytes) -> tuple[NanoHeaderAction, bytes]:
def deserialize(cls, tx: BaseTransaction, buf: bytes) -> tuple[NanoHeader, bytes]:
header_id, buf = buf[:1], buf[1:]
assert header_id == VertexHeaderId.NANO_HEADER.value
(nc_version,), buf = unpack('!B', buf)
if nc_version != NC_VERSION:
raise ValueError('unknown nanocontract version: {}'.format(nc_version))

nc_id, buf = unpack_len(32, buf)
nc_seqnum, buf = decode_unsigned(buf, max_bytes=ADDRESS_SEQNUM_SIZE)
(nc_method_len,), buf = unpack('!B', buf)
nc_method, buf = unpack_len(nc_method_len, buf)
(nc_args_bytes_len,), buf = unpack('!H', buf)
Expand All @@ -96,14 +96,14 @@ def deserialize(cls, tx: BaseTransaction, buf: bytes) -> tuple[NanoHeader, bytes
nc_actions.append(action)

nc_address, buf = unpack_len(ADDRESS_LEN_BYTES, buf)
(nc_script_len,), buf = unpack('!H', buf)
nc_script_len, buf = decode_unsigned(buf, max_bytes=_NC_SCRIPT_LEN_MAX_BYTES)
nc_script, buf = unpack_len(nc_script_len, buf)

decoded_nc_method = nc_method.decode('ascii')

return cls(
tx=tx,
nc_version=nc_version,
nc_seqnum=nc_seqnum,
nc_id=nc_id,
nc_method=decoded_nc_method,
nc_args_bytes=nc_args_bytes,
Expand All @@ -127,8 +127,8 @@ def _serialize_without_header_id(self, *, skip_signature: bool) -> deque[bytes]:
encoded_method = self.nc_method.encode('ascii')

ret: deque[bytes] = deque()
ret.append(int_to_bytes(NC_VERSION, 1))
ret.append(self.nc_id)
ret.append(encode_unsigned(self.nc_seqnum, max_bytes=ADDRESS_SEQNUM_SIZE))
ret.append(int_to_bytes(len(encoded_method), 1))
ret.append(encoded_method)
ret.append(int_to_bytes(len(self.nc_args_bytes), 2))
Expand All @@ -141,10 +141,10 @@ def _serialize_without_header_id(self, *, skip_signature: bool) -> deque[bytes]:

ret.append(self.nc_address)
if not skip_signature:
ret.append(int_to_bytes(len(self.nc_script), 2))
ret.append(encode_unsigned(len(self.nc_script), max_bytes=_NC_SCRIPT_LEN_MAX_BYTES))
ret.append(self.nc_script)
else:
ret.append(int_to_bytes(0, 2))
ret.append(encode_unsigned(0, max_bytes=_NC_SCRIPT_LEN_MAX_BYTES))
return ret

def serialize(self) -> bytes:
Expand Down
2 changes: 1 addition & 1 deletion hathorlib/nanocontracts/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ class NCActionType(Enum):
DEPOSIT = 1
WITHDRAWAL = 2
GRANT_AUTHORITY = 3
INVOKE_AUTHORITY = 4
ACQUIRE_AUTHORITY = 4

def __str__(self) -> str:
return self.name.lower()
Expand Down
27 changes: 27 additions & 0 deletions hathorlib/serialization/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
# Copyright 2025 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from .deserializer import Deserializer
from .exceptions import BadDataError, OutOfDataError, SerializationError, TooLongError, UnsupportedTypeError
from .serializer import Serializer

__all__ = [
'Serializer',
'Deserializer',
'SerializationError',
'UnsupportedTypeError',
'TooLongError',
'OutOfDataError',
'BadDataError',
]
10 changes: 10 additions & 0 deletions hathorlib/serialization/adapters/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
from .generic_adapter import GenericDeserializerAdapter, GenericSerializerAdapter
from .max_bytes import MaxBytesDeserializer, MaxBytesExceededError, MaxBytesSerializer

__all__ = [
'GenericDeserializerAdapter',
'GenericSerializerAdapter',
'MaxBytesDeserializer',
'MaxBytesExceededError',
'MaxBytesSerializer',
]
110 changes: 110 additions & 0 deletions hathorlib/serialization/adapters/generic_adapter.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,110 @@
# Copyright 2025 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from types import TracebackType
from typing import Generic, TypeVar, Union

from typing_extensions import Self, override

from hathorlib.serialization.deserializer import Deserializer
from hathorlib.serialization.serializer import Serializer

from ..types import Buffer

S = TypeVar('S', bound=Serializer)
D = TypeVar('D', bound=Deserializer)


class GenericSerializerAdapter(Serializer, Generic[S]):
    """Adapter base class that forwards every ``Serializer`` call to a wrapped instance.

    Subclasses (e.g. ``MaxBytesSerializer``) override individual methods to layer
    extra behavior on top of the wrapped serializer without reimplementing it.
    """

    # The wrapped serializer that receives all delegated calls.
    inner: S

    def __init__(self, serializer: S) -> None:
        self.inner = serializer

    @override
    def finalize(self) -> Buffer:
        """Delegate: finalize the wrapped serializer and return its buffer."""
        return self.inner.finalize()

    @override
    def cur_pos(self) -> int:
        """Delegate: current position of the wrapped serializer."""
        return self.inner.cur_pos()

    @override
    def write_byte(self, data: int) -> None:
        """Delegate: write a single byte to the wrapped serializer."""
        self.inner.write_byte(data)

    @override
    def write_bytes(self, data: Buffer) -> None:
        """Delegate: write a buffer of bytes to the wrapped serializer."""
        self.inner.write_bytes(data)

    # allow using this adapter as a context manager:

    def __enter__(self) -> Self:
        return self

    def __exit__(
        self,
        exc_type: Union[type[BaseException], None],
        exc_value: Union[BaseException, None],
        traceback: Union[TracebackType, None],
    ) -> None:
        # No cleanup needed; returning None means exceptions are not suppressed.
        pass


class GenericDeserializerAdapter(Deserializer, Generic[D]):
    """Adapter base class that forwards every ``Deserializer`` call to a wrapped instance.

    Subclasses (e.g. ``MaxBytesDeserializer``) override individual methods to layer
    extra behavior on top of the wrapped deserializer without reimplementing it.
    """

    # The wrapped deserializer that receives all delegated calls.
    inner: D

    def __init__(self, deserializer: D) -> None:
        self.inner = deserializer

    @override
    def finalize(self) -> None:
        """Delegate: finalize the wrapped deserializer."""
        return self.inner.finalize()

    @override
    def is_empty(self) -> bool:
        """Delegate: whether the wrapped deserializer has no more data."""
        return self.inner.is_empty()

    @override
    def peek_byte(self) -> int:
        """Delegate: return the next byte without consuming it."""
        return self.inner.peek_byte()

    @override
    def peek_bytes(self, n: int, *, exact: bool = True) -> Buffer:
        """Delegate: return ``n`` bytes without consuming them (``exact`` semantics defined by the wrapped deserializer)."""
        return self.inner.peek_bytes(n, exact=exact)

    @override
    def read_byte(self) -> int:
        """Delegate: consume and return a single byte."""
        return self.inner.read_byte()

    @override
    def read_bytes(self, n: int, *, exact: bool = True) -> Buffer:
        """Delegate: consume and return ``n`` bytes (``exact`` semantics defined by the wrapped deserializer)."""
        return self.inner.read_bytes(n, exact=exact)

    @override
    def read_all(self) -> Buffer:
        """Delegate: consume and return all remaining bytes."""
        return self.inner.read_all()

    # allow using this adapter as a context manager:

    def __enter__(self) -> Self:
        return self

    def __exit__(
        self,
        exc_type: Union[type[BaseException], None],
        exc_value: Union[BaseException, None],
        traceback: Union[TracebackType, None],
    ) -> None:
        # No cleanup needed; returning None means exceptions are not suppressed.
        pass
91 changes: 91 additions & 0 deletions hathorlib/serialization/adapters/max_bytes.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
# Copyright 2025 Hathor Labs
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from typing import TypeVar

from typing_extensions import override

from hathorlib.serialization.deserializer import Deserializer
from hathorlib.serialization.exceptions import SerializationError
from hathorlib.serialization.serializer import Serializer

from ..types import Buffer
from .generic_adapter import GenericDeserializerAdapter, GenericSerializerAdapter

S = TypeVar('S', bound=Serializer)
D = TypeVar('D', bound=Deserializer)


class MaxBytesExceededError(SerializationError):
    """ This error is raised when the adapted serializer reached its maximum bytes write/read.

    After this exception is raised the adapted serializer cannot be used anymore. Handlers of this exception are
    expected to either: bubble up the exception (or an equivalent exception), or return an error. Handlers should not
    try to write again on the same serializer.

    It is possible that the inner serializer is still usable, but the point where the serializer stopped writing or
    reading might leave the rest of the data unusable, so for that reason it should be considered a failed
    (de)serialization overall, and not simply a failed "read/write" operation.
    """
    pass


class MaxBytesSerializer(GenericSerializerAdapter[S]):
    """Serializer adapter that caps the total number of bytes written.

    Every write is charged against a fixed budget of ``max_bytes``. The first
    write that would push the running total past the budget raises
    ``MaxBytesExceededError``, after which the adapter must not be used again.
    """

    def __init__(self, serializer: S, max_bytes: int) -> None:
        super().__init__(serializer)
        # Remaining write budget, in bytes.
        self._bytes_left = max_bytes

    def _spend(self, write_size: int) -> None:
        """Charge ``write_size`` bytes to the budget, raising once it is exhausted."""
        self._bytes_left -= write_size
        if self._bytes_left < 0:
            raise MaxBytesExceededError

    @override
    def write_byte(self, data: int) -> None:
        self._spend(1)
        super().write_byte(data)

    @override
    def write_bytes(self, data: Buffer) -> None:
        view = memoryview(data)
        self._spend(len(view))
        super().write_bytes(view)


class MaxBytesDeserializer(GenericDeserializerAdapter[D]):
    """Deserializer adapter that caps the total number of bytes read.

    Every read is charged against a fixed budget of ``max_bytes``. A read that
    would push the running total past the budget raises
    ``MaxBytesExceededError``, after which the adapter should be considered a
    failed deserialization and not be used again.
    """

    def __init__(self, deserializer: D, max_bytes: int) -> None:
        super().__init__(deserializer)
        # Remaining read budget, in bytes.
        self._bytes_left = max_bytes

    def _check_update_exceeds(self, read_size: int) -> None:
        """Charge ``read_size`` bytes to the budget, raising once it is exhausted."""
        self._bytes_left -= read_size
        if self._bytes_left < 0:
            raise MaxBytesExceededError

    @override
    def read_byte(self) -> int:
        self._check_update_exceeds(1)
        return super().read_byte()

    @override
    def read_bytes(self, n: int, *, exact: bool = True) -> Buffer:
        # NOTE: the budget is charged for `n` up front, even if a non-exact
        # read ends up returning fewer bytes.
        self._check_update_exceeds(n)
        return super().read_bytes(n, exact=exact)

    @override
    def read_all(self) -> Buffer:
        # Read at most the remaining budget. This deliberately bypasses
        # _check_update_exceeds (the request can never exceed the budget), but
        # the budget must still be decremented by the bytes actually consumed —
        # the original left `_bytes_left` stale here, making any later read on
        # this adapter operate against an inflated budget.
        result = super().read_bytes(self._bytes_left, exact=False)
        self._bytes_left -= len(result)
        if not self.is_empty():
            # Data remains beyond the allowed maximum.
            raise MaxBytesExceededError
        return result
Loading