Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
### Fixed
- Added missing validation for dataset reference target types to ensure correct `RefSpec.target_type` matching. @sejalpunwatkar [#1429](https://github.com/hdmf-dev/hdmf/pull/1429)
- Fixed reading a `DynamicTable` that contains a named link to a `VectorData` (e.g., `MeaningsTable.target`). The link target was being picked up as an extra column, causing a `"Columns must be the same length"` error when the target column's row count differed from the table's own row count. @rly [#1445](https://github.com/hdmf-dev/hdmf/pull/1445)
- Added end-to-end support for `isodatetime`/`datetime` dtype on datasets and attributes, including extension specs. Build, read, and roundtrip now work for `datetime`/`date` values, and `get_class` generates classes that accept them. Specifically: a new `_isoformat` dtype helper converts `datetime`/`date` to ASCII-encoded ISO bytes on write; `_parse_isoformat` parses stored values back to `datetime` (or `date` for date-only strings) on read; `construct()` accepts an optional `spec_ext` so an inc-site `dtype` override (e.g., a parent group declaring `dtype='isodatetime'` on a `data_type_inc`-extending dataset whose def has no dtype) is honored on read. The `scalar_data` docval macro now includes `datetime`/`date` so generated dtype-less classes accept scalar datetime values. @rly [#1313](https://github.com/hdmf-dev/hdmf/pull/1313)


## HDMF 5.1.0 (March 24, 2026)
Expand Down
56 changes: 51 additions & 5 deletions src/hdmf/build/classgenerator.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
from copy import deepcopy
from datetime import datetime, date
from collections.abc import Callable
import warnings

import numpy as np

from ..container import Container, Data, MultiContainerInterface
from ..spec import AttributeSpec, LinkSpec, RefSpec, GroupSpec
from ..spec import AttributeSpec, LinkSpec, RefSpec, GroupSpec, DatasetSpec
from ..spec.spec import BaseStorageSpec, ZERO_OR_MANY, ONE_OR_MANY
from ..utils import docval, getargs, ExtenderMeta, get_docval, popargs, AllowPositional

Expand Down Expand Up @@ -79,7 +80,7 @@ def generate_class(self, **kwargs):
break # each field_spec should be processed by only one generator

for class_generator in self.__custom_generators:
class_generator.post_process(classdict, bases, docval_args, spec)
class_generator.post_process(classdict, bases, docval_args, spec, type_map)

for class_generator in reversed(self.__custom_generators):
# go in reverse order so that base init is added first and
Expand Down Expand Up @@ -252,7 +253,7 @@ def process_field_spec(cls, classdict, docval_args, parent_cls, attr_name, not_i
docval_arg = dict(
name=attr_name,
doc=field_spec.doc,
type=cls._get_type(field_spec, type_map)
type=dtype,
)
shape = getattr(field_spec, 'shape', None)
if shape is not None:
Expand Down Expand Up @@ -285,12 +286,13 @@ def _add_to_docval_args(cls, docval_args, arg, err_if_present=False):
docval_args.append(arg)

@classmethod
def post_process(cls, classdict, bases, docval_args, spec):
def post_process(cls, classdict, bases, docval_args, spec, type_map):
"""Convert classdict['__fields__'] to tuple and update docval args for a fixed name and default name.
:param classdict: The class dictionary to convert with '__fields__' key (or a different bases[0]._fieldsname)
:param bases: The list of base classes.
:param docval_args: The dict of docval arguments.
:param spec: The spec for the container class to generate.
:param type_map: The type map to use.
"""
# convert classdict['__fields__'] from list to tuple if present
for b in bases:
Expand All @@ -308,6 +310,49 @@ def post_process(cls, classdict, bases, docval_args, spec):
# set default name in docval args if provided
cls._set_default_name(docval_args, spec.default_name)

if isinstance(spec, DatasetSpec):
cls._update_data_docval_arg(docval_args, spec)

@classmethod
def _update_data_docval_arg(cls, docval_args, spec):
    """Refresh the inherited 'data' docval arg so it matches the dataset spec.

    Only the `type`, `doc`, and (when the spec declares one) `shape` entries are
    rewritten. Any other keys, such as `default`, carry over untouched from the
    parent class's docval, so subclasses don't accidentally lose, e.g.,
    VectorData's empty-list default.
    """
    # Fixed and default dataset values cannot be represented in a generated
    # class; warn and continue without them.
    if spec.value is not None:
        warnings.warn(
            "Generating a class for a dataset with a fixed value is not supported. "
            "The fixed value will be ignored."
        )
    if spec.default_value is not None:
        warnings.warn(
            "Generating a class for a dataset with a default value is not supported. "
            "The default value will be ignored."
        )

    # Work out the allowed docval types from the spec's shape/dtype declarations.
    if spec.shape is not None or spec.dims is not None:
        allowed_types = ('array_data', 'data')
    elif spec.dtype is not None:
        allowed_types = cls._get_type_from_spec_dtype(spec.dtype)
    else:
        allowed_types = ('scalar_data', 'array_data', 'data')

    data_arg = None
    for arg in docval_args:
        if arg['name'] == 'data':
            data_arg = arg
            break

    if data_arg is not None:
        # Update the inherited arg in place; all other keys are preserved.
        data_arg['type'] = allowed_types
        data_arg['doc'] = spec.doc
        if spec.shape is not None:
            data_arg['shape'] = spec.shape
    else:
        # No inherited 'data' arg to amend: append a fresh one.
        data_arg = dict(name='data', doc=spec.doc, type=allowed_types)
        if spec.shape is not None:
            data_arg['shape'] = spec.shape
        docval_args.append(data_arg)

@classmethod
def _get_attrs_not_to_set_init(cls, classdict, parent_docval_args):
    """Return the attribute names that should not be set in the generated __init__.

    Base implementation returns ``parent_docval_args`` unchanged; presumably a
    hook for subclasses to filter differently — confirm against overriding
    generators elsewhere in the file.
    """
    return parent_docval_args
Expand Down Expand Up @@ -413,12 +458,13 @@ def process_field_spec(cls, classdict, docval_args, parent_cls, attr_name, not_i
cls._add_to_docval_args(docval_args, docval_arg)

@classmethod
def post_process(cls, classdict, bases, docval_args, spec):
def post_process(cls, classdict, bases, docval_args, spec, type_map):
"""Add MultiContainerInterface to the list of base classes.
:param classdict: The class dictionary.
:param bases: The list of base classes.
:param docval_args: The dict of docval arguments.
:param spec: The spec for the container class to generate.
:param type_map: The type map to use.
"""
if '__clsconf__' in classdict:
# do not add MCI as a base if a base is already a subclass of MultiContainerInterface
Expand Down
20 changes: 13 additions & 7 deletions src/hdmf/build/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -278,10 +278,12 @@ def get_builder(self, **kwargs):
return result

@docval({'name': 'builder', 'type': (DatasetBuilder, GroupBuilder),
'doc': 'the builder to construct the AbstractContainer from'})
'doc': 'the builder to construct the AbstractContainer from'},
{'name': 'spec_ext', 'type': BaseStorageSpec,
'doc': 'a spec extension carrying inc-site overrides (e.g., dtype) from the parent', 'default': None})
def construct(self, **kwargs):
""" Construct the AbstractContainer represented by the given builder """
builder = getargs('builder', kwargs)
builder, spec_ext = getargs('builder', 'spec_ext', kwargs)
if isinstance(builder, LinkBuilder):
builder = builder.target
builder_id = self.__bldrhash__(builder)
Expand All @@ -290,11 +292,11 @@ def construct(self, **kwargs):
parent_builder = self.__get_parent_dt_builder(builder)
if parent_builder is not None:
parent = self._get_proxy_builder(parent_builder)
result = self.__type_map.construct(builder, self, parent)
result = self.__type_map.construct(builder, self, parent, spec_ext=spec_ext)
else:
# we are at the top of the hierarchy,
# so it must be time to resolve parents
result = self.__type_map.construct(builder, self, None)
result = self.__type_map.construct(builder, self, None, spec_ext=spec_ext)
self.__resolve_parents(result)
self.prebuilt(result, builder)
result.set_modified(False)
Expand Down Expand Up @@ -888,18 +890,22 @@ def build(self, **kwargs):
{'name': 'build_manager', 'type': BuildManager,
'doc': 'the BuildManager for constructing', 'default': None},
{'name': 'parent', 'type': (Proxy, Container),
'doc': 'the parent Container/Proxy for the Container being built', 'default': None})
'doc': 'the parent Container/Proxy for the Container being built', 'default': None},
{'name': 'spec_ext', 'type': BaseStorageSpec,
'doc': 'a spec extension carrying inc-site overrides (e.g., dtype) from the parent', 'default': None})
def construct(self, **kwargs):
""" Construct the AbstractContainer represented by the given builder """
builder, build_manager, parent = getargs('builder', 'build_manager', 'parent', kwargs)
builder, build_manager, parent, spec_ext = getargs(
'builder', 'build_manager', 'parent', 'spec_ext', kwargs
)
if build_manager is None:
build_manager = BuildManager(self)
obj_mapper = self.get_map(builder)
if obj_mapper is None:
dt = builder.attributes[self.namespace_catalog.group_spec_cls.type_key()]
raise ValueError('No ObjectMapper found for builder of type %s' % dt)
else:
return obj_mapper.construct(builder, build_manager, parent)
return obj_mapper.construct(builder, build_manager, parent, spec_ext=spec_ext)

@docval({"name": "container", "type": AbstractContainer, "doc": "the container to convert to a Builder"},
returns='The name a Builder should be given when building this container', rtype=str)
Expand Down
Loading
Loading