diff --git a/glotaran/builtin/elements/baseline/element.py b/glotaran/builtin/elements/baseline/element.py index f09ddb484..e1f5f45d7 100644 --- a/glotaran/builtin/elements/baseline/element.py +++ b/glotaran/builtin/elements/baseline/element.py @@ -15,8 +15,8 @@ class BaselineElement(Element): type: Literal["baseline"] # type:ignore[assignment] - register_as = "baseline" # type:ignore[pydantic-field] - unique = True # type:ignore[pydantic-field] + register_as: str = "baseline" # type:ignore[misc] + unique: bool = True def clp_label(self) -> str: return f"baseline_{self.label}" diff --git a/glotaran/builtin/elements/clp_guide/element.py b/glotaran/builtin/elements/clp_guide/element.py index 260fad8a5..7bc6fda82 100644 --- a/glotaran/builtin/elements/clp_guide/element.py +++ b/glotaran/builtin/elements/clp_guide/element.py @@ -14,8 +14,8 @@ class ClpGuideElement(Element): type: Literal["clp-guide"] # type:ignore[assignment] - register_as = "clp-guide" # type:ignore[pydantic-field] - exclusive = True # type:ignore[pydantic-field] + register_as: str = "clp-guide" # type:ignore[misc] + exclusive: bool = True target: str def calculate_matrix( diff --git a/glotaran/builtin/elements/coherent_artifact/element.py b/glotaran/builtin/elements/coherent_artifact/element.py index 15dfd7a31..03a9283fc 100644 --- a/glotaran/builtin/elements/coherent_artifact/element.py +++ b/glotaran/builtin/elements/coherent_artifact/element.py @@ -13,6 +13,7 @@ from glotaran.model import Element from glotaran.model import GlotaranModelError from glotaran.model import ParameterType +from glotaran.model.data_model import DataModel if TYPE_CHECKING: from glotaran.typing.types import ArrayLike @@ -20,9 +21,9 @@ class CoherentArtifactElement(Element): type: Literal["coherent-artifact"] # type:ignore[assignment] - register_as = "coherent-artifact" # type:ignore[pydantic-field] - dimension = "time" # type:ignore[pydantic-field] - data_model_type = ActivationDataModel # type:ignore[pydantic-field] + 
register_as: str = "coherent-artifact" # type:ignore[misc] + dimension: str = "time" + data_model_type: type[DataModel] = ActivationDataModel # type:ignore[misc,valid-type] order: int width: ParameterType | None = None diff --git a/glotaran/builtin/elements/damped_oscillation/element.py b/glotaran/builtin/elements/damped_oscillation/element.py index 6eb62d5e7..4862b3e6b 100644 --- a/glotaran/builtin/elements/damped_oscillation/element.py +++ b/glotaran/builtin/elements/damped_oscillation/element.py @@ -20,6 +20,7 @@ from glotaran.model import Element from glotaran.model import Item from glotaran.model import ParameterType +from glotaran.model.data_model import DataModel if TYPE_CHECKING: from glotaran.typing.types import ArrayLike @@ -32,9 +33,11 @@ class Oscillation(Item): class DampedOscillationElement(Element): type: Literal["damped-oscillation"] # type:ignore[assignment] - register_as = "damped-oscillation" # type:ignore[pydantic-field] + register_as: str = "damped-oscillation" # type:ignore[misc] dimension: str = "time" - data_model_type = ActivationDataModel # type:ignore[pydantic-field] + data_model_type: type[ # type:ignore[misc,valid-type] + DataModel + ] = ActivationDataModel oscillations: dict[str, Oscillation] def calculate_matrix( # type:ignore[override] diff --git a/glotaran/builtin/elements/kinetic/element.py b/glotaran/builtin/elements/kinetic/element.py index c24e24d45..3fae34d77 100644 --- a/glotaran/builtin/elements/kinetic/element.py +++ b/glotaran/builtin/elements/kinetic/element.py @@ -14,6 +14,7 @@ from glotaran.builtin.items.activation import MultiGaussianActivation from glotaran.builtin.items.activation import add_activation_to_result_data from glotaran.model import ExtendableElement +from glotaran.model.data_model import DataModel from glotaran.model.data_model import is_data_model_global if TYPE_CHECKING: @@ -21,13 +22,13 @@ class KineticElement(ExtendableElement, Kinetic): - type: Literal["kinetic"] = Literal["kinetic"] # 
type:ignore[assignment] - register_as = "kinetic" # type:ignore[pydantic-field] - data_model_type = ActivationDataModel # type:ignore[pydantic-field] + type: Literal["kinetic"] # type:ignore[assignment] + register_as: str = "kinetic" # type:ignore[misc] + data_model_type: type[DataModel] = ActivationDataModel # type:ignore[misc, valid-type] dimension: str = "time" def extend(self, other: KineticElement): # type:ignore[override] - return other.copy(update={"rates": self.rates | other.rates}) + return other.model_copy(update={"rates": self.rates | other.rates}) # TODO: consolidate parent method. @classmethod @@ -48,7 +49,11 @@ def combine(cls, kinetics: list[KineticElement]) -> KineticElement: # type:igno The combined KMatrix. """ - return cls(rates=reduce(lambda lhs, rhs: lhs | rhs, [k.rates for k in kinetics]), label="") + return cls( + type="kinetic", + rates=reduce(lambda lhs, rhs: lhs | rhs, [k.rates for k in kinetics]), + label="", + ) @staticmethod def combine_matrices(lhs: ArrayLike, rhs: ArrayLike) -> ArrayLike: diff --git a/glotaran/builtin/elements/spectral/element.py b/glotaran/builtin/elements/spectral/element.py index 33452502e..51a714b86 100644 --- a/glotaran/builtin/elements/spectral/element.py +++ b/glotaran/builtin/elements/spectral/element.py @@ -21,9 +21,9 @@ class SpectralDataModel(DataModel): class SpectralElement(Element): type: Literal["spectral"] # type:ignore[assignment] + register_as: str = "spectral" # type:ignore[misc] dimension: str = "spectral" - register_as = "spectral" # type:ignore[pydantic-field] - data_model_type = SpectralDataModel # type:ignore[pydantic-field] + data_model_type: type[DataModel] = SpectralDataModel # type:ignore[misc,valid-type] shapes: dict[str, SpectralShape.get_annotated_type()] # type:ignore[valid-type] def calculate_matrix( # type:ignore[override] diff --git a/glotaran/io/preprocessor/preprocessor.py b/glotaran/io/preprocessor/preprocessor.py index 6d918a96e..edd9a002c 100644 --- 
a/glotaran/io/preprocessor/preprocessor.py +++ b/glotaran/io/preprocessor/preprocessor.py @@ -6,15 +6,13 @@ import xarray as xr from pydantic import BaseModel +from pydantic import ConfigDict class PreProcessor(BaseModel, abc.ABC): """A base class for pre=processors.""" - class Config: - """Config for BaseModel.""" - - arbitrary_types_allowed = True + model_config = ConfigDict(arbitrary_types_allowed=True) @abc.abstractmethod def apply(self, data: xr.DataArray) -> xr.DataArray: diff --git a/glotaran/io/preprocessor/test/test_preprocessor.py b/glotaran/io/preprocessor/test/test_preprocessor.py index bc1659425..7f362641b 100644 --- a/glotaran/io/preprocessor/test/test_preprocessor.py +++ b/glotaran/io/preprocessor/test/test_preprocessor.py @@ -35,11 +35,15 @@ def test_to_from_dict(): .correct_baseline_value(1) .correct_baseline_average({"dim_1": slice(0, 2)}) ) - pl_dict = pl.dict() + pl_dict = pl.model_dump() assert pl_dict == { "actions": [ {"action": "baseline-value", "value": 1.0}, - {"action": "baseline-average", "select": {"dim_1": slice(0, 2)}, "exclude": None}, + { + "action": "baseline-average", + "select": {"dim_1": slice(0, 2)}, + "exclude": None, + }, ] } - assert PreProcessingPipeline.parse_obj(pl_dict) == pl + assert PreProcessingPipeline.model_validate(pl_dict) == pl diff --git a/glotaran/model/data_model.py b/glotaran/model/data_model.py index 4ae3fa2fa..0f1a65a04 100644 --- a/glotaran/model/data_model.py +++ b/glotaran/model/data_model.py @@ -3,6 +3,7 @@ from __future__ import annotations from collections.abc import Generator +from typing import TYPE_CHECKING from typing import Any from typing import Literal from uuid import uuid4 @@ -22,6 +23,9 @@ from glotaran.model.weight import Weight from glotaran.parameter import Parameters +if TYPE_CHECKING: + from glotaran.project.library import ModelLibrary + class ExclusiveModelIssue(ItemIssue): """Issue for exclusive elements.""" @@ -202,12 +206,17 @@ class DataModel(Item): def 
create_class_for_elements(elements: set[type[Element]]) -> type[DataModel]: data_model_cls_name = f"GlotaranDataModel_{str(uuid4()).replace('-','_')}" data_models = tuple( - {e.data_model_type for e in elements if e.data_model_type is not None} + { + e.model_fields["data_model_type"].default + for e in elements + if "data_model_type" in e.model_fields + and e.model_fields["data_model_type"].default is not None + } ) + (DataModel,) return create_model(data_model_cls_name, __base__=data_models) @classmethod - def from_dict(cls, library: dict[str, Element], model_dict: dict[str, Any]) -> DataModel: + def from_dict(cls, library: ModelLibrary, model_dict: dict[str, Any]) -> DataModel: element_labels = model_dict.get("elements", []) + model_dict.get("global_elements", []) if len(element_labels) == 0: raise GlotaranModelError("No element defined for dataset") @@ -314,11 +323,11 @@ def iterate_data_model_global_elements( def resolve_data_model( model: DataModel, - library: dict[str, Element], + library: ModelLibrary, parameters: Parameters, initial: Parameters | None = None, ) -> DataModel: - model = model.copy() + model = model.model_copy() model.elements = [library[m] if isinstance(m, str) else m for m in model.elements] if model.global_elements is not None: model.global_elements = [ diff --git a/glotaran/model/experiment_model.py b/glotaran/model/experiment_model.py index c9073555e..337b87f41 100644 --- a/glotaran/model/experiment_model.py +++ b/glotaran/model/experiment_model.py @@ -1,11 +1,12 @@ """This module contains the dataset group.""" from __future__ import annotations +from typing import TYPE_CHECKING from typing import Any from typing import Literal from pydantic import BaseModel -from pydantic import Extra +from pydantic import ConfigDict from pydantic import Field from glotaran.model.clp_constraint import ClpConstraint @@ -13,7 +14,6 @@ from glotaran.model.clp_relation import ClpRelation from glotaran.model.data_model import DataModel from 
glotaran.model.data_model import resolve_data_model -from glotaran.model.element import Element from glotaran.model.errors import ItemIssue from glotaran.model.item import ParameterType from glotaran.model.item import get_item_issues @@ -21,15 +21,14 @@ from glotaran.model.item import resolve_parameter from glotaran.parameter import Parameters +if TYPE_CHECKING: + from glotaran.project.library import ModelLibrary + class ExperimentModel(BaseModel): """A dataset group for optimization.""" - class Config: - """Config for pydantic.BaseModel.""" - - arbitrary_types_allowed = True - extra = Extra.forbid + model_config = ConfigDict(arbitrary_types_allowed=True, extra="forbid") clp_link_tolerance: float = 0.0 clp_link_method: Literal["nearest", "backward", "forward"] = "nearest" @@ -45,24 +44,25 @@ class Config: "variable_projection", description="The residual function to use." ) scale: dict[str, ParameterType] = Field( - default_factory=dict, description="The scales of of the datasets in the experiment." 
+ default_factory=dict, + description="The scales of the datasets in the experiment.", ) @classmethod - def from_dict(cls, library: dict[str, Element], model_dict: dict[str, Any]) -> ExperimentModel: + def from_dict(cls, library: ModelLibrary, model_dict: dict[str, Any]) -> ExperimentModel: model_dict["datasets"] = { label: DataModel.from_dict(library, dataset) for label, dataset in model_dict.get("datasets", {}).items() } - return cls.parse_obj(model_dict) + return cls.model_validate(model_dict) def resolve( self, - library: dict[str, Element], + library: ModelLibrary, parameters: Parameters, initial: Parameters | None = None, ) -> ExperimentModel: - result = self.copy() + result = self.model_copy() result.datasets = { label: resolve_data_model(dataset, library, parameters, initial) for label, dataset in self.datasets.items() diff --git a/glotaran/model/item.py b/glotaran/model/item.py index 70899573c..96d51fa2c 100644 --- a/glotaran/model/item.py +++ b/glotaran/model/item.py @@ -1,6 +1,7 @@ """This module contains the item classes and helper functions.""" import contextlib import typing +from collections import UserDict from functools import cache from inspect import getmro from inspect import isclass @@ -20,11 +21,10 @@ from typing import get_origin from pydantic import BaseModel -from pydantic import Extra +from pydantic import ConfigDict from pydantic import Field from pydantic.fields import FieldInfo -from pydantic.fields import ModelField # type:ignore[attr-defined] -from pydantic.fields import Undefined # type:ignore[attr-defined] +from pydantic_core import PydanticUndefined from glotaran.model.errors import ItemIssue from glotaran.model.errors import ParameterIssue @@ -38,6 +38,34 @@ META_VALIDATOR = "__glotaran_validator__" +class GlotaranFieldMetadata(UserDict): + """Container to hold glotaran field meta data.""" + + @property + def validator(self) -> Callable | None: + """Glotaran validator function if defined, else None.""" + return 
self[META_VALIDATOR] if META_VALIDATOR in self else None + + +def extract_glotaran_field_metadata(info: FieldInfo) -> GlotaranFieldMetadata: + """Extract glotaran metadata from field info metadata list. + + Parameters + ---------- + info : FieldInfo + Field info to search for glotaran metadata in. + + Returns + ------- + GlotaranFieldMetadata + Glotaran meta data from the field info metadata or empty if not present. + """ + for item in info.metadata: + if isinstance(item, GlotaranFieldMetadata): + return item + return GlotaranFieldMetadata() + + class ItemAttribute(FieldInfo): """An attribute for items. @@ -48,7 +76,7 @@ def __init__( self, *, description: str, - default: Any = Undefined, + default: Any = PydanticUndefined, factory: Callable[[], Any] | None = None, validator: Callable | None = None, ): @@ -65,18 +93,20 @@ def __init__( validator: Callable[[Any, Item, Model, Parameters | None], list[ItemIssue]] | None A validator function for the attribute. """ - metadata: dict[str, Any] = {} + glotaran_field_metadata = GlotaranFieldMetadata() if validator is not None: - metadata[META_VALIDATOR] = validator - super().__init__( - default=default, default_factory=factory, description=description, **metadata - ) + glotaran_field_metadata[META_VALIDATOR] = validator + if factory is not None: + super().__init__(default_factory=factory, description=description) + else: + super().__init__(default=default, description=description) + self.metadata.append(glotaran_field_metadata) def Attribute( *, description: str, - default: Any = Undefined, + default: Any = PydanticUndefined, factory: Callable[[], Any] | None = None, validator: Callable | None = None, ) -> Any: @@ -105,11 +135,7 @@ def Attribute( class Item(BaseModel): """A baseclass for items.""" - class Config: - """Config for pydantic.BaseModel.""" - - arbitrary_types_allowed = True - extra = Extra.forbid + model_config = ConfigDict(arbitrary_types_allowed=True, extra="forbid") class TypedItem(Item): @@ -141,12 +167,12 @@ def 
get_annotated_type(cls) -> object: @cache -def get_structure_and_type_from_field(field: ModelField) -> tuple[None | list | dict, type]: +def get_structure_and_type_from_field(info: FieldInfo) -> tuple[None | list | dict, type]: """Get the structure and type from a field. Parameters ---------- - field: ModelField + info: FieldInfo The field. Returns @@ -154,7 +180,7 @@ def get_structure_and_type_from_field(field: ModelField) -> tuple[None | list | tuple[None | list | dict, type]: The structure and type as atuple. """ - definition = strip_option_type_from_definition(field.annotation) + definition = strip_option_type_from_definition(info.annotation) # type:ignore[arg-type] structure, definition = strip_structure_type_from_definition(definition) definition = strip_option_type_from_definition(definition, strip_type=str) return structure, definition @@ -207,7 +233,7 @@ def strip_structure_type_from_definition(definition: type) -> tuple[None | list def iterate_fields_of_type( item: type[ItemT] | ItemT, field_type: type -) -> Generator[ModelField, None, None]: +) -> Generator[tuple[str, FieldInfo], None, None]: """Iterate over all fields of the given types. Parameters @@ -219,11 +245,11 @@ def iterate_fields_of_type( Yields ------ - ModelField + tuple[str, FieldInfo] The matching attributes. """ - for field in item.__fields__.values(): # type:ignore[union-attr] - _, item_type = get_structure_and_type_from_field(field) + for name, info in item.model_fields.items(): + _, item_type = get_structure_and_type_from_field(info) with contextlib.suppress(TypeError): # issubclass does for some reason not work with e.g. 
tuple as item_type # and Parameter as attr_type @@ -236,10 +262,10 @@ def iterate_fields_of_type( ): item_type = typing.get_args(typing.get_args(item_type)[0])[0] if isclass(item_type) and issubclass(item_type, field_type): - yield field + yield name, info -def iterate_item_fields(item: type[ItemT] | ItemT) -> Generator[ModelField, None, None]: +def iterate_item_fields(item: type[ItemT] | ItemT) -> Generator[tuple[str, FieldInfo], None, None]: """Iterate over all item fields. Parameters @@ -249,13 +275,15 @@ def iterate_item_fields(item: type[ItemT] | ItemT) -> Generator[ModelField, None Yields ------ - ModelField + tuple[str, FieldInfo] The item fields. """ yield from iterate_fields_of_type(item, Item) -def iterate_parameter_fields(item: type[ItemT] | ItemT) -> Generator[ModelField, None, None]: +def iterate_parameter_fields( + item: type[ItemT] | ItemT, +) -> Generator[tuple[str, FieldInfo], None, None]: """Iterate over all parameter fields. Parameters @@ -265,7 +293,7 @@ def iterate_parameter_fields(item: type[ItemT] | ItemT) -> Generator[ModelField, Yields ------ - ModelField + tuple[str, FieldInfo] The parameter fields. 
""" yield from iterate_fields_of_type(item, Parameter) @@ -293,45 +321,47 @@ def resolve_item_parameters( resolved: dict[str, Any] = {} initial = initial or parameters - for field in iterate_parameter_fields(item): - value = getattr(item, field.name) + for name, info in iterate_parameter_fields(item): + value = getattr(item, name) if value is None: continue - structure, _ = get_structure_and_type_from_field(field) + structure, _ = get_structure_and_type_from_field(info) if structure is None: - resolved[field.name] = resolve_parameter(value, parameters, initial) + resolved[name] = resolve_parameter(value, parameters, initial) elif structure is list: - resolved[field.name] = [resolve_parameter(v, parameters, initial) for v in value] + resolved[name] = [resolve_parameter(v, parameters, initial) for v in value] elif structure is dict: - resolved[field.name] = { + resolved[name] = { k: resolve_parameter(v, parameters, initial) for k, v in value.items() } - for field in iterate_item_fields(item): - value = getattr(item, field.name) + for name, info in iterate_item_fields(item): + value = getattr(item, name) if value is None: continue - structure, item_type = get_structure_and_type_from_field(field) + structure, item_type = get_structure_and_type_from_field(info) if structure is None: - resolved[field.name] = resolve_item_parameters(value, parameters, initial) + resolved[name] = resolve_item_parameters(value, parameters, initial) elif structure is list: - resolved[field.name] = [resolve_item_parameters(v, parameters, initial) for v in value] + resolved[name] = [resolve_item_parameters(v, parameters, initial) for v in value] elif structure is dict: - resolved[field.name] = { + resolved[name] = { k: resolve_item_parameters(v, parameters, initial) for k, v in value.items() } - return item.copy(update=resolved) + return item.model_copy(update=resolved) def get_item_issues(item: Item, parameters: Parameters) -> list[ItemIssue]: issues = [] - for field in 
iterate_item_fields(item): - value = getattr(item, field.name) + for name, info in iterate_item_fields(item): + value = getattr(item, name) if value is None: continue - if META_VALIDATOR in field.field_info.extra: - issues += field.field_info.extra[META_VALIDATOR](value, item, parameters) - structure, item_type = get_structure_and_type_from_field(field) + + glotaran_field_metadata = extract_glotaran_field_metadata(info) + if glotaran_field_metadata.validator is not None: + issues += glotaran_field_metadata.validator(value, item, parameters) + structure, _ = get_structure_and_type_from_field(info) if structure is None: issues += get_item_issues(value, parameters) else: @@ -339,11 +369,11 @@ def get_item_issues(item: Item, parameters: Parameters) -> list[ItemIssue]: for v in values: issues += get_item_issues(v, parameters) - for field in iterate_parameter_fields(item): - value = getattr(item, field.name) + for name, info in iterate_parameter_fields(item): + value = getattr(item, name) if value is None: continue - structure, _ = get_structure_and_type_from_field(field) + structure, _ = get_structure_and_type_from_field(info) if structure is None: if isinstance(value, str) and not parameters.has(value): issues += [ParameterIssue(value)] diff --git a/glotaran/model/test/test_data_model.py b/glotaran/model/test/test_data_model.py index db18a8b9d..7e08b265d 100644 --- a/glotaran/model/test/test_data_model.py +++ b/glotaran/model/test/test_data_model.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import Literal import numpy as np @@ -18,7 +20,7 @@ class MockDataModel(DataModel): class MockElementWithDataModel(Element): type: Literal["mock-w-datamodel"] dimension: str = "model" - data_model_type = MockDataModel + data_model_type: type[DataModel] = MockDataModel def calculate_matrix( self, diff --git a/glotaran/model/test/test_item.py b/glotaran/model/test/test_item.py index cc52250ef..5f1dae9af 100644 --- a/glotaran/model/test/test_item.py +++ 
b/glotaran/model/test/test_item.py @@ -15,21 +15,21 @@ class MockItem(Item): cscalar: int - cscalar_option: int | None + cscalar_option: int | None = None clist: list[int] - clist_option: list[int] | None + clist_option: list[int] | None = None cdict: dict[str, int] - cdict_option: dict[str, int] | None + cdict_option: dict[str, int] | None = None pscalar: ParameterType - pscalar_option: ParameterType | None + pscalar_option: ParameterType | None = None plist: list[ParameterType] - plist_option: list[ParameterType] | None + plist_option: list[ParameterType] | None = None pdict: dict[str, ParameterType] - pdict_option: dict[str, ParameterType] | None + pdict_option: dict[str, ParameterType] | None = None class MockTypedItem(TypedItem): - pass + """This is just a mock item for testing.""" class MockTypedItemConcrete1(MockTypedItem): @@ -43,7 +43,7 @@ class MockTypedItemConcrete2(MockTypedItem): def test_item_fields_structures_and_type(): - item_fields = MockItem.__fields__.values() + item_fields = MockItem.model_fields.values() wanted = ( (None, int), (None, int), @@ -68,7 +68,7 @@ def test_item_fields_structures_and_type(): def test_iterate_parameters(): item_fields = list(iterate_parameter_fields(MockItem)) assert len(item_fields) == 6 - assert [i.name for i in item_fields] == [ + assert [name for name, _ in item_fields] == [ "pscalar", "pscalar_option", "plist", @@ -83,12 +83,12 @@ def test_typed_item(): def test_item_schema(): - got = MockTypedItem.schema() + got = MockTypedItem.model_json_schema() wanted = { "title": "MockTypedItem", - "description": "An item with a type.", + "description": "This is just a mock item for testing.", "type": "object", - "properties": {"type": {"title": "Type", "type": "null"}}, + "properties": {"type": {"const": None, "title": "Type"}}, "required": ["type"], "additionalProperties": False, } @@ -104,7 +104,7 @@ def test_get_issues(): cdict={}, pscalar="foo", plist=["foo", "bar"], - pdict={1: "foo", 2: "bar"}, + pdict={"1": "foo", 
"2": "bar"}, ) issues = get_item_issues(item, Parameters({})) diff --git a/glotaran/optimization/optimization.py b/glotaran/optimization/optimization.py index 8217d819a..d4ece66de 100644 --- a/glotaran/optimization/optimization.py +++ b/glotaran/optimization/optimization.py @@ -1,4 +1,7 @@ +from __future__ import annotations + from collections import ChainMap +from typing import TYPE_CHECKING from typing import Literal from warnings import warn @@ -6,7 +9,6 @@ import xarray as xr from scipy.optimize import least_squares -from glotaran.model import Element from glotaran.model import ExperimentModel from glotaran.model import GlotaranModelIssues from glotaran.model import GlotaranUserError @@ -18,6 +20,10 @@ from glotaran.typing.types import ArrayLike from glotaran.utils.tee import TeeContext +if TYPE_CHECKING: + from glotaran.project.library import ModelLibrary + + SUPPORTED_OPTIMIZATION_METHODS = { "TrustRegionReflection": "trf", "Dogbox": "dogbox", @@ -47,7 +53,7 @@ def __init__( self, models: list[ExperimentModel], parameters: Parameters, - library: dict[str, Element], + library: ModelLibrary, verbose: bool = True, raise_exception: bool = False, maximum_number_function_evaluations: int | None = None, diff --git a/glotaran/optimization/result.py b/glotaran/optimization/result.py index 6cefcab6e..a98447a38 100644 --- a/glotaran/optimization/result.py +++ b/glotaran/optimization/result.py @@ -3,7 +3,7 @@ import numpy as np from pydantic import BaseModel -from pydantic import Extra +from pydantic import ConfigDict from scipy.optimize import OptimizeResult # TODO: Fix circular import @@ -15,11 +15,7 @@ class OptimizationResult(BaseModel): - class Config: - """Config for pydantic.BaseModel.""" - - arbitrary_types_allowed = True - extra = Extra.forbid + model_config = ConfigDict(arbitrary_types_allowed=True, extra="forbid") """The result of a global analysis.""" @@ -102,7 +98,6 @@ def from_least_squares_result( result_args = { "success": success, - # "glotaran_version": 
glotaran_version, "free_parameter_labels": free_parameter_labels, "parameter_history": parameter_history, "termination_reason": termination_reason, @@ -110,9 +105,9 @@ def from_least_squares_result( "number_of_function_evaluations": result.nfev # type:ignore[union-attr] if success else parameter_history.number_of_records, + "cost": 0.5 * np.dot(penalty, penalty), } - result_args["cost"] = 0.5 * np.dot(penalty, penalty) if success: result_args["number_clp"] = number_clp result_args["number_of_jacobian_evaluations"] = result.njev # type:ignore[union-attr] @@ -168,5 +163,4 @@ def calculate_covariance_matrix_and_standard_errors( _, jacobian_sv, jacobian_rsv = np.linalg.svd(jacobian, full_matrices=False) jacobian_sv_square = jacobian_sv**2 mask = jacobian_sv_square > np.finfo(float).eps - covariance_matrix = (jacobian_rsv[mask].T / jacobian_sv_square[mask]) @ jacobian_rsv[mask] - return covariance_matrix + return (jacobian_rsv[mask].T / jacobian_sv_square[mask]) @ jacobian_rsv[mask] diff --git a/glotaran/project/library.py b/glotaran/project/library.py index e38a8334a..04ee9f35c 100644 --- a/glotaran/project/library.py +++ b/glotaran/project/library.py @@ -1,7 +1,9 @@ +from __future__ import annotations + from functools import reduce from typing import TypeAlias -from pydantic import BaseModel +from pydantic import RootModel from glotaran.model import Element from glotaran.model import ExtendableElement @@ -12,8 +14,8 @@ ] -class ModelLibrary(BaseModel): - __root__: LibraryType +class ModelLibrary(RootModel[LibraryType]): + root: LibraryType def __init__(self, **data): super().__init__(**data) @@ -23,12 +25,12 @@ def __init__(self, **data): current_size = len(extended_elements) while current_size != 0: for label in extended_elements: - element = self.__root__[label] + element = self.root[label] assert element.extends is not None - extends = [self.__root__[label] for label in element.extends] + extends = [self.root[label] for label in element.extends] if all(e.label not 
in extended_elements for e in extends): extends += [element] - self.__root__[label] = reduce(lambda a, b: a.extend(b), extends) + self.root[label] = reduce(lambda a, b: a.extend(b), extends) extended_elements.remove(label) if current_size == len(extended_elements): raise GlotaranModelError( @@ -36,13 +38,19 @@ def __init__(self, **data): ) current_size = len(extended_elements) + def __iter__(self): + return iter(self.root) + + def __getitem__(self, item_label: str): + return self.root[item_label] + @classmethod - def from_dict(cls, spec: dict) -> LibraryType: - return cls.parse_obj({label: m | {"label": label} for label, m in spec.items()}).__root__ + def from_dict(cls, spec: dict) -> ModelLibrary: + return cls(**{label: m | {"label": label} for label, m in spec.items()}) def _get_extended_elements(self) -> list[str]: return [ label - for label, element in self.__root__.items() + for label, element in self.root.items() if isinstance(element, ExtendableElement) and element.is_extended() ] diff --git a/glotaran/project/result.py b/glotaran/project/result.py index 20f5564a4..9aeae6cb0 100644 --- a/glotaran/project/result.py +++ b/glotaran/project/result.py @@ -4,7 +4,7 @@ import xarray as xr from pydantic import BaseModel -from pydantic import Extra +from pydantic import ConfigDict from glotaran.builtin.io.yml.utils import write_dict from glotaran.io import save_dataset @@ -27,11 +27,7 @@ class SavingOptions(BaseModel): class Result(BaseModel): - class Config: - """Config for pydantic.BaseModel.""" - - arbitrary_types_allowed = True - extra = Extra.forbid + model_config = ConfigDict(arbitrary_types_allowed=True, extra="forbid") data: dict[str, xr.Dataset] experiments: dict[str, ExperimentModel] @@ -60,7 +56,7 @@ def save( # for label, experiment in self.experiments.items(): # experiment_path = experiment_folder / f"{label}.yml" # result_dict["experiments"][label] = experiment_path - # write_dict(experiment.dict(), experiment_path) + # 
write_dict(experiment.model_dump(), experiment_path) data_path = path / "data" data_path.mkdir(exist_ok=True) diff --git a/glotaran/project/scheme.py b/glotaran/project/scheme.py index f890f283a..8e039f940 100644 --- a/glotaran/project/scheme.py +++ b/glotaran/project/scheme.py @@ -2,27 +2,22 @@ import xarray as xr from pydantic import BaseModel -from pydantic import Extra +from pydantic import ConfigDict from glotaran.io import load_dataset from glotaran.model import ExperimentModel from glotaran.model.errors import GlotaranUserError from glotaran.optimization import Optimization from glotaran.parameter import Parameters -from glotaran.project.library import LibraryType from glotaran.project.library import ModelLibrary from glotaran.project.result import Result class Scheme(BaseModel): - class Config: - """Config for pydantic.BaseModel.""" - - arbitrary_types_allowed = True - extra = Extra.forbid + model_config = ConfigDict(arbitrary_types_allowed=True, extra="forbid") experiments: dict[str, ExperimentModel] - library: LibraryType + library: ModelLibrary @classmethod def from_dict(cls, spec: dict): diff --git a/glotaran/simulation/simulation.py b/glotaran/simulation/simulation.py index 4546f6f3c..48f41ff4d 100644 --- a/glotaran/simulation/simulation.py +++ b/glotaran/simulation/simulation.py @@ -1,11 +1,12 @@ """Functions for simulating a dataset using a global optimization model.""" from __future__ import annotations +from typing import TYPE_CHECKING + import numpy as np import xarray as xr from glotaran.model import DataModel -from glotaran.model import Element from glotaran.model import GlotaranUserError from glotaran.model import get_data_model_dimension from glotaran.model import resolve_data_model @@ -13,10 +14,13 @@ from glotaran.parameter import Parameters from glotaran.typing.types import ArrayLike +if TYPE_CHECKING: + from glotaran.project.library import ModelLibrary + def simulate( model: DataModel, - library: dict[str, Element], + library: 
ModelLibrary, parameters: Parameters, coordinates: dict[str, ArrayLike], clp: xr.DataArray | None = None, diff --git a/glotaran/testing/simulated_data/parallel_spectral_decay.py b/glotaran/testing/simulated_data/parallel_spectral_decay.py index f024a7ea7..326f6d665 100644 --- a/glotaran/testing/simulated_data/parallel_spectral_decay.py +++ b/glotaran/testing/simulated_data/parallel_spectral_decay.py @@ -17,7 +17,7 @@ KineticSpectrumDataModel( elements=["parallel"], global_elements=["spectral"], - activation=[GaussianActivation.parse_obj(ACTIVATION)], # type:ignore[call-arg] + activation=[GaussianActivation.model_validate(ACTIVATION)], # type:ignore[call-arg] ), ModelLibrary.from_dict(LIBRARY), SIMULATION_PARAMETERS, diff --git a/glotaran/testing/simulated_data/sequential_spectral_decay.py b/glotaran/testing/simulated_data/sequential_spectral_decay.py index 1e6c8d7aa..f36b00469 100644 --- a/glotaran/testing/simulated_data/sequential_spectral_decay.py +++ b/glotaran/testing/simulated_data/sequential_spectral_decay.py @@ -17,7 +17,7 @@ KineticSpectrumDataModel( elements=["sequential"], global_elements=["spectral"], - activation=[GaussianActivation.parse_obj(ACTIVATION)], # type:ignore[call-arg] + activation=[GaussianActivation.model_validate(ACTIVATION)], # type:ignore[call-arg] ), ModelLibrary.from_dict(LIBRARY), SIMULATION_PARAMETERS, diff --git a/requirements_dev.txt b/requirements_dev.txt index 0ca425f3a..2f8d7bddf 100644 --- a/requirements_dev.txt +++ b/requirements_dev.txt @@ -11,7 +11,7 @@ numpy==1.24.4 odfpy==1.4.1 openpyxl==3.1.2 pandas==2.1.1 -pydantic==1.10.13 +pydantic==2.4.2 ruamel.yaml==0.17.35 scipy==1.11.2 sdtfile==2023.9.28 diff --git a/setup.cfg b/setup.cfg index 0b4246d8c..f7b685ac8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -39,7 +39,7 @@ install_requires = odfpy>=1.4.1 openpyxl>=3.0.10 pandas>=1.3.4 - pydantic>=1.10.2 + pydantic>=2.0 ruamel.yaml>=0.17.17 scipy>=1.7.2 sdtfile>=2020.8.3