fix: pydantic extra keywords deprecated (#753)

Pydantic deprecates passing extra keyword arguments to Field.
Use json_schema_extra instead.

Deprecated in Pydantic V2.0 to be removed in V3.0.

Signed-off-by: Bobby Noelte <b0661n0e17e@gmail.com>
This commit is contained in:
Bobby Noelte
2025-11-10 16:57:44 +01:00
committed by GitHub
parent 54b0622a96
commit e7b43782a4
44 changed files with 1956 additions and 1194 deletions

View File

@@ -251,10 +251,14 @@ RetType = TypeVar("RetType")
class CacheFileRecord(PydanticBaseModel):
cache_file: Any = Field(..., description="File descriptor of the cache file.")
until_datetime: DateTime = Field(..., description="Datetime until the cache file is valid.")
cache_file: Any = Field(
..., json_schema_extra={"description": "File descriptor of the cache file."}
)
until_datetime: DateTime = Field(
..., json_schema_extra={"description": "Datetime until the cache file is valid."}
)
ttl_duration: Optional[Duration] = Field(
default=None, description="Duration the cache file is valid."
default=None, json_schema_extra={"description": "Duration the cache file is valid."}
)

View File

@@ -15,11 +15,13 @@ class CacheCommonSettings(SettingsBaseModel):
"""Cache Configuration."""
subpath: Optional[Path] = Field(
default="cache", description="Sub-path for the EOS cache data directory."
default="cache",
json_schema_extra={"description": "Sub-path for the EOS cache data directory."},
)
cleanup_interval: float = Field(
default=5 * 60, description="Intervall in seconds for EOS file cache cleanup."
default=5 * 60,
json_schema_extra={"description": "Intervall in seconds for EOS file cache cleanup."},
)
# Do not make this a pydantic computed field. The pydantic model must be fully initialized

View File

@@ -84,12 +84,16 @@ class DataRecord(DataBase, MutableMapping):
- Supports non-standard data types like `datetime`.
"""
date_time: Optional[DateTime] = Field(default=None, description="DateTime")
date_time: Optional[DateTime] = Field(
default=None, json_schema_extra={"description": "DateTime"}
)
configured_data: dict[str, Any] = Field(
default_factory=dict,
description="Configured field like data",
examples=[{"load0_mr": 40421}],
json_schema_extra={
"description": "Configured field like data",
"examples": [{"load0_mr": 40421}],
},
)
# Pydantic v2 model configuration
@@ -368,10 +372,11 @@ class DataRecord(DataBase, MutableMapping):
return None
# Get all descriptions from the fields
descriptions = {
field_name: field_info.description
for field_name, field_info in cls.model_fields.items()
}
descriptions: dict[str, str] = {}
for field_name in cls.model_fields.keys():
desc = cls.field_description(field_name)
if desc:
descriptions[field_name] = desc
# Use difflib to get close matches
matches = difflib.get_close_matches(
@@ -429,8 +434,7 @@ class DataSequence(DataBase, MutableSequence):
Usage:
# Example of creating, adding, and using DataSequence
class DerivedSequence(DataSequence):
records: List[DerivedDataRecord] = Field(default_factory=list,
description="List of data records")
records: List[DerivedDataRecord] = Field(default_factory=list, json_schema_extra={ "description": "List of data records" })
seq = DerivedSequence()
seq.insert(DerivedDataRecord(date_time=datetime.now(), temperature=72))
@@ -445,7 +449,9 @@ class DataSequence(DataBase, MutableSequence):
"""
# To be overloaded by derived classes.
records: List[DataRecord] = Field(default_factory=list, description="List of data records")
records: List[DataRecord] = Field(
default_factory=list, json_schema_extra={"description": "List of data records"}
)
# Derived fields (computed)
@computed_field # type: ignore[prop-decorator]
@@ -1313,7 +1319,7 @@ class DataProvider(SingletonMixin, DataSequence):
"""
update_datetime: Optional[AwareDatetime] = Field(
None, description="Latest update datetime for generic data"
None, json_schema_extra={"description": "Latest update datetime for generic data"}
)
@abstractmethod
@@ -1780,7 +1786,7 @@ class DataContainer(SingletonMixin, DataBase, MutableMapping):
# To be overloaded by derived classes.
providers: List[DataProvider] = Field(
default_factory=list, description="List of data providers"
default_factory=list, json_schema_extra={"description": "List of data providers"}
)
@field_validator("providers", mode="after")

File diff suppressed because it is too large Load Diff

View File

@@ -24,17 +24,23 @@ class EnergyManagementCommonSettings(SettingsBaseModel):
startup_delay: float = Field(
default=5,
ge=1,
description="Startup delay in seconds for EOS energy management runs.",
json_schema_extra={
"description": "Startup delay in seconds for EOS energy management runs."
},
)
interval: Optional[float] = Field(
default=None,
description="Intervall in seconds between EOS energy management runs.",
examples=["300"],
json_schema_extra={
"description": "Intervall in seconds between EOS energy management runs.",
"examples": ["300"],
},
)
mode: Optional[EnergyManagementMode] = Field(
default=None,
description="Energy management mode [OPTIMIZATION | PREDICTION].",
examples=["OPTIMIZATION", "PREDICTION"],
json_schema_extra={
"description": "Energy management mode [OPTIMIZATION | PREDICTION].",
"examples": ["OPTIMIZATION", "PREDICTION"],
},
)

View File

@@ -17,14 +17,18 @@ class LoggingCommonSettings(SettingsBaseModel):
console_level: Optional[str] = Field(
default=None,
description="Logging level when logging to console.",
examples=LOGGING_LEVELS,
json_schema_extra={
"description": "Logging level when logging to console.",
"examples": LOGGING_LEVELS,
},
)
file_level: Optional[str] = Field(
default=None,
description="Logging level when logging to file.",
examples=LOGGING_LEVELS,
json_schema_extra={
"description": "Logging level when logging to file.",
"examples": LOGGING_LEVELS,
},
)
@computed_field # type: ignore[prop-decorator]

View File

@@ -43,6 +43,7 @@ from pydantic import (
ValidationInfo,
field_validator,
)
from pydantic.fields import ComputedFieldInfo, FieldInfo
from akkudoktoreos.utils.datetimeutil import DateTime, to_datetime, to_duration
@@ -720,6 +721,146 @@ class PydanticBaseModel(PydanticModelNestedValueMixin, BaseModel):
data = json.loads(json_str)
return cls.model_validate(data)
@classmethod
def _field_extra_dict(
cls,
model_field: Union[FieldInfo, ComputedFieldInfo],
) -> Dict[str, Any]:
"""Return the ``json_schema_extra`` dictionary for a given model field.
This method provides a safe and unified way to access the
``json_schema_extra`` metadata associated with a Pydantic field
definition. It supports both standard fields defined via
``Field(...)`` and computed fields, and gracefully handles
cases where ``json_schema_extra`` is not present.
Args:
model_field (Union[FieldInfo, ComputedFieldInfo]):
The Pydantic field object from which to extract
``json_schema_extra`` metadata. This can be obtained
from ``model.model_fields[field_name]`` or
``model.model_computed_fields[field_name]``.
Returns:
Dict[str, Any]:
A dictionary containing the field's ``json_schema_extra``
metadata. If no metadata is available, an empty dictionary
is returned.
Raises:
None:
This method does not raise. Missing metadata is handled
gracefully by returning an empty dictionary.
Examples:
>>> class User(Base):
...     name: str = Field(
...         json_schema_extra={"description": "User name"}
...     )
...
>>> field = User.model_fields["name"]
>>> User._field_extra_dict(field)
{'description': 'User name'}
>>> missing = User.model_fields.get("unknown", None)
>>> User._field_extra_dict(missing) if missing else {}
{}
"""
if model_field is None:
return {}
# Pydantic v2 primary location
extra = getattr(model_field, "json_schema_extra", None)
if isinstance(extra, dict):
return extra
# Pydantic v1 compatibility fallback
fi = getattr(model_field, "field_info", None)
if fi is not None:
extra = getattr(fi, "json_schema_extra", None)
if isinstance(extra, dict):
return extra
return {}
@classmethod
def field_description(cls, field_name: str) -> Optional[str]:
"""Return the description metadata of a model field, if available.
This method retrieves the `Field` specification from the model's
`model_fields` registry and extracts its description from the field's
`json_schema_extra` / `extra` metadata (as provided by
`_field_extra_dict`). If the field does not exist or no description is
present, ``None`` is returned.
Args:
field_name (str):
Name of the field whose description should be returned.
Returns:
Optional[str]:
The textual description if present, otherwise ``None``. The
value is coerced to ``str`` before being returned.
"""
field = cls.model_fields.get(field_name)
if not field:
return None
extra = cls._field_extra_dict(field)
if "description" in extra:
return str(extra["description"])
return None
@classmethod
def field_deprecated(cls, field_name: str) -> Optional[str]:
"""Return the deprecated metadata of a model field, if available.
This method retrieves the `Field` specification from the model's
`model_fields` registry and extracts its deprecation info from the
field's `json_schema_extra` / `extra` metadata (as provided by
`_field_extra_dict`). If the field does not exist or no deprecation
info is present, ``None`` is returned.
Args:
field_name (str):
Name of the field whose deprecated info should be returned.
Returns:
Optional[str]:
The textual deprecated info if present, otherwise ``None``. The
value is coerced to ``str`` before being returned.
"""
field = cls.model_fields.get(field_name)
if not field:
return None
extra = cls._field_extra_dict(field)
if "deprecated" in extra:
return str(extra["deprecated"])
return None
@classmethod
def field_examples(cls, field_name: str) -> Optional[list[Any]]:
"""Return the examples metadata of a model field, if available.
This method retrieves the `Field` specification from the model's
`model_fields` registry and extracts its examples from the field's
`json_schema_extra` / `extra` metadata (as provided by
`_field_extra_dict`). If the field does not exist or no examples are
present, ``None`` is returned.
Args:
field_name (str):
Name of the field whose examples should be returned.
Returns:
Optional[list[Any]]:
The examples if present, otherwise ``None``. The list is
returned as stored in the metadata (no copy is made).
"""
field = cls.model_fields.get(field_name)
if not field:
return None
extra = cls._field_extra_dict(field)
if "examples" in extra:
return extra["examples"]
return None
class PydanticDateTimeData(RootModel):
"""Pydantic model for time series data with consistent value lengths.
@@ -795,9 +936,12 @@ class PydanticDateTimeDataFrame(PydanticBaseModel):
data: Dict[str, Dict[str, Any]]
dtypes: Dict[str, str] = Field(default_factory=dict)
tz: Optional[str] = Field(default=None, description="Timezone for datetime values")
tz: Optional[str] = Field(
default=None, json_schema_extra={"description": "Timezone for datetime values"}
)
datetime_columns: list[str] = Field(
default_factory=lambda: ["date_time"], description="Columns to be treated as datetime"
default_factory=lambda: ["date_time"],
json_schema_extra={"description": "Columns to be treated as datetime"},
)
@field_validator("tz")