"""Defines a class storing data in memory.
.. codeauthor:: David Zwicker <david.zwicker@ds.mpg.de>
"""
from __future__ import annotations
from collections.abc import Sequence
from contextlib import contextmanager
import numpy as np
from ..fields import FieldCollection
from ..fields.base import FieldBase
from .base import InfoDict, StorageBase, WriteModeType
class MemoryStorage(StorageBase):
"""Store discretized fields in memory."""
def __init__(
self,
times: Sequence[float] | None = None,
data: list[np.ndarray] | None = None,
*,
info: InfoDict | None = None,
field_obj: FieldBase | None = None,
write_mode: WriteModeType = "truncate_once",
):
"""
Args:
times (:class:`~numpy.ndarray`):
Sequence of times for which data is known
data (list of :class:`~numpy.ndarray`):
The field data at the given times
field_obj (:class:`~pde.fields.base.FieldBase`):
An instance of the field class that stores data for a single time point.
info (dict):
Supplies extra information that is stored in the storage
write_mode (str):
Determines how new data is added to already existing data. Possible
values are: 'append' (data is always appended), 'truncate' (data is
cleared every time this storage is used for writing), or 'truncate_once'
(data is cleared for the first writing, but appended subsequently).
Alternatively, specifying 'readonly' will disable writing completely.
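
Example:
    A minimal sketch of filling a storage directly with precomputed data; the
    names ``field``, ``data_t0``, and ``data_t1`` are placeholders for an
    existing field and two data arrays of matching shape::

        # data_t0 / data_t1 are assumed to match field.data.shape
        storage = MemoryStorage(
            times=[0.0, 1.0],
            data=[data_t0, data_t1],
            field_obj=field,
        )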
"""
super().__init__(info=info, write_mode=write_mode)
self.times: list[float] = [] if times is None else list(times)
if field_obj is not None:
self._field = field_obj.copy()
self._grid = field_obj.grid
self._data_shape = field_obj.data.shape
self.data: list[np.ndarray] = [] if data is None else data
if self._data_shape is None and len(self.data) > 0:
self._data_shape = self.data[0].shape
# check consistency
if len(self.times) != len(self.data):
raise ValueError(
"Length of the supplied `times` and `fields` are inconsistent "
f"({len(self.times)} != {len(self.data)})"
)
@classmethod
def from_fields(
cls,
times: Sequence[float] | None = None,
fields: Sequence[FieldBase] | None = None,
info: InfoDict | None = None,
write_mode: WriteModeType = "truncate_once",
) -> MemoryStorage:
"""Create MemoryStorage from a list of fields.
Args:
times (:class:`~numpy.ndarray`):
Sequence of times for which data is known
fields (list of :class:`~pde.fields.FieldBase`):
The fields at all given time points
info (dict):
Supplies extra information that is stored in the storage
write_mode (str):
Determines how new data is added to already existing data. Possible
values are: 'append' (data is always appended), 'truncate' (data is
cleared every time this storage is used for writing), or 'truncate_once'
(data is cleared for the first writing, but appended subsequently).
Alternatively, specifying 'readonly' will disable writing completely.
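
Example:
    A minimal sketch, assuming the top-level :mod:`pde` package is available
    and using an arbitrary small grid::

        import pde

        grid = pde.UnitGrid([8])  # hypothetical 1d grid with 8 cells
        fields = [pde.ScalarField(grid, t) for t in range(3)]
        storage = pde.MemoryStorage.from_fields(times=[0, 1, 2], fields=fields)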
"""
if fields is None:
field_obj = None
data = None
else:
field_obj = fields[0]
data = [fields[0].data]
for field in fields[1:]:
if field_obj.grid != field.grid:
raise ValueError("Grids of the fields are incompatible")
data.append(field.data)
return cls(
times, data=data, field_obj=field_obj, info=info, write_mode=write_mode
)
@classmethod
def from_collection(
cls,
storages: Sequence[StorageBase],
label: str | None = None,
*,
rtol: float = 1.0e-5,
atol: float = 1.0e-8,
) -> MemoryStorage:
"""Combine multiple memory storages into one.
This method can be used to combine multiple time series of different fields into
a single representation. This requires that all time series contain data at the
same time points.
Args:
storages (list):
A collection of instances of :class:`~pde.storage.base.StorageBase`
whose data will be concatenated into a single MemoryStorage
label (str, optional):
The label of the instances of :class:`~pde.fields.FieldCollection` that
represent the concatenated data
rtol (float):
Relative tolerance used when checking times for merging
atol (float):
Absolute tolerance used when checking times for merging
Returns:
:class:`~pde.storage.memory.MemoryStorage`: Storage containing all the data.
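
Example:
    A minimal sketch; ``storage_a`` and ``storage_b`` are placeholders for two
    storages that were recorded at identical time points::

        merged = MemoryStorage.from_collection([storage_a, storage_b])
        merged[0]  # FieldCollection combining the fields at the first time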
"""
if len(storages) == 0:
return cls()
# initialize the combined data
times = storages[0].times
data = [[field] for field in storages[0]]
# append data from further storages
for storage in storages[1:]:
if not np.allclose(times, storage.times, rtol=rtol, atol=atol):
raise ValueError("Storages have incompatible times")
for i, field in enumerate(storage):
data[i].append(field)
# convert data format to FieldCollections
fields = [FieldCollection(d, label=label) for d in data] # type: ignore
return cls.from_fields(times, fields=fields)
def clear(self, clear_data_shape: bool = False) -> None:
"""Truncate the storage by removing all stored data.
Args:
clear_data_shape (bool):
Flag determining whether the data shape is also deleted.
"""
self.times = []
self.data = []
super().clear(clear_data_shape=clear_data_shape)
def start_writing(self, field: FieldBase, info: InfoDict | None = None) -> None:
"""Initialize the storage for writing data.
Args:
field (:class:`~pde.fields.FieldBase`):
An instance of the field class that stores data for a single time point.
info (dict):
Supplies extra information that is stored in the storage
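
Example:
    A minimal sketch of the low-level writing protocol; ``field`` is a
    placeholder for an existing :class:`~pde.fields.base.FieldBase` instance,
    and ``append``/``end_writing`` are provided by the storage base class::

        storage = MemoryStorage()
        storage.start_writing(field)
        storage.append(field, 0.0)  # stores a copy of the current field data
        storage.end_writing()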
"""
super().start_writing(field, info=info)
# update the info dictionary after calling the base method so that the
# supplied values take precedence over information that is already stored
if info is not None:
self.info.update(info)
# handle the different write modes
if self.write_mode == "truncate_once":
self.clear()
self.write_mode = "append" # do not truncate in subsequent calls
elif self.write_mode == "truncate":
self.clear()
elif self.write_mode == "readonly":
raise RuntimeError("Cannot write in read-only mode")
elif self.write_mode != "append":
raise ValueError(
f"Unknown write mode `{self.write_mode}`. Possible values are "
"`truncate_once`, `truncate`, and `append`"
)
def _append_data(self, data: np.ndarray, time: float) -> None:
"""Append a new data set.
Args:
data (:class:`~numpy.ndarray`): The actual data
time (float): The time point associated with the data
"""
if data.shape != self.data_shape:
raise ValueError(f"Data must have shape {self.data_shape}")
self.data.append(np.array(data)) # store copy of the data
self.times.append(time)
@contextmanager
def get_memory_storage(field: FieldBase, info: InfoDict | None = None):
"""A context manager that can be used to create a MemoryStorage.
Example:
    This can be used to quickly store data::

        with get_memory_storage(field_class) as storage:
            storage.append(numpy_array0, 0)
            storage.append(numpy_array1, 1)

        # use storage thereafter
Args:
field (:class:`~pde.fields.FieldBase`):
An instance of the field class that stores data for a single time point.
info (dict):
Supplies extra information that is stored in the storage
Yields:
:class:`MemoryStorage`
"""
storage = MemoryStorage()
storage.start_writing(field, info)
yield storage
storage.end_writing()