9 Commits

7 changed files with 67 additions and 5 deletions
+3 -1
@@ -166,8 +166,10 @@ def autosave_data(
     @functools.wraps(function)
     def autosave(*args, **kwargs):
         description = kwargs.pop("description", "")
+        autosave_dir_overwrite = kwargs.pop("autosave_dir_overwrite", None)
+        autosave_dir = autosave_dir_overwrite if autosave_dir_overwrite is not None else autosave_directory
         autoload = kwargs.pop("autoload", True) and load_autosave_data
-        if autosave_directory is not None:
+        if autosave_dir is not None:
             relevant_args = list(args[:nargs])
             if kwargs_keys is not None:
                 for key in [*posargs_keys, *kwargs_keys]:
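
The new autosave_dir_overwrite keyword lets a single call redirect where results are cached without touching the decorator arguments. A minimal self-contained sketch of the kwargs.pop pattern (the demo decorator and function names are hypothetical stand-ins, not the real autosave_data):

    import functools

    def autosave_data_demo(autosave_directory=None):
        # Stand-in mirroring the kwargs.pop pattern from the hunk above.
        def decorator(function):
            @functools.wraps(function)
            def autosave(*args, **kwargs):
                overwrite = kwargs.pop("autosave_dir_overwrite", None)
                autosave_dir = overwrite if overwrite is not None else autosave_directory
                print("autosave dir:", autosave_dir)
                return function(*args, **kwargs)
            return autosave
        return decorator

    @autosave_data_demo(autosave_directory="./autosave")
    def compute(x):
        return x * 2

    compute(1)                                     # autosave dir: ./autosave
    compute(1, autosave_dir_overwrite="/tmp/alt")  # autosave dir: /tmp/alt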
+14 -2
@@ -102,8 +102,20 @@ def checksum(*args, csum=None, _seen=None):
         _seen.add(obj_id)
         if hasattr(arg, "__checksum__"):
-            logger.debug("Checksum via __checksum__: %s", str(arg))
-            csum.update(str(arg.__checksum__()).encode())
+            method = getattr(arg, "__checksum__")
+            if callable(method) and not isinstance(arg, type):
+                logger.debug("Checksum via __checksum__: %s", str(arg))
+                csum.update(str(method()).encode())
+            elif isinstance(arg, type):
+                try:
+                    src = inspect.getsource(arg)
+                    csum.update(strip_comments(src).encode())
+                    logger.debug("Checksum via class source for %s", arg.__name__)
+                except (OSError, TypeError):
+                    csum.update(arg.__name__.encode())
+                    logger.debug("Checksum via class name for %s", arg.__name__)
+            else:
+                logger.debug("Skipping unbound __checksum__ on %s", type(arg))
         elif isinstance(arg, bytes):
             csum.update(arg)
         elif isinstance(arg, str):
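
The dispatch above distinguishes instances (call their __checksum__) from classes (hash their stripped source, falling back to the class name when no source is available). A runnable sketch of that branching, with a simplified stand-in for the module's strip_comments:

    import hashlib
    import inspect

    def strip_comments_demo(src):  # simplified stand-in for strip_comments
        return "\n".join(line.split("#")[0].rstrip() for line in src.splitlines())

    class Sample:
        def __checksum__(self):
            return 42

    csum = hashlib.md5()
    for arg in (Sample(), Sample):  # an instance, then the class itself
        method = getattr(arg, "__checksum__", None)
        if callable(method) and not isinstance(arg, type):
            csum.update(str(method()).encode())  # instance: use its __checksum__
        elif isinstance(arg, type):
            try:
                csum.update(strip_comments_demo(inspect.getsource(arg)).encode())
            except (OSError, TypeError):
                csum.update(arg.__name__.encode())  # no retrievable source
    print(csum.hexdigest())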
+28
@@ -434,6 +434,34 @@ def center_of_masses(
         ]
     ).T[mask]
     return np.array(positions)
+
+
+@map_coordinates
+def center_of_atoms(
+    frame: CoordinateFrame, atom_indices=None, shear: bool = False
+) -> NDArray:
+    if atom_indices is None:
+        atom_indices = list(range(len(frame)))
+    res_ids = frame.residue_ids[atom_indices]
+    if shear:
+        coords = frame[atom_indices]
+        box = frame.box
+        sort_ind = res_ids.argsort(kind="stable")
+        i = np.concatenate([[0], np.where(np.diff(res_ids[sort_ind]) > 0)[0] + 1])
+        coms = coords[sort_ind[i]][res_ids - min(res_ids)]
+        cor = pbc_diff(coords, coms, box)
+        coords = coms + cor
+    else:
+        coords = frame.whole[atom_indices]
+    mask = np.bincount(res_ids)[1:] != 0
+    positions = np.array(
+        [
+            np.bincount(res_ids, weights=c)[1:]
+            / np.bincount(res_ids)[1:]
+            for c in coords.T
+        ]
+    ).T[mask]
+    return np.array(positions)


 @map_coordinates
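
center_of_atoms computes unweighted geometric centers per residue; the np.bincount idiom does the grouped averaging. A self-contained illustration of that idiom on toy data (1-based residue ids, as implied by the [1:] slices):

    import numpy as np

    res_ids = np.array([1, 1, 1, 2, 2])  # 5 atoms in residues 1 and 2
    coords = np.array([[0.0, 0.0], [1.0, 0.0], [2.0, 0.0], [4.0, 2.0], [6.0, 4.0]])

    counts = np.bincount(res_ids)[1:]  # atoms per residue: [3, 2]
    mask = counts != 0                 # keep only populated residue ids
    centers = np.array(
        [np.bincount(res_ids, weights=c)[1:] / counts for c in coords.T]
    ).T[mask]
    print(centers)  # [[1. 0.] [5. 3.]] -- per-residue geometric centers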
+2 -1
@@ -147,7 +147,8 @@ def shifted_correlation(
     num_frames = int(len(frames) * window)
     ls = np.logspace(0, np.log10(num_frames + 1), num=points)
     idx = np.unique(np.int_(ls) - 1)
-    t = np.array([frames[i].time for i in idx]) - frames[0].time
+    dt = round(frames[1].time - frames[0].time, 6)  # round away floating-point noise in the time step
+    t = idx * dt
     result = np.array(
         [
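
Building the time axis from idx * dt instead of subtracting stored frame times avoids the rounding noise that accumulates when a reader sums time stamps in single precision. A small self-contained demonstration of the effect on synthetic float32 time stamps:

    import numpy as np

    dt_true = 0.002
    times = np.cumsum(np.full(1000, np.float32(dt_true)))  # accumulated float32 time stamps
    idx = np.array([0, 9, 99, 999])

    t_old = times[idx] - times[0]              # carries accumulated rounding error
    dt = round(float(times[1] - times[0]), 6)  # recovers a clean step of 0.002
    t_new = idx * dt                           # exact multiples of dt
    print(t_old)  # slightly off-grid values that drift with the frame index
    print(t_new)  # [0.    0.018 0.198 1.998]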
+1 -1
@@ -163,7 +163,7 @@ def nojump(frame: CoordinateFrame, usecache: bool = True) -> CoordinateFrame:
         ).T
         delta = delta[selection, :]
-        delta = delta @ frame.box
+        delta = np.array(delta @ frame.box)
     return frame - delta
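
One plausible reading of this one-liner: np.array(...) coerces the matmul result back to a plain ndarray in case frame.box is an ndarray subclass, which matmul would otherwise propagate to delta. A sketch with a hypothetical subclass:

    import numpy as np

    class BoxArray(np.ndarray):  # hypothetical stand-in for a subclassed frame.box
        pass

    box = np.eye(3).view(BoxArray)
    delta = np.ones((4, 3))

    print(type(delta @ box))            # <class '__main__.BoxArray'> -- subclass leaks through
    print(type(np.array(delta @ box)))  # <class 'numpy.ndarray'> -- coerced to the base type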
+14
@@ -23,6 +23,8 @@ from .logging_util import logger
 from . import atoms
 from .coordinates import Coordinates
+from unittest.mock import MagicMock
+
 CSR_ATTRS = ("data", "indices", "indptr")
 NOJUMP_MAGIC = 2016
 Group_RE = re.compile(r"\[ ([-+\w]+) \]")
@@ -240,7 +242,18 @@ def generate_nojump_matrices(trajectory: Coordinates):
     save_nojump_matrices(trajectory.frames)


+def _ensure_xdr(reader: BaseReader):
+    """Patch a missing _xdr attribute on non-XDR readers (e.g. the LAMMPS DumpReader)
+    with a stable mock so checksums are consistent across runs."""
+    if not hasattr(reader.rd, "_xdr"):
+        mock_xdr = MagicMock()
+        mock_xdr.offsets = np.arange(len(reader))
+        logger.debug("Adding mock _xdr attribute to reader of length %d.", len(reader))
+        reader.rd._xdr = mock_xdr
+
+
 def save_nojump_matrices(reader: BaseReader, matrices: npt.ArrayLike = None):
+    _ensure_xdr(reader)
     if matrices is None:
         matrices = reader.nojump_matrices
     data = {"checksum": checksum(NOJUMP_MAGIC, checksum(reader))}
@@ -253,6 +266,7 @@ def save_nojump_matrices(reader: BaseReader, matrices: npt.ArrayLike = None):
 def load_nojump_matrices(reader: BaseReader):
+    _ensure_xdr(reader)
     zipname = nojump_load_filename(reader)
     try:
         data = np.load(zipname, allow_pickle=True)
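
_ensure_xdr works because MagicMock accepts arbitrary attribute assignment and np.arange(len(reader)) is deterministic, so the mocked offsets (and hence the reader checksum) come out identical on every run. A self-contained sketch with a hypothetical reader stand-in:

    import numpy as np
    from unittest.mock import MagicMock

    class FakeDumpReader:  # hypothetical stand-in for a reader whose rd has no _xdr
        def __init__(self, n):
            self._n = n
            self.rd = MagicMock(spec=[])  # spec=[] makes hasattr(rd, "_xdr") False
        def __len__(self):
            return self._n

    reader = FakeDumpReader(5)
    if not hasattr(reader.rd, "_xdr"):
        mock_xdr = MagicMock()
        mock_xdr.offsets = np.arange(len(reader))  # stable across runs
        reader.rd._xdr = mock_xdr

    print(reader.rd._xdr.offsets)  # [0 1 2 3 4] every time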
+5
@@ -334,6 +334,11 @@ def quick1etau(t: ArrayLike, C: ArrayLike, n: int = 7) -> float:
     C is C(t) the correlation function
     n is the minimum number of points around 1/e required
     """
+    # normalize the correlation function if the t = 0 point is provided
+    if t[0] == 0:
+        C /= C[0]
+        C, t = C[t > 0], t[t > 0]  # make sure the t = 0 point is dropped
+
     # first rough estimate, the closest time. This is returned if the interpolation fails!
     tau_est = t[np.argmin(np.fabs(C - np.exp(-1)))]
     # reduce the data to points around 1/e
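
The new guard normalizes C by its t = 0 value and then drops that point, so the 1/e search below only ever sees t > 0. A self-contained sketch of the normalization plus the rough estimate on synthetic data (true tau = 1.3):

    import numpy as np

    t = np.linspace(0.0, 5.0, 51)
    C = 2.5 * np.exp(-t / 1.3)  # unnormalized correlation data including t = 0

    if t[0] == 0:
        C = C / C[0]  # copy rather than /= so the caller's array is untouched
        C, t = C[t > 0], t[t > 0]

    tau_est = t[np.argmin(np.fabs(C - np.exp(-1)))]
    print(tau_est)  # 1.3 -- the grid point closest to the true tau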