6 Commits

11 changed files with 85 additions and 109 deletions

View File

@@ -26,13 +26,9 @@ time, msd = md.correlation.shifted_correlation(
 ## Installation

-=== DEPRECATED: 2025-08-19 ===
 The package requires the Python package [pygmx](https://github.com/mdevaluate/pygmx),
 which handles reading of Gromacs file formats.
 Installation of pygmx is described in its own repository.
-=== DEPRECATED: 2025-08-19 ===
-The package requires the Python package [pygmx](https://github.com/mdevaluate/pygmx),

 The mdevaluate package itself is plain Python code and, hence, can be imported from its directory directly,
 or may be installed via setuptools to the local Python environment by running
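For a plain setuptools package, that install step is typically a command along the lines of `pip install .` (or the older `python setup.py install`) run from the repository root; this is only an illustrative guess, and the README's actual command may differ.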

View File

@@ -1,71 +0,0 @@
#!/bin/bash
CONDA_VERSION=2024.10
PYTHON_VERSION=3.12
if [ -z "$1" ]; then
echo "No argument supplied, version to create expected"
exit 1
fi
if [ ! -w "/nfsopt/mdevaluate"]; then
echo "Please remount /nfsopt writable"
exit 2
fi
MD_VERSION=$1
# purge any previously loaded modules
module purge
echo "Create mdevaluate Python environemnt using conda"
echo "Using conda version: $CONDA_VERSION"
echo "Using Python version: $PYTHON_VERSION"
module load anaconda3/$CONDA_VERSION
conda create -y --prefix /nfsopt/mdevaluate/mdevaluate-${MD_VERSION} \
python=$PYTHON_VERSION
module purge
echo "Create modulefile for mdevaluate/$MD_VERSION"
cat > /nfsopt/modulefiles/mdevaluate/$MD_VERSION <<EOF
#%Module1.0#####################################################################
##
## dot modulefile
##
## modulefiles/dot. Generated from dot.in by configure.
##
module-whatis "Enables the mdevaluate Python environment."
set version ${MD_VERSION}
set module_path /nfsopt/mdevaluate/mdevaluate-\$version/bin
prepend-path PATH \$module_path
EOF
echo "Loading mdevaluate environment and install packages"
module load mdevaluate/${MD_VERSION}
pip install jupyter \
spyder \
mdanalysis \
pathos \
pandas \
dask \
sqlalchemy \
psycopg2-binary \
trimesh \
pyvista \
seaborn \
black \
black[jupyter] \
tables \
pyedr \
pytest
pip install git+https://gitea.pkm.physik.tu-darmstadt.de/IPKM/mdevaluate.git
pip install git+https://gitea.pkm.physik.tu-darmstadt.de/IPKM/python-store.git
pip install git+https://gitea.pkm.physik.tu-darmstadt.de/IPKM/python-tudplot.git
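Once this script had run, users enabled the environment through the environment-modules setup it creates, e.g. `module load mdevaluate/<version>`; as the generated modulefile shows, this simply prepends the environment's `bin` directory to `PATH`.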

View File

@@ -16,7 +16,7 @@ from . import reader
 from . import system
 from . import utils
 from . import extra
-from .logging import logger
+from .logging_util import logger


 def open(

View File

@@ -5,7 +5,7 @@ from typing import Optional, Callable, Iterable
 import numpy as np

 from .checksum import checksum
-from .logging import logger
+from .logging_util import logger

 autosave_directory: Optional[str] = None
 load_autosave_data = False

View File

@@ -1,9 +1,14 @@
 import functools
 import hashlib
-from .logging import logger
+from .logging_util import logger
 from types import ModuleType, FunctionType
 import inspect
 from typing import Iterable
+import ast
+import io
+import tokenize
+import re
+import textwrap

 import numpy as np
@@ -28,19 +33,46 @@ def version(version_nr: int, calls: Iterable = ()):
     return decorator


-def strip_comments(s: str):
-    """Strips comment lines and docstring from Python source string."""
-    o = ""
-    in_docstring = False
-    for l in s.split("\n"):
-        if l.strip().startswith(("#", '"', "'")) or in_docstring:
-            in_docstring = l.strip().startswith(('"""', "'''")) + in_docstring == 1
+def strip_comments(source: str) -> str:
+    """Removes docstrings, comments, and irrelevant whitespace from Python source code."""
+
+    # Step 1: Remove docstrings using AST
+    def remove_docstrings(node):
+        if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef, ast.Module)):
+            if (doc := ast.get_docstring(node, clean=False)):
+                first_stmt = node.body[0]
+                if isinstance(first_stmt, ast.Expr) and isinstance(first_stmt.value, ast.Constant):
+                    node.body.pop(0)  # Remove the docstring entirely
+        for child in ast.iter_child_nodes(node):
+            remove_docstrings(child)
+
+    tree = ast.parse(textwrap.dedent(source))
+    remove_docstrings(tree)
+    code_without_docstrings = ast.unparse(tree)
+
+    # Step 2: Remove comments using tokenize
+    tokens = tokenize.generate_tokens(io.StringIO(code_without_docstrings).readline)
+    result = []
+    last_lineno = -1
+    last_col = 0
+    for toknum, tokval, (srow, scol), (erow, ecol), line in tokens:
+        if toknum == tokenize.COMMENT:
             continue
-        o += l + "\n"
-    return o
+        if srow > last_lineno:
+            last_col = 0
+        if scol > last_col:
+            result.append(" " * (scol - last_col))
+        result.append(tokval)
+        last_lineno, last_col = erow, ecol
+    code_no_comments = ''.join(result)
+
+    # Step 3: Remove empty lines (whitespace-only or truly blank)
+    return "\n".join([line for line in code_no_comments.splitlines() if line.strip() != ""])


-def checksum(*args, csum=None):
+def checksum(*args, csum=None, _seen=None):
     """
     Calculate a checksum of any object, by sha1 hash.
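The rewritten helper normalizes source code in three passes (AST for docstrings, tokenize for comments, a final filter for blank lines), so it also catches inline comments and indented docstrings that the old line-based filter missed before they feed into a function's hash. A minimal sketch of the observable behavior, assuming mdevaluate is installed and the helper stays importable as `mdevaluate.checksum.strip_comments`:

    from mdevaluate.checksum import strip_comments

    src = '''
    def f(x):
        """Docstring that should not affect the checksum."""
        # neither should this comment
        return x + 1  # nor this one
    '''

    # Docstring, comments, and blank lines are stripped; only the code remains:
    #   def f(x):
    #       return x + 1
    print(strip_comments(src))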
@@ -60,7 +92,15 @@ def checksum(*args, csum=None):
         csum = hashlib.sha1()
         csum.update(str(SALT).encode())

+    if _seen is None:
+        _seen = set()
+
     for arg in args:
+        obj_id = id(arg)
+        if obj_id in _seen:
+            continue
+        _seen.add(obj_id)
+
         if hasattr(arg, "__checksum__"):
             logger.debug("Checksum via __checksum__: %s", str(arg))
             csum.update(str(arg.__checksum__()).encode())
@@ -77,15 +117,15 @@ def checksum(*args, csum=None):
             for key in sorted(merged):  # deterministic ordering
                 v = merged[key]
                 if v is not arg:
-                    checksum(v, csum=csum)
+                    checksum(v, csum=csum, _seen=_seen)
         elif isinstance(arg, functools.partial):
             logger.debug("Checksum via partial for %s", str(arg))
-            checksum(arg.func, csum=csum)
+            checksum(arg.func, csum=csum, _seen=_seen)
             for x in arg.args:
-                checksum(x, csum=csum)
+                checksum(x, csum=csum, _seen=_seen)
             for k in sorted(arg.keywords.keys()):
                 csum.update(k.encode())
-                checksum(arg.keywords[k], csum=csum)
+                checksum(arg.keywords[k], csum=csum, _seen=_seen)
         elif isinstance(arg, np.ndarray):
             csum.update(arg.tobytes())
         else:
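The new `_seen` set tracks object ids across the recursive `checksum` calls, so objects reachable more than once (including self- or mutually-referential ones) are hashed only on their first visit instead of recursing without bound. A small illustration of the directly observable effect, assuming mdevaluate is installed and `checksum` still returns a comparable digest value:

    import numpy as np
    from mdevaluate.checksum import checksum

    a = np.arange(10)

    # Within a single call, a repeated argument is hashed once: the second `a`
    # is skipped because its id() is already in _seen.
    assert checksum(a, a) == checksum(a)

    # Separate calls still agree, since _seen starts out empty on every top-level call.
    assert checksum(a) == checksum(a)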

View File

@@ -1,6 +1,6 @@
 from functools import partial, wraps
 from copy import copy
-from .logging import logger
+from .logging_util import logger
 from typing import Optional, Callable, List, Tuple

 import numpy as np

View File

@@ -431,9 +431,9 @@ def non_gaussian_parameter(
     trajectory: Coordinates = None,
     axis: str = "all",
 ) -> float:
-    """
+    r"""
     Calculate the non-Gaussian parameter.
-    ..math:
+    .. math:
       \alpha_2 (t) =
       \frac{3}{5}\frac{\langle r_i^4(t)\rangle}{\langle r_i^2(t)\rangle^2} - 1
     """

View File

@@ -7,7 +7,7 @@ from numpy.typing import ArrayLike, NDArray
 from itertools import product

-from .logging import logger
+from .logging_util import logger

 if TYPE_CHECKING:
     from mdevaluate.coordinates import CoordinateFrame
@@ -149,21 +149,32 @@ def nojump(frame: CoordinateFrame, usecache: bool = True) -> CoordinateFrame:
             i0 = 0
             delta = 0

-        delta = (delta
-            + np.vstack(
-                [m[i0 : abstep + 1].sum(axis=0) for m in reader.nojump_matrices]
-            ).T)
+        delta = (
+            delta
+            + np.array(
+                np.vstack(
+                    [m[i0 : abstep + 1].sum(axis=0) for m in reader.nojump_matrices]
+                ).T
+            )
+            @ frame.box
+        )

         reader._nojump_cache[abstep] = delta
         while len(reader._nojump_cache) > NOJUMP_CACHESIZE:
             reader._nojump_cache.popitem(last=False)
+        delta = delta[selection, :]
     else:
-        delta = np.vstack(
-            [m[: frame.step + 1, selection].sum(axis=0) for m in reader.nojump_matrices]
-        ).T
-    delta = delta[selection, :]
-    delta = np.array(delta @ frame.box)
+        delta = (
+            np.array(
+                np.vstack(
+                    [
+                        m[: frame.step + 1, selection].sum(axis=0)
+                        for m in reader.nojump_matrices
+                    ]
+                ).T
+            )
+            @ frame.box
+        )
     return frame - delta
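The restructured expression converts the accumulated jump counts to Cartesian displacements inside each branch: the summed nojump matrices (presumably one per spatial dimension) are stacked into an (n_atoms, 3) array and multiplied by the frame's box matrix, and only the cached branch, which builds the array for all atoms, still applies the `selection` subscript afterwards. The core operation, as a shape-only sketch with made-up numbers:

    import numpy as np

    # Hypothetical values, shapes only: per-atom box crossings accumulated over frames
    # (one column per spatial dimension) and a triclinic box matrix of the current frame.
    jumps = np.array([[1, 0, 0],
                      [0, -2, 1]])          # shape (n_atoms, 3)
    box = np.array([[2.0, 0.0, 0.0],
                    [0.0, 3.0, 0.0],
                    [0.5, 0.0, 4.0]])       # shape (3, 3)

    # jumps @ box maps box crossings to Cartesian displacement vectors; nojump()
    # then unwraps the trajectory by returning frame - delta.
    delta = jumps @ box
    print(delta)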

View File

@@ -19,13 +19,13 @@ import MDAnalysis
 from scipy import sparse

 from .checksum import checksum
-from .logging import logger
+from .logging_util import logger
 from . import atoms
 from .coordinates import Coordinates

 CSR_ATTRS = ("data", "indices", "indptr")
 NOJUMP_MAGIC = 2016
-Group_RE = re.compile("\[ ([-+\w]+) \]")
+Group_RE = re.compile(r"\[ ([-+\w]+) \]")


 class NojumpError(Exception):
@@ -275,7 +275,7 @@ def load_nojump_matrices(reader: BaseReader):
"Loaded Nojump matrices: {}".format(nojump_load_filename(reader)) "Loaded Nojump matrices: {}".format(nojump_load_filename(reader))
) )
else: else:
logger.info("Invlaid Nojump Data: {}".format(nojump_load_filename(reader))) logger.info("Invalid Nojump Data: {}".format(nojump_load_filename(reader)))
except KeyError: except KeyError:
logger.info("Removing zip-File: %s", zipname) logger.info("Removing zip-File: %s", zipname)
os.remove(nojump_load_filename(reader)) os.remove(nojump_load_filename(reader))

View File

@@ -14,7 +14,7 @@ from scipy.ndimage import uniform_filter1d
 from scipy.interpolate import interp1d
 from scipy.optimize import curve_fit

-from .logging import logger
+from .logging_util import logger
 from .functions import kww, kww_1e