14 Commits

Author SHA1 Message Date
4f35a234c3 updated readme 2025-08-19 15:10:02 +02:00
893bf31292 added install script for mdevaluate for shared /nfsopt 2025-08-19 15:07:52 +02:00
cf7ef06c67 Merge pull request 'apply selection and scaling with current box after delta in jumps has been cached or calculated directly. this should fix using nojump on NPT simulations' (#6) from fix/nojump_and_npt_caching into main
Reviewed-on: #6
2025-08-19 12:55:43 +00:00
robrobo 9ff3badab1 have to wrap delta with np.array to make sure it is ndarray and result stays CoordinateFrame 2025-08-14 16:33:37 +02:00
robrobo 492098fe01 apply selection and scaling with current box after delta in jumps has been cached or calculated directly. this should fix using nojump on NPT simulations 2025-08-09 16:11:24 +02:00
65ac6e9143 Merge pull request 'using pbc_diff now in tetrahedral_order parameter calculation, since reference positions will not be periodic images in the default use case' (#5) from fix/tetrahedral_order_pbc into main
Reviewed-on: #5
2025-07-21 11:57:15 +00:00
robrobo 4047db209c using pbc_diff now in tetrahedral_order parameter calculation, since reference positions will not be periodic images in the default use case 2025-07-11 20:59:30 +02:00
90bd90a608 Merge pull request 'Added some ordering to checksums from FunctionType since these could depending on input fail to be deterministic' (#2) from fix_nondeterministic_checksum into main
Reviewed-on: #2
2025-06-16 18:45:48 +00:00
robrobo 67d3e70a66 Added some ordering to checksums from FunctionType since these could depending on input fail to be deterministic 2025-06-16 20:09:50 +02:00
c09549902a Added Robins adjustments for FEL 2024-05-27 14:27:09 +02:00
b7bb8cb379 Merge branch 'refs/heads/mdeval_dev' 2024-05-02 16:18:18 +02:00
7b9f8b6773 Merge branch 'mdeval_dev' 2024-03-05 13:58:15 +01:00
31eb145a13 Merge branch 'mdeval_dev'
# Conflicts:
#	src/mdevaluate/coordinates.py
2024-02-26 14:20:12 +01:00
b5395098ce Fixed iter of Coordinates after last change 2024-02-26 13:57:44 +01:00
6 changed files with 103 additions and 32 deletions

@@ -26,9 +26,13 @@ time, msd = md.correlation.shifted_correlation(
 ## Installation
+=== DEPRECATED: 2025-08-19 ===
 The package requires the Python package [pygmx](https://github.com/mdevaluate/pygmx),
 which handles reading of Gromacs file formats.
 Installation of pygmx is described in its own repository.
+=== DEPRECATED: 2025-08-19 ===
 The mdevaluate package itself is plain Python code and, hence, can be imported from its directory directly,
 or may be installed via setuptools to the local Python environment by running
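For context, the hunk header above shows the README's usage example (`time, msd = md.correlation.shifted_correlation(`). A minimal sketch of that workflow follows; the simulation path, file names, the "OW" atom selection and the MSD observable are plausible examples filled in for illustration, not part of the diff.

import mdevaluate as md

# Open a simulation directory; topology/trajectory file names are assumptions.
traj = md.open("/path/to/simulation", topology="topol.tpr", trajectory="traj.xtc")

# Select water oxygens ("OW" is an assumption for a typical GROMACS water model).
oxygen = traj.subset(atom_name="OW")

# Mean squared displacement via shifted_correlation, as in the README example.
time, msd = md.correlation.shifted_correlation(md.correlation.msd, oxygen)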

create_mdevaluate_module.sh Executable file (+71 lines)

@@ -0,0 +1,71 @@
#!/bin/bash
CONDA_VERSION=2024.10
PYTHON_VERSION=3.12
if [ -z "$1" ]; then
echo "No argument supplied, version to create expected"
exit 1
fi
if [ ! -w "/nfsopt/mdevaluate"]; then
echo "Please remount /nfsopt writable"
exit 2
fi
MD_VERSION=$1
# purge any modules that may already be loaded
module purge
echo "Create mdevaluate Python environemnt using conda"
echo "Using conda version: $CONDA_VERSION"
echo "Using Python version: $PYTHON_VERSION"
module load anaconda3/$CONDA_VERSION
conda create -y --prefix /nfsopt/mdevaluate/mdevaluate-${MD_VERSION} \
python=$PYTHON_VERSION
module purge
echo "Create modulefile for mdevaluate/$MD_VERSION"
cat > /nfsopt/modulefiles/mdevaluate/$MD_VERSION <<EOF
#%Module1.0#####################################################################
##
## dot modulefile
##
## modulefiles/dot. Generated from dot.in by configure.
##
module-whatis "Enables the mdevaluate Python environment."
set version ${MD_VERSION}
set module_path /nfsopt/mdevaluate/mdevaluate-\$version/bin
prepend-path PATH \$module_path
EOF
echo "Loading mdevaluate environment and install packages"
module load mdevaluate/${MD_VERSION}
pip install jupyter \
spyder \
mdanalysis \
pathos \
pandas \
dask \
sqlalchemy \
psycopg2-binary \
trimesh \
pyvista \
seaborn \
black \
black[jupyter] \
tables \
pyedr \
pytest
pip install git+https://gitea.pkm.physik.tu-darmstadt.de/IPKM/mdevaluate.git
pip install git+https://gitea.pkm.physik.tu-darmstadt.de/IPKM/python-store.git
pip install git+https://gitea.pkm.physik.tu-darmstadt.de/IPKM/python-tudplot.git
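Not part of the script: a minimal Python smoke test one might run after `module load mdevaluate/<version>` to confirm that the environment created above is usable. The package list mirrors the pip call; note that some import names differ from the pip names (e.g. MDAnalysis).

import importlib

# Import names corresponding to (a subset of) the packages installed above.
packages = ["mdevaluate", "MDAnalysis", "pathos", "pandas", "dask",
            "sqlalchemy", "trimesh", "pyvista", "seaborn", "tables", "pyedr", "pytest"]

for name in packages:
    try:
        module = importlib.import_module(name)
        print(f"{name}: {getattr(module, '__version__', 'ok')}")
    except ImportError as exc:
        print(f"{name}: MISSING ({exc})")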

@@ -73,7 +73,9 @@ def checksum(*args, csum=None):
         elif isinstance(arg, FunctionType):
             csum.update(strip_comments(inspect.getsource(arg)).encode())
             c = inspect.getclosurevars(arg)
-            for v in {**c.nonlocals, **c.globals}.values():
+            merged = {**c.nonlocals, **c.globals}
+            for key in sorted(merged):  # deterministic ordering
+                v = merged[key]
                 if v is not arg:
                     checksum(v, csum=csum)
         elif isinstance(arg, functools.partial):
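The change above iterates the merged closure and global variables in sorted-key order, so the hash no longer depends on dict iteration order. A standalone sketch of the same idea (not the library's `checksum`; the helper name and the SHA-1 choice are assumptions):

import hashlib
import inspect
from types import FunctionType


def function_fingerprint(func: FunctionType) -> str:
    """Hash a function's source plus its nonlocal/global values in key-sorted order."""
    digest = hashlib.sha1()
    digest.update(inspect.getsource(func).encode())
    closure = inspect.getclosurevars(func)
    merged = {**closure.nonlocals, **closure.globals}
    for key in sorted(merged):          # fixed iteration order -> reproducible digest
        value = merged[key]
        if value is not func:           # do not recurse into the function itself
            digest.update(f"{key}={value!r}".encode())
    return digest.hexdigest()


SCALE = 3


def scaled(x):
    return SCALE * x


print(function_fingerprint(scaled))     # identical output on every run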

@@ -182,10 +182,10 @@ def tetrahedral_order(
     )
     # Connection vectors
-    neighbors_1 -= atoms
-    neighbors_2 -= atoms
-    neighbors_3 -= atoms
-    neighbors_4 -= atoms
+    neighbors_1 = pbc_diff(neighbors_1, atoms, box=atoms.box)
+    neighbors_2 = pbc_diff(neighbors_2, atoms, box=atoms.box)
+    neighbors_3 = pbc_diff(neighbors_3, atoms, box=atoms.box)
+    neighbors_4 = pbc_diff(neighbors_4, atoms, box=atoms.box)
     # Normed Connection vectors
     neighbors_1 /= np.linalg.norm(neighbors_1, axis=-1).reshape(-1, 1)
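`pbc_diff` replaces the plain differences so that the connection vectors obey the minimum-image convention. A standalone sketch of a minimum-image difference for an orthorhombic box (mdevaluate's `pbc_diff` also covers the non-orthorhombic cases):

import numpy as np


def minimum_image_diff(a, b, box):
    """Minimum-image displacement a - b for an orthorhombic box (sketch only)."""
    box_lengths = np.diag(box) if np.ndim(box) == 2 else np.asarray(box)
    d = np.asarray(a) - np.asarray(b)
    return d - box_lengths * np.round(d / box_lengths)


box = np.diag([3.0, 3.0, 3.0])
center = np.array([0.1, 0.1, 0.1])
neighbor = np.array([2.9, 0.1, 0.1])                 # nearest periodic image sits at -0.1

print(neighbor - center)                             # naive difference: [ 2.8  0.  0. ]
print(minimum_image_diff(neighbor, center, box))     # minimum image:    [-0.2  0.  0. ]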

@@ -4,7 +4,6 @@ from typing import Optional
 import numpy as np
 from numpy.typing import ArrayLike, NDArray
 from numpy.polynomial.polynomial import Polynomial as Poly
-import math
 from scipy.spatial import KDTree
 import pandas as pd
 import multiprocessing as mp
@@ -49,7 +48,7 @@ def _pbc_points_reduced(
 def _build_tree(points, box, r_max, pore_geometry):
     if np.all(np.diag(np.diag(box)) == box):
-        tree = KDTree(points, boxsize=box)
+        tree = KDTree(points % box, boxsize=box)
         points_pbc_index = None
     else:
         points_pbc, points_pbc_index = _pbc_points_reduced(
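Background for the `points % box` change: `scipy.spatial.KDTree` with a periodic `boxsize` rejects coordinates outside [0, L), so the points are wrapped back into the primary cell first. A standalone illustration, assuming an orthorhombic box given as per-dimension edge lengths:

import numpy as np
from scipy.spatial import KDTree

box = np.array([10.0, 10.0, 10.0])                   # orthorhombic edge lengths
points = np.array([[1.0, 2.0, 3.0],
                   [10.2, -0.3, 5.0]])               # second point lies outside the box

try:
    KDTree(points, boxsize=box)                      # unwrapped points are rejected
except ValueError as exc:
    print("unwrapped:", exc)

tree = KDTree(points % box, boxsize=box)             # wrapped points are accepted
print(tree.query([9.9, 9.5, 5.0]))                   # nearest neighbour found across the periodic boundary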
@@ -79,8 +78,7 @@ def occupation_matrix(
     z_bins = np.arange(0, box[2][2] + edge_length, edge_length)
     bins = [x_bins, y_bins, z_bins]
     # Trajectory is split for parallel computing
-    size = math.ceil(len(frame_indices) / nodes)
-    indices = [frame_indices[i : i + size] for i in range(0, len(frame_indices), size)]
+    indices = np.array_split(frame_indices, nodes)
     pool = mp.Pool(nodes)
     results = pool.map(
         partial(_calc_histogram, trajectory=trajectory, bins=bins), indices
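`np.array_split` replaces the manual `math.ceil` chunking: it always returns exactly `nodes` chunks of near-equal size, whereas the old scheme could produce fewer chunks than workers. A small illustration:

import numpy as np

frame_indices = np.arange(9)
nodes = 4

# np.array_split: exactly `nodes` chunks, remainder spread over the leading chunks.
print([c.tolist() for c in np.array_split(frame_indices, nodes)])
# [[0, 1, 2], [3, 4], [5, 6], [7, 8]]

# Old scheme: ceil(9 / 4) == 3 elements per chunk -> only 3 chunks for 4 workers.
size = -(-len(frame_indices) // nodes)
print([frame_indices[i:i + size].tolist() for i in range(0, len(frame_indices), size)])
# [[0, 1, 2], [3, 4, 5], [6, 7, 8]]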
@@ -274,7 +272,11 @@ def distance_resolved_energies(
 def find_energy_maxima(
-    energy_df: pd.DataFrame, r_min: float, r_max: float
+    energy_df: pd.DataFrame,
+    r_min: float,
+    r_max: float,
+    r_eval: float = None,
+    degree: int = 2,
 ) -> pd.DataFrame:
     distances = []
     energies = []
@@ -283,6 +285,9 @@ def find_energy_maxima(
x = np.array(data_d["r"]) x = np.array(data_d["r"])
y = np.array(data_d["energy"]) y = np.array(data_d["energy"])
mask = (x >= r_min) * (x <= r_max) mask = (x >= r_min) * (x <= r_max)
p3 = Poly.fit(x[mask], y[mask], deg=2) p3 = Poly.fit(x[mask], y[mask], deg=degree)
if r_eval is None:
energies.append(np.max(p3(np.linspace(r_min, r_max, 1000)))) energies.append(np.max(p3(np.linspace(r_min, r_max, 1000))))
else:
energies.append(p3(r_eval))
return pd.DataFrame({"d": distances, "energy": energies}) return pd.DataFrame({"d": distances, "energy": energies})

@@ -149,32 +149,21 @@ def nojump(frame: CoordinateFrame, usecache: bool = True) -> CoordinateFrame:
             i0 = 0
             delta = 0
-        delta = (
-            delta
-            + np.array(
-                np.vstack(
-                    [m[i0 : abstep + 1].sum(axis=0) for m in reader.nojump_matrices]
-                ).T
-            )
-            @ frame.box
-        )
+        delta = (delta
+            + np.vstack(
+                [m[i0 : abstep + 1].sum(axis=0) for m in reader.nojump_matrices]
+            ).T)
         reader._nojump_cache[abstep] = delta
         while len(reader._nojump_cache) > NOJUMP_CACHESIZE:
             reader._nojump_cache.popitem(last=False)
         delta = delta[selection, :]
     else:
-        delta = (
-            np.array(
-                np.vstack(
-                    [
-                        m[: frame.step + 1, selection].sum(axis=0)
-                        for m in reader.nojump_matrices
-                    ]
-                ).T
-            )
-            @ frame.box
-        )
+        delta = np.vstack(
+            [m[: frame.step + 1, selection].sum(axis=0) for m in reader.nojump_matrices]
+        ).T
+    delta = np.array(delta @ frame.box)
     return frame - delta
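The essence of this change: the cached quantity is now the box-independent jump count, and the selection plus the multiplication with the current frame's box are applied afterwards, so the unwrapping stays correct when the box fluctuates (NPT). The final `np.array(...)` keeps `delta` a plain ndarray so that `frame - delta` still returns a CoordinateFrame. A small numeric sketch with made-up jump counts and boxes (none of the reader/cache machinery):

import numpy as np

# Made-up integer jump counts per atom (in units of box vectors), box-independent.
jump_counts = np.array([[1, 0, 0],
                        [0, -1, 2]])
selection = np.array([0, 1])                          # trivial selection, for illustration

box_a = np.diag([3.00, 3.00, 3.00])                   # box of one frame
box_b = np.diag([3.05, 3.05, 3.05])                   # box of a later frame (NPT: it changed)

# New order of operations: select first, then scale with the box of the frame
# that is currently being unwrapped.
delta_a = np.array(jump_counts[selection, :] @ box_a)
delta_b = np.array(jump_counts[selection, :] @ box_b)

print(delta_a)    # displacements consistent with frame A's box
print(delta_b)    # same jump counts, rescaled to frame B's box

# Caching `jump_counts @ box` instead (the old behaviour) would freeze whichever
# box happened to fill the cache first.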