Compare commits

7 Commits: 00043637e9 ... main
| Author | SHA1 | Date |
|---|---|---|
|  | 4f35a234c3 |  |
|  | 893bf31292 |  |
|  | cf7ef06c67 |  |
|  | 9ff3badab1 |  |
|  | 492098fe01 |  |
|  | 65ac6e9143 |  |
|  | 4047db209c |  |
@@ -26,9 +26,13 @@ time, msd = md.correlation.shifted_correlation(

## Installation

=== DEPRECATED: 2025-08-19 ===
The package requires the Python package [pygmx](https://github.com/mdevaluate/pygmx),
which handles reading of Gromacs file formats.
Installation of pygmx is described in its own repository.
=== DEPRECATED: 2025-08-19 ===

The package requires the Python package [pygmx](https://github.com/mdevaluate/pygmx),

The mdevaluate package itself is plain Python code and, hence, can be imported from its directory directly,
or may be installed via setuptools to the local Python environment by running

							
								
								
									
create_mdevaluate_module.sh (new executable file, 71 lines)

@@ -0,0 +1,71 @@
#!/bin/bash

CONDA_VERSION=2024.10
PYTHON_VERSION=3.12

if [ -z "$1" ]; then
    echo "No argument supplied, version to create expected"
    exit 1
fi


if [ ! -w "/nfsopt/mdevaluate" ]; then
    echo "Please remount /nfsopt writable"
    exit 2
fi

MD_VERSION=$1

# purge any previously loaded modules
module purge


echo "Create mdevaluate Python environment using conda"
echo "Using conda version: $CONDA_VERSION"
echo "Using Python version: $PYTHON_VERSION"

module load anaconda3/$CONDA_VERSION
conda create -y --prefix /nfsopt/mdevaluate/mdevaluate-${MD_VERSION} \
             python=$PYTHON_VERSION
module purge

echo "Create modulefile for mdevaluate/$MD_VERSION"
cat > /nfsopt/modulefiles/mdevaluate/$MD_VERSION <<EOF
#%Module1.0#####################################################################
##
## dot modulefile
##
## modulefiles/dot.  Generated from dot.in by configure.
##

module-whatis "Enables the mdevaluate Python environment."

set version ${MD_VERSION}
set module_path /nfsopt/mdevaluate/mdevaluate-\$version/bin

prepend-path PATH \$module_path

EOF

echo "Loading mdevaluate environment and installing packages"
module load mdevaluate/${MD_VERSION}
pip install jupyter \
spyder \
mdanalysis \
pathos \
pandas \
dask \
sqlalchemy \
psycopg2-binary \
trimesh \
pyvista \
seaborn \
black \
black[jupyter] \
tables \
pyedr \
pytest

pip install git+https://gitea.pkm.physik.tu-darmstadt.de/IPKM/mdevaluate.git
pip install git+https://gitea.pkm.physik.tu-darmstadt.de/IPKM/python-store.git
pip install git+https://gitea.pkm.physik.tu-darmstadt.de/IPKM/python-tudplot.git
@@ -16,7 +16,7 @@ from . import reader
from . import system
from . import utils
from . import extra
from .logging_util import logger
from .logging import logger


def open(

@@ -5,7 +5,7 @@ from typing import Optional, Callable, Iterable

import numpy as np
from .checksum import checksum
from .logging_util import logger
from .logging import logger

autosave_directory: Optional[str] = None
load_autosave_data = False

@@ -1,14 +1,9 @@
import functools
import hashlib
from .logging_util import logger
from .logging import logger
from types import ModuleType, FunctionType
import inspect
from typing import Iterable
import ast
import io
import tokenize
import re
import textwrap

import numpy as np
@@ -33,46 +28,19 @@ def version(version_nr: int, calls: Iterable = ()):
    return decorator


def strip_comments(source: str) -> str:
    """Removes docstrings, comments, and irrelevant whitespace from Python source code."""

    # Step 1: Remove docstrings using AST
    def remove_docstrings(node):
        if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef, ast.Module)):
            if (doc := ast.get_docstring(node, clean=False)):
                first_stmt = node.body[0]
                if isinstance(first_stmt, ast.Expr) and isinstance(first_stmt.value, ast.Constant):
                    node.body.pop(0)  # Remove the docstring entirely
        for child in ast.iter_child_nodes(node):
            remove_docstrings(child)

    tree = ast.parse(textwrap.dedent(source))
    remove_docstrings(tree)
    code_without_docstrings = ast.unparse(tree)

    # Step 2: Remove comments using tokenize
    tokens = tokenize.generate_tokens(io.StringIO(code_without_docstrings).readline)
    result = []
    last_lineno = -1
    last_col = 0

    for toknum, tokval, (srow, scol), (erow, ecol), line in tokens:
        if toknum == tokenize.COMMENT:
            continue
        if srow > last_lineno:
            last_col = 0
        if scol > last_col:
            result.append(" " * (scol - last_col))
        result.append(tokval)
        last_lineno, last_col = erow, ecol

    code_no_comments = ''.join(result)

    # Step 3: Remove empty lines (whitespace-only or truly blank)
    return "\n".join([line for line in code_no_comments.splitlines() if line.strip() != ""])

def strip_comments(s: str):
    """Strips comment lines and docstring from Python source string."""
    o = ""
    in_docstring = False
    for l in s.split("\n"):
        if l.strip().startswith(("#", '"', "'")) or in_docstring:
            in_docstring = l.strip().startswith(('"""', "'''")) + in_docstring == 1
            continue
        o += l + "\n"
    return o


def checksum(*args, csum=None, _seen=None):
def checksum(*args, csum=None):
    """
    Calculate a checksum of any object, by sha1 hash.
@@ -92,15 +60,7 @@ def checksum(*args, csum=None, _seen=None):
        csum = hashlib.sha1()
        csum.update(str(SALT).encode())

    if _seen is None:
        _seen = set()

    for arg in args:
        obj_id = id(arg)
        if obj_id in _seen:
            continue
        _seen.add(obj_id)

        if hasattr(arg, "__checksum__"):
            logger.debug("Checksum via __checksum__: %s", str(arg))
            csum.update(str(arg.__checksum__()).encode())
@@ -117,15 +77,15 @@ def checksum(*args, csum=None, _seen=None):
            for key in sorted(merged):  # deterministic ordering
                v = merged[key]
                if v is not arg:
                    checksum(v, csum=csum, _seen=_seen)
                    checksum(v, csum=csum)
        elif isinstance(arg, functools.partial):
            logger.debug("Checksum via partial for %s", str(arg))
            checksum(arg.func, csum=csum, _seen=_seen)
            checksum(arg.func, csum=csum)
            for x in arg.args:
                checksum(x, csum=csum, _seen=_seen)
                checksum(x, csum=csum)
            for k in sorted(arg.keywords.keys()):
                csum.update(k.encode())
                checksum(arg.keywords[k], csum=csum, _seen=_seen)
                checksum(arg.keywords[k], csum=csum)
        elif isinstance(arg, np.ndarray):
            csum.update(arg.tobytes())
        else:

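For reference, the shorter line-based `strip_comments` from the hunk above can be exercised directly. The sketch below copies that function verbatim rather than importing it from the package, so no module layout is assumed; the sample source string is made up for the illustration.

```python
def strip_comments(s: str):
    """Strips comment lines and docstring from Python source string."""
    o = ""
    in_docstring = False
    for l in s.split("\n"):
        if l.strip().startswith(("#", '"', "'")) or in_docstring:
            in_docstring = l.strip().startswith(('"""', "'''")) + in_docstring == 1
            continue
        o += l + "\n"
    return o


source = '''def shift(x):
    """Shift x by one.

    Used only for this illustration.
    """
    # this comment should disappear
    return x + 1
'''

# Keeps only the `def` and `return` lines (plus trailing blank lines);
# the docstring block and the comment line are dropped.
print(strip_comments(source))
```

The longer AST/tokenize variant in the same hunk additionally strips inline comments after code and handles single-line docstrings, which the line-based version does not.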
@@ -1,6 +1,6 @@
from functools import partial, wraps
from copy import copy
from .logging_util import logger
from .logging import logger
from typing import Optional, Callable, List, Tuple

import numpy as np

@@ -431,9 +431,9 @@ def non_gaussian_parameter(
    trajectory: Coordinates = None,
    axis: str = "all",
) -> float:
    r"""
    """
    Calculate the non-Gaussian parameter.
    .. math:
    ..math:
      \alpha_2 (t) =
        \frac{3}{5}\frac{\langle r_i^4(t)\rangle}{\langle r_i^2(t)\rangle^2} - 1
    """

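The formula in this docstring can be sanity-checked numerically. The following is a minimal sketch with synthetic, made-up displacement data (not part of the package); for Gaussian displacements the non-Gaussian parameter should come out close to zero.

```python
import numpy as np

# alpha_2 = (3/5) * <r^4> / <r^2>^2 - 1, evaluated for synthetic 3D displacements.
rng = np.random.default_rng(1)
displacements = rng.normal(size=(100_000, 3))  # Gaussian test case

r2 = np.sum(displacements**2, axis=1)          # squared displacement r^2
alpha2 = 3 / 5 * np.mean(r2**2) / np.mean(r2) ** 2 - 1

print(alpha2)  # ~0 for Gaussian displacements
```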
@@ -7,7 +7,7 @@ from numpy.typing import ArrayLike, NDArray

from itertools import product

from .logging_util import logger
from .logging import logger

if TYPE_CHECKING:
    from mdevaluate.coordinates import CoordinateFrame
@@ -149,32 +149,21 @@ def nojump(frame: CoordinateFrame, usecache: bool = True) -> CoordinateFrame:
            i0 = 0
            delta = 0

        delta = (
            delta
            + np.array(
                np.vstack(
        delta = (delta
            + np.vstack(
                [m[i0 : abstep + 1].sum(axis=0) for m in reader.nojump_matrices]
                ).T
            )
            @ frame.box
        )
            ).T)

        reader._nojump_cache[abstep] = delta
        while len(reader._nojump_cache) > NOJUMP_CACHESIZE:
            reader._nojump_cache.popitem(last=False)
        delta = delta[selection, :]
    else:
        delta = (
            np.array(
                np.vstack(
                    [
                        m[: frame.step + 1, selection].sum(axis=0)
                        for m in reader.nojump_matrices
                    ]
        delta = np.vstack(
                [m[: frame.step + 1, selection].sum(axis=0) for m in reader.nojump_matrices]
                ).T
            )
            @ frame.box
        )

    delta = delta[selection, :]
    delta = np.array(delta @ frame.box)
    return frame - delta


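Both layouts of the code in this hunk compute the same quantity: per-dimension jump counts summed over the elapsed steps, stacked into an (N, 3) array, and converted to Cartesian displacements through the box matrix. The toy sketch below illustrates that pattern with small, made-up dense arrays; the real code works on the reader's sparse nojump matrices and caches intermediate results.

```python
import numpy as np

# Made-up jump data: one (n_steps, n_atoms) matrix of box crossings per
# spatial dimension, entries in {-1, 0, 1}.
rng = np.random.default_rng(0)
n_steps, n_atoms = 5, 4
jump_matrices = [rng.integers(-1, 2, size=(n_steps, n_atoms)) for _ in range(3)]

box = np.diag([3.0, 3.0, 3.0])           # box vectors (orthorhombic here)
frame = rng.random((n_atoms, 3)) * 3.0   # wrapped coordinates at `step`
step = n_steps - 1

# Sum the crossings up to the current step (one column per dimension),
# then turn the integer jump counts into Cartesian displacements.
delta = np.vstack([m[: step + 1].sum(axis=0) for m in jump_matrices]).T  # (n_atoms, 3)
unwrapped = frame - delta @ box

print(unwrapped)
```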
@@ -19,13 +19,13 @@ import MDAnalysis
from scipy import sparse

from .checksum import checksum
from .logging_util import logger
from .logging import logger
from . import atoms
from .coordinates import Coordinates

CSR_ATTRS = ("data", "indices", "indptr")
NOJUMP_MAGIC = 2016
Group_RE = re.compile(r"\[ ([-+\w]+) \]")
Group_RE = re.compile("\[ ([-+\w]+) \]")


class NojumpError(Exception):

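The only difference between the two `Group_RE` lines above is the raw-string prefix: both compile to the same pattern, but the plain string relies on unrecognized escape sequences (`\[`, `\w`, `\]`), which newer Python versions warn about (a SyntaxWarning as of Python 3.12). A minimal sketch of what the pattern matches, using the raw-string form; the sample line mimics a Gromacs index-file group header:

```python
import re

# Raw-string form: same compiled pattern, no escape-sequence warnings.
GROUP_RE = re.compile(r"\[ ([-+\w]+) \]")

line = "[ SOL ]"  # group header in the style of a Gromacs index file
match = GROUP_RE.search(line)
if match:
    print(match.group(1))  # -> SOL
```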
@@ -14,7 +14,7 @@ from scipy.ndimage import uniform_filter1d
from scipy.interpolate import interp1d
from scipy.optimize import curve_fit

from .logging_util import logger
from .logging import logger
from .functions import kww, kww_1e

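The imports touched in this last hunk (`curve_fit` alongside `kww` and `kww_1e`) point at stretched-exponential fitting of relaxation data. The sketch below is self-contained and defines its own Kohlrausch-Williams-Watts function as a stand-in; the exact signature of `mdevaluate.functions.kww` is not assumed, and the data are synthetic.

```python
import numpy as np
from scipy.optimize import curve_fit


def kww(t, amplitude, tau, beta):
    # Stand-in stretched exponential; the package's own kww may differ in argument order.
    return amplitude * np.exp(-((t / tau) ** beta))


t = np.logspace(-1, 3, 50)
rng = np.random.default_rng(2)
data = kww(t, 1.0, 42.0, 0.7) + rng.normal(0.0, 0.01, t.size)  # synthetic decay

popt, _ = curve_fit(kww, t, data, p0=(1.0, 10.0, 0.5))
print(popt)  # roughly (1.0, 42.0, 0.7)
```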