Compare commits

36 Commits

02fed343f0 ... refactor_l
| Author | SHA1 | Date |
|---|---|---|
|  | e124506d10 |  |
|  | 8169e76964 |  |
|  | 65ac6e9143 |  |
|  | 4047db209c |  |
|  | 00043637e9 |  |
|  | 7585e598dc |  |
|  | 6d8b86c1ef |  |
|  | a2a0ae8d7b |  |
|  | 90bd90a608 |  |
|  | 67d3e70a66 |  |
|  | c09549902a |  |
|  | b7bb8cb379 |  |
|  | 33c4756e34 |  |
|  | 7b9f8b6773 |  |
|  | c89cead81c |  |
|  | 31eb145a13 |  |
|  | af3758cbef |  |
|  | 93d020a4de |  |
|  | b5395098ce |  |
|  | 5e80701562 |  |
|  | 363e420cd8 |  |
|  | 6b77ef78e1 |  |
|  | 0c940115af |  |
|  | b0f29907df |  |
|  | 37bf496b21 |  |
|  | befaef2dfa |  |
|  | 8ea7da5d2f |  |
|  | b405842452 |  |
|  | f5cf453d61 |  |
|  | 4394f70530 |  |
|  | 298da3818d |  |
|  | d9278eed83 |  |
|  | 787882810c |  |
|  | f527d25864 |  |
|  | 77771738ab |  |
|  | 3e8fd04726 |  |
```diff
@@ -4,11 +4,12 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "mdevaluate"
-version = "24.01"
+version = "24.02"
 dependencies = [
     "mdanalysis",
     "pandas",
     "dask",
     "pathos",
-    "tables"
+    "tables",
+    "pyedr"
 ]
```
```diff
@@ -4,3 +4,4 @@ dask
 pathos
 tables
 pytest
+pyedr
```
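The new `pyedr` dependency reads GROMACS energy (`.edr`) files into plain NumPy data. A minimal sketch of the kind of use this enables; the file name is a placeholder and `edr_to_dict` is assumed to be pyedr's dictionary-returning entry point:

```python
# Hedged sketch: "ener.edr" is a placeholder path; edr_to_dict is
# assumed to be pyedr's reader returning a dict of NumPy arrays.
import pyedr

energy_terms = pyedr.edr_to_dict("ener.edr")
print(list(energy_terms))         # e.g. "Time", "Potential", "Temperature", ...
print(energy_terms["Time"][:5])   # each term maps to a NumPy array
```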
```diff
@@ -16,7 +16,7 @@ from . import reader
 from . import system
 from . import utils
 from . import extra
-from .logging import logger
+from .logging_util import logger
 
 
 def open(
```
```diff
@@ -5,7 +5,7 @@ from typing import Optional, Callable, Iterable
 
 import numpy as np
 from .checksum import checksum
-from .logging import logger
+from .logging_util import logger
 
 autosave_directory: Optional[str] = None
 load_autosave_data = False
```
```diff
@@ -1,9 +1,14 @@
 import functools
 import hashlib
-from .logging import logger
+from .logging_util import logger
 from types import ModuleType, FunctionType
 import inspect
 from typing import Iterable
+import ast
+import io
+import tokenize
+import re
+import textwrap
 
 import numpy as np
```
```diff
@@ -28,19 +33,46 @@ def version(version_nr: int, calls: Iterable = ()):
     return decorator
 
 
-def strip_comments(s: str):
-    """Strips comment lines and docstring from Python source string."""
-    o = ""
-    in_docstring = False
-    for l in s.split("\n"):
-        if l.strip().startswith(("#", '"', "'")) or in_docstring:
-            in_docstring = l.strip().startswith(('"""', "'''")) + in_docstring == 1
-            continue
-        o += l + "\n"
-    return o
+def strip_comments(source: str) -> str:
+    """Removes docstrings, comments, and irrelevant whitespace from Python source code."""
+
+    # Step 1: Remove docstrings using AST
+    def remove_docstrings(node):
+        if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef, ast.Module)):
+            if (doc := ast.get_docstring(node, clean=False)):
+                first_stmt = node.body[0]
+                if isinstance(first_stmt, ast.Expr) and isinstance(first_stmt.value, ast.Constant):
+                    node.body.pop(0)  # Remove the docstring entirely
+        for child in ast.iter_child_nodes(node):
+            remove_docstrings(child)
+
+    tree = ast.parse(textwrap.dedent(source))
+    remove_docstrings(tree)
+    code_without_docstrings = ast.unparse(tree)
+
+    # Step 2: Remove comments using tokenize
+    tokens = tokenize.generate_tokens(io.StringIO(code_without_docstrings).readline)
+    result = []
+    last_lineno = -1
+    last_col = 0
+
+    for toknum, tokval, (srow, scol), (erow, ecol), line in tokens:
+        if toknum == tokenize.COMMENT:
+            continue
+        if srow > last_lineno:
+            last_col = 0
+        if scol > last_col:
+            result.append(" " * (scol - last_col))
+        result.append(tokval)
+        last_lineno, last_col = erow, ecol
+
+    code_no_comments = ''.join(result)
+
+    # Step 3: Remove empty lines (whitespace-only or truly blank)
+    return "\n".join([line for line in code_no_comments.splitlines() if line.strip() != ""])
 
 
-def checksum(*args, csum=None):
+def checksum(*args, csum=None, _seen=None):
     """
     Calculate a checksum of any object, by sha1 hash.
 
```
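The rewritten `strip_comments` normalizes source through an AST round-trip (Python ≥ 3.9, since it relies on `ast.unparse`) before hashing, so a checksum no longer changes when only comments, docstrings, or blank lines are edited. A small sketch of the intended invariant, using `strip_comments` as defined above:

```python
# Two sources differing only in a docstring and a comment should
# normalize to the same string, keeping their checksums identical.
src_a = '''
def f(x):
    """Doubles x."""
    # multiply by two
    return 2 * x
'''
src_b = '''
def f(x):
    return 2 * x
'''
assert strip_comments(src_a) == strip_comments(src_b)
```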
```diff
@@ -60,7 +92,15 @@ def checksum(*args, csum=None):
         csum = hashlib.sha1()
         csum.update(str(SALT).encode())
 
+    if _seen is None:
+        _seen = set()
+
     for arg in args:
+        obj_id = id(arg)
+        if obj_id in _seen:
+            continue
+        _seen.add(obj_id)
+
         if hasattr(arg, "__checksum__"):
             logger.debug("Checksum via __checksum__: %s", str(arg))
             csum.update(str(arg.__checksum__()).encode())
```
```diff
@@ -73,17 +113,19 @@ def checksum(*args, csum=None):
         elif isinstance(arg, FunctionType):
             csum.update(strip_comments(inspect.getsource(arg)).encode())
             c = inspect.getclosurevars(arg)
-            for v in {**c.nonlocals, **c.globals}.values():
+            merged = {**c.nonlocals, **c.globals}
+            for key in sorted(merged):  # deterministic ordering
+                v = merged[key]
                 if v is not arg:
-                    checksum(v, csum=csum)
+                    checksum(v, csum=csum, _seen=_seen)
         elif isinstance(arg, functools.partial):
             logger.debug("Checksum via partial for %s", str(arg))
-            checksum(arg.func, csum=csum)
+            checksum(arg.func, csum=csum, _seen=_seen)
             for x in arg.args:
-                checksum(x, csum=csum)
+                checksum(x, csum=csum, _seen=_seen)
             for k in sorted(arg.keywords.keys()):
                 csum.update(k.encode())
-                checksum(arg.keywords[k], csum=csum)
+                checksum(arg.keywords[k], csum=csum, _seen=_seen)
         elif isinstance(arg, np.ndarray):
             csum.update(arg.tobytes())
         else:
```
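Threading the `_seen` set of object ids through every recursive call makes the traversal cycle-safe: an object already visited in the current traversal is skipped instead of recursed into. The old `if v is not arg` guard only caught direct self-reference, not mutual recursion. A sketch of the failure mode the change guards against:

```python
# Without _seen, hashing `even` would recurse even -> odd -> even -> ...
# through getclosurevars; with it, each function is visited at most once.
from mdevaluate.checksum import checksum  # module shown in the diff above

def even(n):
    return n == 0 or odd(n - 1)

def odd(n):
    return n != 0 and even(n - 1)

print(checksum(even))  # terminates
```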
```diff
@@ -1,6 +1,6 @@
 from functools import partial, wraps
 from copy import copy
-from .logging import logger
+from .logging_util import logger
 from typing import Optional, Callable, List, Tuple
 
 import numpy as np
```
```diff
@@ -218,7 +218,7 @@ class Coordinates:
             self.get_frame.clear_cache()
 
     def __iter__(self):
-        for i in range(len(self))[self._slice]:
+        for i in range(len(self.frames))[self._slice]:
             yield self[i]
 
     @singledispatchmethod
@@ -232,7 +232,7 @@ class Coordinates:
         return sliced
 
     def __len__(self):
-        return len(self.frames)
+        return len(self.frames[self._slice])
 
     def __checksum__(self):
         return checksum(self.frames, self.atom_filter, self._slice, self.mode)
```
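Together the two changes make slicing self-consistent: `__len__` now reports the length of the sliced view, while `__iter__` indexes the full frame list through `_slice`. A sketch of the resulting semantics, assuming slicing a `Coordinates` returns a view with `_slice` set (as the `singledispatchmethod` branch above suggests):

```python
# Assuming `traj` is a Coordinates object over 1000 underlying frames:
sub = traj[::10]      # view with _slice = slice(None, None, 10)
print(len(sub))       # now 100 (length of frames[_slice]), no longer 1000
frames = list(sub)    # __iter__ yields exactly those 100 frames
```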
```diff
@@ -692,10 +692,6 @@ def number_of_neighbors(
     elif not distinct:
         dnn = 1
 
-    print(atoms[:5])
-    print(query_atoms[:5])
-    print(" ")
-
     box = atoms.box
     if np.all(np.diag(np.diag(box)) == box):
         atoms = atoms % np.diag(box)
```
```diff
@@ -2,7 +2,7 @@ from typing import Callable, Optional
 
 import numpy as np
 from numpy.typing import ArrayLike
-from scipy.special import legendre
+from scipy.special import legendre, jn
 import dask.array as darray
 from functools import partial
 from scipy.spatial import KDTree
```
```diff
@@ -183,6 +183,12 @@ def msd(
         displacements = displacements_without_drift(start_frame, end_frame, trajectory)
     if axis == "all":
         return (displacements**2).sum(axis=1).mean()
+    elif axis == "xy" or axis == "yx":
+        return (displacements[:, [0, 1]]**2).sum(axis=1).mean()
+    elif axis == "xz" or axis == "zx":
+        return (displacements[:, [0, 2]]**2).sum(axis=1).mean()
+    elif axis == "yz" or axis == "zy":
+        return (displacements[:, [1, 2]]**2).sum(axis=1).mean()
 elif axis == "x":
        return (displacements[:, 0] ** 2).mean()
    elif axis == "y":
```
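The new plane options restrict the MSD to two Cartesian components, the natural observable for films, slit pores, and other quasi-2D systems. A hedged usage sketch, assuming `msd` keeps the `(start_frame, end_frame, trajectory=..., axis=...)` calling convention suggested by the body above:

```python
# In-plane mean squared displacement between two frames of `trajectory`.
msd_xy = msd(trajectory[0], trajectory[100], trajectory=trajectory, axis="xy")
```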
```diff
@@ -211,6 +217,15 @@ def isf(
     if axis == "all":
         distance = (displacements**2).sum(axis=1) ** 0.5
         return np.sinc(distance * q / np.pi).mean()
+    elif axis == "xy" or axis == "yx":
+        distance = (displacements[:, [0, 1]]**2).sum(axis=1) ** 0.5
+        return np.real(jn(0, distance * q)).mean()
+    elif axis == "xz" or axis == "zx":
+        distance = (displacements[:, [0, 2]]**2).sum(axis=1) ** 0.5
+        return np.real(jn(0, distance * q)).mean()
+    elif axis == "yz" or axis == "zy":
+        distance = (displacements[:, [1, 2]]**2).sum(axis=1) ** 0.5
+        return np.real(jn(0, distance * q)).mean()
     elif axis == "x":
         distance = np.abs(displacements[:, 0])
         return np.mean(np.cos(np.abs(q * distance)))
```
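The Bessel branch is the 2D analogue of the 3D sinc: averaging exp(iq·Δr) over q directions within a plane yields the zeroth-order Bessel function J₀, just as averaging over the full sphere yields sinc:

```latex
F_s(q,t)=\left\langle e^{\,i\mathbf{q}\cdot\Delta\mathbf{r}}\right\rangle=
\begin{cases}
\left\langle \dfrac{\sin(q\,\Delta r)}{q\,\Delta r}\right\rangle & \text{3D average (axis = "all")}\\[8pt]
\left\langle J_0\!\left(q\,\Delta r_{\parallel}\right)\right\rangle & \text{2D in-plane average (axis = "xy", "xz", "yz")}
\end{cases}
```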
```diff
@@ -262,6 +277,12 @@ def van_hove_self(
         vectors = displacements_without_drift(start_frame, end_frame, trajectory)
     if axis == "all":
         delta_r = (vectors**2).sum(axis=1) ** 0.5
+    elif axis == "xy" or axis == "yx":
+        delta_r = (vectors[:, [0, 1]]**2).sum(axis=1) ** 0.5
+    elif axis == "xz" or axis == "zx":
+        delta_r = (vectors[:, [0, 2]]**2).sum(axis=1) ** 0.5
+    elif axis == "yz" or axis == "zy":
+        delta_r = (vectors[:, [1, 2]]**2).sum(axis=1) ** 0.5
     elif axis == "x":
         delta_r = np.abs(vectors[:, 0])
     elif axis == "y":
```
```diff
@@ -410,9 +431,9 @@ def non_gaussian_parameter(
     trajectory: Coordinates = None,
     axis: str = "all",
 ) -> float:
-    """
+    r"""
     Calculate the non-Gaussian parameter.
-    ..math:
+    .. math:
       \alpha_2 (t) =
         \frac{3}{5}\frac{\langle r_i^4(t)\rangle}{\langle r_i^2(t)\rangle^2} - 1
     """
```
```diff
@@ -423,6 +444,15 @@ def non_gaussian_parameter(
     if axis == "all":
         r = (vectors**2).sum(axis=1)
         dimensions = 3
+    elif axis == "xy" or axis == "yx":
+        r = (vectors[:, [0, 1]]**2).sum(axis=1)
+        dimensions = 2
+    elif axis == "xz" or axis == "zx":
+        r = (vectors[:, [0, 2]]**2).sum(axis=1)
+        dimensions = 2
+    elif axis == "yz" or axis == "zy":
+        r = (vectors[:, [1, 2]]**2).sum(axis=1)
+        dimensions = 2
     elif axis == "x":
         r = vectors[:, 0] ** 2
         dimensions = 1
```
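The `dimensions` value presumably feeds the d-dimensional form of the non-Gaussian parameter, which reduces to the 3/5 prefactor of the docstring for d = 3 and vanishes for Gaussian displacements:

```latex
\alpha_2(t)=\frac{d}{d+2}\,\frac{\langle \Delta r^{4}(t)\rangle}{\langle \Delta r^{2}(t)\rangle^{2}}-1,
\qquad d=\begin{cases}3 & \text{"all"}\\ 2 & \text{plane axes}\\ 1 & \text{single axes}\end{cases}
```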
```diff
@@ -126,9 +126,6 @@ def rdf(
     particles_in_volume = int(
         np.max(number_of_neighbors(atoms_a, query_atoms=atoms_b, r_max=bins[-1])) * 1.1
     )
-    print(atoms_a[:5])
-    print(atoms_b[:5])
-    print(" ")
     distances, indices = next_neighbors(
         atoms_a,
         atoms_b,
```
```diff
@@ -185,10 +182,10 @@ def tetrahedral_order(
     )
 
     # Connection vectors
-    neighbors_1 -= atoms
-    neighbors_2 -= atoms
-    neighbors_3 -= atoms
-    neighbors_4 -= atoms
+    neighbors_1 = pbc_diff(neighbors_1, atoms, box=atoms.box)
+    neighbors_2 = pbc_diff(neighbors_2, atoms, box=atoms.box)
+    neighbors_3 = pbc_diff(neighbors_3, atoms, box=atoms.box)
+    neighbors_4 = pbc_diff(neighbors_4, atoms, box=atoms.box)
 
     # Normed Connection vectors
     neighbors_1 /= np.linalg.norm(neighbors_1, axis=-1).reshape(-1, 1)
```
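Plain subtraction breaks when a neighbor's image lies across the periodic boundary; `pbc_diff` (from `mdevaluate.pbc`) returns the minimum-image difference instead. For an orthorhombic box the idea reduces to the following self-contained sketch (the real function also covers the package's other box cases):

```python
import numpy as np

def minimum_image_diff(a, b, box_lengths):
    # Sketch of a minimum-image difference a - b in an orthorhombic box.
    d = np.asarray(a) - np.asarray(b)
    return d - np.round(d / box_lengths) * box_lengths

box = np.array([3.0, 3.0, 3.0])
print(minimum_image_diff([0.1, 0.0, 0.0], [2.9, 0.0, 0.0], box))  # -> [0.2 0. 0.]
```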
```diff
@@ -309,7 +306,7 @@ def next_neighbor_distribution(
     )[1]
     resname_nn = reference.residue_names[nn]
     count_nn = (resname_nn == atoms.residue_names.reshape(-1, 1)).sum(axis=1)
-    return np.histogram(count_nn, bins=bins, normed=normed)[0]
+    return np.histogram(count_nn, bins=bins, density=normed)[0]
 
 
 def hbonds(
```
```diff
@@ -11,7 +11,7 @@ from mdevaluate.coordinates import CoordinateFrame, Coordinates
 from mdevaluate.pbc import pbc_points
 
 
-def a_ij(atoms: ArrayLike, N: int = 4, l: int = 3) -> tuple[NDArray, NDArray]:
+def calc_aij(atoms: ArrayLike, N: int = 4, l: int = 3) -> tuple[NDArray, NDArray]:
     tree = KDTree(atoms)
 
     dist, indices = tree.query(atoms, N + 1)
```
```diff
@@ -84,18 +84,18 @@ def count_ice_types(iceTypes: NDArray) -> NDArray:
 
 
 def selector_ice(
-    start_frame: CoordinateFrame,
-    traj: Coordinates,
+    oxygen_atoms_water: CoordinateFrame,
     chosen_ice_types: ArrayLike,
     combined: bool = True,
+    next_neighbor_distance: float = 0.35,
 ) -> NDArray:
-    oxygen = traj.subset(atom_name="OW")
-    atoms = oxygen[start_frame.step]
-    atoms = atoms % np.diag(atoms.box)
-    atoms_PBC = pbc_points(atoms, thickness=1)
-    aij, indices = a_ij(atoms_PBC)
+    atoms = oxygen_atoms_water
+    atoms_PBC = pbc_points(atoms, thickness=next_neighbor_distance * 2.2)
+    aij, indices = calc_aij(atoms_PBC)
     tree = KDTree(atoms_PBC)
-    neighbors = tree.query_ball_point(atoms_PBC, 0.35, return_length=True)
+    neighbors = tree.query_ball_point(
+        atoms_PBC, next_neighbor_distance, return_length=True
+    ) - 1
     index_SOL = atoms_PBC.tolist().index(atoms[0].tolist())
     index_SOL = np.arange(index_SOL, index_SOL + len(atoms))
     ice_Types = classify_ice(aij, indices, neighbors, index_SOL)
```
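Callers now subset to the water oxygens themselves and pass a single frame; the trajectory argument is gone. A sketch of the new call pattern, using the names from the diff:

```python
# Old: selector_ice(frame, traj, [0, 1, 2])
# New: subset to oxygens first, then pass the frame directly.
oxygen = traj.subset(atom_name="OW")
ice_indices = selector_ice(oxygen[0], chosen_ice_types=[0, 1, 2])
```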
```diff
@@ -117,9 +117,9 @@ def selector_ice(
 def ice_types(trajectory: Coordinates, segments: int = 10000) -> pd.DataFrame:
     def ice_types_distribution(frame: CoordinateFrame, selector: Callable) -> NDArray:
         atoms_PBC = pbc_points(frame, thickness=1)
-        aij, indices = a_ij(atoms_PBC)
+        aij, indices = calc_aij(atoms_PBC)
         tree = KDTree(atoms_PBC)
-        neighbors = tree.query_ball_point(atoms_PBC, 0.35, return_length=True)
+        neighbors = tree.query_ball_point(atoms_PBC, 0.35, return_length=True) - 1
         index = selector(frame, atoms_PBC)
         ice_types_data = classify_ice(aij, indices, neighbors, index)
         ice_parts_data = count_ice_types(ice_types_data)
```
```diff
@@ -161,8 +161,8 @@ def ice_types(trajectory: Coordinates, segments: int = 10000) -> pd.DataFrame:
 def ice_clusters_traj(
     traj: Coordinates, segments: int = 10000, skip: float = 0.1
 ) -> list:
-    def ice_clusters(frame: CoordinateFrame, traj: Coordinates) -> Tuple[float, list]:
-        selection = selector_ice(frame, traj, [0, 1, 2])
+    def ice_clusters(frame: CoordinateFrame) -> Tuple[float, list]:
+        selection = selector_ice(frame, [0, 1, 2])
         if len(selection) == 0:
             return frame.time, []
         else:
@@ -194,6 +194,6 @@ def ice_clusters_traj(
         np.int_(np.linspace(len(traj) * skip, len(traj) - 1, num=segments))
     )
     all_clusters = [
-        ice_clusters(traj[frame_index], traj) for frame_index in frame_indices
+        ice_clusters(traj[frame_index]) for frame_index in frame_indices
     ]
     return all_clusters
```
```diff
@@ -1,9 +1,9 @@
 from functools import partial
+from typing import Optional
 
 import numpy as np
 from numpy.typing import ArrayLike, NDArray
 from numpy.polynomial.polynomial import Polynomial as Poly
-import math
 from scipy.spatial import KDTree
 import pandas as pd
 import multiprocessing as mp
```
```diff
@@ -11,6 +11,56 @@ import multiprocessing as mp
 from ..coordinates import Coordinates
 
 
+def _pbc_points_reduced(
+    coordinates: ArrayLike,
+    pore_geometry: str,
+    box: Optional[NDArray] = None,
+    thickness: Optional[float] = None,
+) -> tuple[NDArray, NDArray]:
+    if box is None:
+        box = coordinates.box
+    if pore_geometry == "cylindrical":
+        grid = np.array([[i, j, k] for k in [-1, 0, 1] for j in [0] for i in [0]])
+        indices = np.tile(np.arange(len(coordinates)), 3)
+    elif pore_geometry == "slit":
+        grid = np.array(
+            [[i, j, k] for k in [0] for j in [1, 0, -1] for i in [-1, 0, 1]]
+        )
+        indices = np.tile(np.arange(len(coordinates)), 9)
+    else:
+        raise ValueError(
+            f"pore_geometry is {pore_geometry}, should either be "
+            f"'cylindrical' or 'slit'"
+        )
+    coordinates_pbc = np.concatenate([coordinates + v @ box for v in grid], axis=0)
+    size = np.diag(box)
+
+    if thickness is not None:
+        mask = np.all(coordinates_pbc > -thickness, axis=1)
+        coordinates_pbc = coordinates_pbc[mask]
+        indices = indices[mask]
+        mask = np.all(coordinates_pbc < size + thickness, axis=1)
+        coordinates_pbc = coordinates_pbc[mask]
+        indices = indices[mask]
+
+    return coordinates_pbc, indices
+
+
+def _build_tree(points, box, r_max, pore_geometry):
+    if np.all(np.diag(np.diag(box)) == box):
+        tree = KDTree(points % box, boxsize=box)
+        points_pbc_index = None
+    else:
+        points_pbc, points_pbc_index = _pbc_points_reduced(
+            points,
+            pore_geometry,
+            box,
+            thickness=r_max + 0.01,
+        )
+        tree = KDTree(points_pbc)
+    return tree, points_pbc_index
+
+
 def occupation_matrix(
     trajectory: Coordinates,
     edge_length: float = 0.05,
```
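`_build_tree` centralizes the two neighbor-search regimes used below: a diagonal box gets SciPy's periodic `KDTree(..., boxsize=...)` directly, while anything else is replicated along the periodic directions of the pore geometry via `_pbc_points_reduced`, together with an index map back to the original points. A sketch of the diagonal-box branch (random points stand in for real maxima positions):

```python
import numpy as np

# Diagonal box: the periodic KDTree handles images internally, so no
# index map is needed (points_pbc_index is None).
box = np.diag([3.0, 3.0, 3.0])
points = np.random.uniform(0.0, 3.0, size=(100, 3))
tree, points_pbc_index = _build_tree(points, box, r_max=0.5, pore_geometry="cylindrical")
assert points_pbc_index is None
```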
```diff
@@ -28,11 +78,7 @@ def occupation_matrix(
     z_bins = np.arange(0, box[2][2] + edge_length, edge_length)
     bins = [x_bins, y_bins, z_bins]
     # Trajectory is split for parallel computing
-    size = math.ceil(len(frame_indices) / nodes)
-    indices = [
-        np.arange(len(frame_indices))[i : i + size]
-        for i in range(0, len(frame_indices), size)
-    ]
+    indices = np.array_split(frame_indices, nodes)
     pool = mp.Pool(nodes)
     results = pool.map(
         partial(_calc_histogram, trajectory=trajectory, bins=bins), indices
```
```diff
@@ -72,19 +118,19 @@ def _calc_histogram(
 
 
 def find_maxima(
-    occupation_df: pd.DataFrame, box: ArrayLike, edge_length: float = 0.05
+    occupation_df: pd.DataFrame, box: ArrayLike, radius: float, pore_geometry: str
 ) -> pd.DataFrame:
     maxima_df = occupation_df.copy()
     maxima_df["maxima"] = None
     points = np.array(maxima_df[["x", "y", "z"]])
-    tree = KDTree(points, boxsize=box)
-    all_neighbors = tree.query_ball_point(
-        points, edge_length * 3 ** (1 / 2) + edge_length / 100
-    )
+    tree, points_pbc_index = _build_tree(points, box, radius, pore_geometry)
     for i in range(len(maxima_df)):
         if maxima_df.loc[i, "maxima"] is not None:
             continue
-        neighbors = np.array(all_neighbors[i])
+        maxima_pos = maxima_df.loc[i, ["x", "y", "z"]]
+        neighbors = np.array(tree.query_ball_point(maxima_pos, radius))
+        if points_pbc_index is not None:
+            neighbors = points_pbc_index[neighbors]
         neighbors = neighbors[neighbors != i]
         if len(neighbors) == 0:
             maxima_df.loc[i, "maxima"] = True
```
```diff
@@ -104,23 +150,39 @@ def _calc_energies(
     maxima_df: pd.DataFrame,
     bins: ArrayLike,
     box: NDArray,
+    pore_geometry: str,
     T: float,
+    nodes: int = 8,
 ) -> NDArray:
     points = np.array(maxima_df[["x", "y", "z"]])
-    tree = KDTree(points, boxsize=box)
+    tree, points_pbc_index = _build_tree(points, box, bins[-1], pore_geometry)
     maxima = maxima_df.loc[maxima_indices, ["x", "y", "z"]]
     maxima_occupations = np.array(maxima_df.loc[maxima_indices, "occupation"])
     num_of_neighbors = np.max(
         tree.query_ball_point(maxima, bins[-1], return_length=True)
     )
-    distances, indices = tree.query(
-        maxima, k=num_of_neighbors, distance_upper_bound=bins[-1]
-    )
+    split_maxima = []
+    for i in range(0, len(maxima), 1000):
+        split_maxima.append(maxima[i : i + 1000])
+
+    distances = []
+    indices = []
+    for maxima in split_maxima:
+        distances_step, indices_step = tree.query(
+            maxima, k=num_of_neighbors, distance_upper_bound=bins[-1], workers=nodes
+        )
+        distances.append(distances_step)
+        indices.append(indices_step)
+    distances = np.concatenate(distances)
+    indices = np.concatenate(indices)
     all_energy_hist = []
     all_occupied_bins_hist = []
     if distances.ndim == 1:
         current_distances = distances[1:][distances[1:] <= bins[-1]]
-        current_indices = indices[1:][distances[1:] <= bins[-1]]
+        if points_pbc_index is None:
+            current_indices = indices[1:][distances[1:] <= bins[-1]]
+        else:
+            current_indices = points_pbc_index[indices[1:][distances[1:] <= bins[-1]]]
         energy = (
             -np.log(maxima_df.loc[current_indices, "occupation"] / maxima_occupations)
             * T
```
```diff
@@ -131,8 +193,12 @@ def _calc_energies(
         return result
     for i, maxima_occupation in enumerate(maxima_occupations):
         current_distances = distances[i, 1:][distances[i, 1:] <= bins[-1]]
-        current_indices = indices[i, 1:][distances[i, 1:] <= bins[-1]]
+        if points_pbc_index is None:
+            current_indices = indices[i, 1:][distances[i, 1:] <= bins[-1]]
+        else:
+            current_indices = points_pbc_index[
+                indices[i, 1:][distances[i, 1:] <= bins[-1]]
+            ]
         energy = (
             -np.log(maxima_df.loc[current_indices, "occupation"] / maxima_occupation)
             * T
```
```diff
@@ -168,9 +234,12 @@ def distance_resolved_energies(
     distance_bins: ArrayLike,
     r_bins: ArrayLike,
     box: NDArray,
+    pore_geometry: str,
     temperature: float,
+    nodes: int = 8,
 ) -> pd.DataFrame:
     results = []
+    distances = []
     for i in range(len(distance_bins) - 1):
         maxima_indices = np.array(
             maxima_df.index[
@@ -179,11 +248,22 @@ def distance_resolved_energies(
                 * (maxima_df["maxima"] == True)
             ]
         )
-        results.append(
-            _calc_energies(maxima_indices, maxima_df, r_bins, box, temperature)
-        )
+        try:
+            results.append(
+                _calc_energies(
+                    maxima_indices,
+                    maxima_df,
+                    r_bins,
+                    box,
+                    pore_geometry,
+                    temperature,
+                    nodes,
+                )
+            )
+            distances.append((distance_bins[i] + distance_bins[i + 1]) / 2)
+        except ValueError:
+            pass
 
-    distances = (distance_bins[:-1] + distance_bins[1:]) / 2
     radii = (r_bins[:-1] + r_bins[1:]) / 2
     d = np.array([d for d in distances for r in radii])
     r = np.array([r for d in distances for r in radii])
```
```diff
@@ -192,7 +272,11 @@ def distance_resolved_energies(
 
 
 def find_energy_maxima(
-    energy_df: pd.DataFrame, r_min: float, r_max: float
+    energy_df: pd.DataFrame,
+    r_min: float,
+    r_max: float,
+    r_eval: float = None,
+    degree: int = 2,
 ) -> pd.DataFrame:
     distances = []
     energies = []
@@ -201,6 +285,9 @@ def find_energy_maxima(
         x = np.array(data_d["r"])
         y = np.array(data_d["energy"])
         mask = (x >= r_min) * (x <= r_max)
-        p3 = Poly.fit(x[mask], y[mask], deg=2)
-        energies.append(np.max(p3(np.linspace(r_min, r_max, 1000))))
+        p3 = Poly.fit(x[mask], y[mask], deg=degree)
+        if r_eval is None:
+            energies.append(np.max(p3(np.linspace(r_min, r_max, 1000))))
+        else:
+            energies.append(p3(r_eval))
     return pd.DataFrame({"d": distances, "energy": energies})
```
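With the new keywords, the polynomial degree is tunable and the fit can be evaluated at a fixed radius instead of taking its maximum over [r_min, r_max]. A short usage sketch:

```python
# Evaluate the fitted polynomial at r = 0.1 rather than taking its
# maximum on the interval (energy_df from distance_resolved_energies).
barriers = find_energy_maxima(energy_df, r_min=0.05, r_max=0.15, r_eval=0.1)
```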
```diff
@@ -265,7 +265,7 @@ def LSI(
     )
     distributions = np.array(
         [
-            LSI_distribution(trajectory[frame_index], trajectory, bins, selector=None)
+            LSI_distribution(trajectory[frame_index], bins, selector=None)
             for frame_index in frame_indices
         ]
     )
```
```diff
@@ -7,7 +7,7 @@ from numpy.typing import ArrayLike, NDArray
 
 from itertools import product
 
-from .logging import logger
+from .logging_util import logger
 
 if TYPE_CHECKING:
     from mdevaluate.coordinates import CoordinateFrame
```
```diff
@@ -156,7 +156,7 @@ def nojump(frame: CoordinateFrame, usecache: bool = True) -> CoordinateFrame:
                     [m[i0 : abstep + 1].sum(axis=0) for m in reader.nojump_matrices]
                 ).T
             )
-            * frame.box.diagonal()
+            @ frame.box
         )
 
         reader._nojump_cache[abstep] = delta
@@ -173,7 +173,7 @@ def nojump(frame: CoordinateFrame, usecache: bool = True) -> CoordinateFrame:
                     ]
                 ).T
             )
-            * frame.box.diagonal()
+            @ frame.box
         )
     return frame - delta
```
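Multiplying the per-frame jump counts by the full box matrix instead of scaling by its diagonal makes the unwrapping correct for triclinic cells: the correction is a sum of lattice vectors, which only coincides with a per-axis scaling when the box is orthorhombic:

```latex
\Delta\mathbf{r} \;=\; \mathbf{n}B \;=\; n_x\mathbf{b}_x + n_y\mathbf{b}_y + n_z\mathbf{b}_z,
\qquad \mathbf{n}\in\mathbb{Z}^{3},\;
B=\begin{pmatrix}\mathbf{b}_x\\ \mathbf{b}_y\\ \mathbf{b}_z\end{pmatrix}
```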
```diff
@@ -19,13 +19,13 @@ import MDAnalysis
 from scipy import sparse
 
 from .checksum import checksum
-from .logging import logger
+from .logging_util import logger
 from . import atoms
 from .coordinates import Coordinates
 
 CSR_ATTRS = ("data", "indices", "indptr")
 NOJUMP_MAGIC = 2016
-Group_RE = re.compile("\[ ([-+\w]+) \]")
+Group_RE = re.compile(r"\[ ([-+\w]+) \]")
 
 
 class NojumpError(Exception):
```
```diff
@@ -152,7 +152,7 @@ def nojump_load_filename(reader: BaseReader):
         )
         if os.path.exists(full_path_fallback):
             return full_path_fallback
-    if os.path.exists(fname) or is_writeable(directory):
+    if os.path.exists(full_path) or is_writeable(directory):
         return full_path
     else:
         user_data_dir = os.path.join("/data/", os.environ["HOME"].split("/")[-1])
```
```diff
@@ -187,19 +187,33 @@ def nojump_save_filename(reader: BaseReader):
 
 
 def parse_jumps(trajectory: Coordinates):
     prev = trajectory[0].whole
-    box = prev.box.diagonal()
+    box = prev.box
     SparseData = namedtuple("SparseData", ["data", "row", "col"])
     jump_data = (
         SparseData(data=array("b"), row=array("l"), col=array("l")),
         SparseData(data=array("b"), row=array("l"), col=array("l")),
         SparseData(data=array("b"), row=array("l"), col=array("l")),
     )
 
     for i, curr in enumerate(trajectory):
         if i % 500 == 0:
             logger.debug("Parse jumps Step: %d", i)
-        delta = ((curr - prev) / box).round().astype(np.int8)
+        r3 = np.subtract(curr, prev)
+        delta_z = np.array(np.rint(np.divide(r3[:, 2], box[2][2])), dtype=np.int8)
+        r2 = np.subtract(
+            r3,
+            (np.rint(np.divide(r3[:, 2], box[2][2])))[:, np.newaxis]
+            * box[2][np.newaxis, :],
+        )
+        delta_y = np.array(np.rint(np.divide(r2[:, 1], box[1][1])), dtype=np.int8)
+        r1 = np.subtract(
+            r2,
+            (np.rint(np.divide(r2[:, 1], box[1][1])))[:, np.newaxis]
+            * box[1][np.newaxis, :],
+        )
+        delta_x = np.array(np.rint(np.divide(r1[:, 0], box[0][0])), dtype=np.int8)
+        delta = np.array([delta_x, delta_y, delta_z]).T
         prev = curr
+        box = prev.box
         for d in range(3):
             (col,) = np.where(delta[:, d] != 0)
             jump_data[d].col.extend(col)
```
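For a GROMACS-style triclinic box the matrix is lower triangular: b_y may have an x component and b_z may have x and y components, while only b_z carries z. Detecting jumps in the order z → y → x and subtracting each detected image shift before testing the next component therefore keeps the counts independent:

```latex
n_z=\operatorname{rnd}\!\left(\frac{r_z}{B_{zz}}\right),\;
\mathbf{r}'=\mathbf{r}-n_z\mathbf{b}_z,\qquad
n_y=\operatorname{rnd}\!\left(\frac{r'_y}{B_{yy}}\right),\;
\mathbf{r}''=\mathbf{r}'-n_y\mathbf{b}_y,\qquad
n_x=\operatorname{rnd}\!\left(\frac{r''_x}{B_{xx}}\right)
```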
```diff
@@ -261,7 +275,7 @@ def load_nojump_matrices(reader: BaseReader):
                 "Loaded Nojump matrices: {}".format(nojump_load_filename(reader))
             )
         else:
-            logger.info("Invlaid Nojump Data: {}".format(nojump_load_filename(reader)))
+            logger.info("Invalid Nojump Data: {}".format(nojump_load_filename(reader)))
     except KeyError:
         logger.info("Removing zip-File: %s", zipname)
         os.remove(nojump_load_filename(reader))
```
```diff
@@ -14,7 +14,7 @@ from scipy.ndimage import uniform_filter1d
 from scipy.interpolate import interp1d
 from scipy.optimize import curve_fit
 
-from .logging import logger
+from .logging_util import logger
 from .functions import kww, kww_1e
```
```diff
@@ -13,42 +13,24 @@ def trajectory(request):
 
 
 def test_get_fel(trajectory):
-    test_array = np.array(
-        [
-            174.46253634,
-            174.60905476,
-            178.57658092,
-            182.43001192,
-            180.57916378,
-            176.49886217,
-            178.96018547,
-            181.13561782,
-            178.31026314,
-            176.08903996,
-            180.71215345,
-            181.59703135,
-            180.34329368,
-            187.02474488,
-            197.99167477,
-            214.05788031,
-            245.58571282,
-            287.52457507,
-            331.53492965,
-        ]
-    )
-
+    test_array = np.array([210., 214., 209., 192., 200., 193., 230., 218., 266.])
     OW = trajectory.subset(atom_name="OW")
-
-    box = np.diag(trajectory[0].box)
-    box_voxels = (box // [0.05, 0.05, 0.05] + [1, 1, 1]) * [0.05, 0.05, 0.05]
-    occupation_matrix = fel.occupation_matrix(OW, skip=0, segments=1000)
-    maxima_matrix = fel.find_maxima(occupation_matrix, box=box_voxels, edge_length=0.05)
-    maxima_matrix = fel.add_distances(maxima_matrix, "cylindrical", box / 2)
-    r_bins = np.arange(0, 2, 0.02)
-    distance_bins = np.arange(0.05, 2.05, 0.1)
+    box = trajectory[0].box
+    box_voxels = (np.diag(box) // [0.05, 0.05, 0.05] + [1, 1, 1]) * [0.05, 0.05, 0.05]
+    occupation_matrix = fel.occupation_matrix(OW, skip=0, segments=10)
+    radius_maxima = 0.05 * 3 ** (1 / 2) + 0.05 / 100
+    maxima_matrix = fel.find_maxima(
+        occupation_matrix,
+        box=box_voxels,
+        radius=radius_maxima,
+        pore_geometry="cylindrical",
+    )
+    maxima_matrix = fel.add_distances(maxima_matrix, "cylindrical", np.diag(box) / 2)
+    r_bins = np.arange(0, 0.5, 0.02)
+    distance_bins = np.arange(1.8, 1.9, 0.01)
     energy_df = fel.distance_resolved_energies(
-        maxima_matrix, distance_bins, r_bins, box, 225
+        maxima_matrix, distance_bins, r_bins, box, "cylindrical", 225
     )
     result = fel.find_energy_maxima(energy_df, r_min=0.05, r_max=0.15)
 
     assert (np.round(np.array(result["energy"])) == np.round(test_array)).all()
```