Compare commits

30 Commits

33c4756e34 ... feature/co
| SHA1 |
|---|
| 6b7641f152 |
| cd7097ad46 |
| a0ca2d8657 |
| 9ff3badab1 |
| 0f47475f22 |
| f6ff7606ad |
| 96c624efee |
| 492098fe01 |
| accb43d7e6 |
| 07b14a6cd6 |
| e124506d10 |
| 8169e76964 |
| 65ac6e9143 |
| 9f6af2af11 |
| 0ffce2f17a |
| 0eff84910b |
| dae2d6ed95 |
| ec4094cd92 |
| 4047db209c |
| 00043637e9 |
| 7585e598dc |
| 6d8b86c1ef |
| a2a0ae8d7b |
| 90bd90a608 |
| 67d3e70a66 |
| c09549902a |
| b7bb8cb379 |
| 7b9f8b6773 |
| 31eb145a13 |
| b5395098ce |
```diff
@@ -16,7 +16,7 @@ from . import reader
 from . import system
 from . import utils
 from . import extra
-from .logging import logger
+from .logging_util import logger
 
 
 def open(
```
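Note on the recurring import change (here and in most hunks below): the package-internal module `logging` is renamed to `logging_util`, presumably to avoid colliding with the standard-library `logging` module. A minimal sketch of the hazard this sidesteps; the package layout is hypothetical:

```python
# Hypothetical layout illustrating why a module literally named "logging" is risky:
#
#   mypkg/
#       __init__.py
#       logging.py      # can shadow the stdlib "logging", e.g. for scripts
#                       # executed from inside the package directory
#
# After renaming it to logging_util.py, "import logging" always resolves to
# the standard library:
import logging

logger = logging.getLogger("mdevaluate")
logger.addHandler(logging.NullHandler())  # library-style default handler
```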
```diff
@@ -5,7 +5,7 @@ from typing import Optional, Callable, Iterable
 
 import numpy as np
 from .checksum import checksum
-from .logging import logger
+from .logging_util import logger
 
 autosave_directory: Optional[str] = None
 load_autosave_data = False
@@ -166,8 +166,10 @@ def autosave_data(
         @functools.wraps(function)
         def autosave(*args, **kwargs):
             description = kwargs.pop("description", "")
+            autosave_dir_overwrite = kwargs.pop("autosave_dir_overwrite", None)
+            autosave_dir = autosave_dir_overwrite if autosave_dir_overwrite is not None else autosave_directory
             autoload = kwargs.pop("autoload", True) and load_autosave_data
-            if autosave_directory is not None:
+            if autosave_dir is not None:
                 relevant_args = list(args[:nargs])
                 if kwargs_keys is not None:
                     for key in [*posargs_keys, *kwargs_keys]:
```
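The wrapper now honors a per-call `autosave_dir_overwrite` keyword that takes precedence over the module-level `autosave_directory`. A standalone sketch of the precedence logic; the helper name and paths are illustrative, only the pop-then-fallback pattern mirrors the hunk above:

```python
autosave_directory = "/data/autosave"  # module-level default (hypothetical path)

def resolve_autosave_dir(**kwargs):
    # Same pattern as in the wrapper: pop the override so the wrapped function
    # never sees it, then fall back to the module-level default.
    overwrite = kwargs.pop("autosave_dir_overwrite", None)
    return overwrite if overwrite is not None else autosave_directory

print(resolve_autosave_dir())                                      # /data/autosave
print(resolve_autosave_dir(autosave_dir_overwrite="/scratch/t1"))  # /scratch/t1
```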
```diff
@@ -1,9 +1,14 @@
 import functools
 import hashlib
-from .logging import logger
+from .logging_util import logger
 from types import ModuleType, FunctionType
 import inspect
 from typing import Iterable
+import ast
+import io
+import tokenize
+import re
+import textwrap
 
 import numpy as np
 
@@ -28,19 +33,46 @@ def version(version_nr: int, calls: Iterable = ()):
     return decorator
 
 
-def strip_comments(s: str):
-    """Strips comment lines and docstring from Python source string."""
-    o = ""
-    in_docstring = False
-    for l in s.split("\n"):
-        if l.strip().startswith(("#", '"', "'")) or in_docstring:
-            in_docstring = l.strip().startswith(('"""', "'''")) + in_docstring == 1
-            continue
-        o += l + "\n"
-    return o
+def strip_comments(source: str) -> str:
+    """Removes docstrings, comments, and irrelevant whitespace from Python source code."""
+
+    # Step 1: Remove docstrings using AST
+    def remove_docstrings(node):
+        if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef, ast.ClassDef, ast.Module)):
+            if (doc := ast.get_docstring(node, clean=False)):
+                first_stmt = node.body[0]
+                if isinstance(first_stmt, ast.Expr) and isinstance(first_stmt.value, ast.Constant):
+                    node.body.pop(0)  # Remove the docstring entirely
+        for child in ast.iter_child_nodes(node):
+            remove_docstrings(child)
+
+    tree = ast.parse(textwrap.dedent(source))
+    remove_docstrings(tree)
+    code_without_docstrings = ast.unparse(tree)
+
+    # Step 2: Remove comments using tokenize
+    tokens = tokenize.generate_tokens(io.StringIO(code_without_docstrings).readline)
+    result = []
+    last_lineno = -1
+    last_col = 0
+
+    for toknum, tokval, (srow, scol), (erow, ecol), line in tokens:
+        if toknum == tokenize.COMMENT:
+            continue
+        if srow > last_lineno:
+            last_col = 0
+        if scol > last_col:
+            result.append(" " * (scol - last_col))
+        result.append(tokval)
+        last_lineno, last_col = erow, ecol
+
+    code_no_comments = ''.join(result)
+
+    # Step 3: Remove empty lines (whitespace-only or truly blank)
+    return "\n".join([line for line in code_no_comments.splitlines() if line.strip() != ""])
 
 
-def checksum(*args, csum=None):
+def checksum(*args, csum=None, _seen=None):
     """
     Calculate a checksum of any object, by sha1 hash.
 
```
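The old line-based `strip_comments` only looked at how each line starts, so ordinary strings could be misclassified as comments; the rewrite parses the source properly (an AST pass for docstrings, a `tokenize` pass for comments). Note that `ast.unparse` requires Python 3.9+. A quick behavioural check, assuming the new function above is in scope:

```python
sample = '''
def f(x):
    """Docstring; should not influence the checksum."""
    # comment: should not influence the checksum either
    return x + 1  # nor this one
'''

print(strip_comments(sample))
# def f(x):
#     return x + 1
```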
```diff
@@ -60,7 +92,15 @@ def checksum(*args, csum=None):
         csum = hashlib.sha1()
         csum.update(str(SALT).encode())
 
+    if _seen is None:
+        _seen = set()
+
     for arg in args:
+        obj_id = id(arg)
+        if obj_id in _seen:
+            continue
+        _seen.add(obj_id)
+
         if hasattr(arg, "__checksum__"):
             logger.debug("Checksum via __checksum__: %s", str(arg))
             csum.update(str(arg.__checksum__()).encode())
@@ -73,17 +113,19 @@ def checksum(*args, csum=None):
         elif isinstance(arg, FunctionType):
             csum.update(strip_comments(inspect.getsource(arg)).encode())
             c = inspect.getclosurevars(arg)
-            for v in {**c.nonlocals, **c.globals}.values():
+            merged = {**c.nonlocals, **c.globals}
+            for key in sorted(merged):  # deterministic ordering
+                v = merged[key]
                 if v is not arg:
-                    checksum(v, csum=csum)
+                    checksum(v, csum=csum, _seen=_seen)
         elif isinstance(arg, functools.partial):
             logger.debug("Checksum via partial for %s", str(arg))
-            checksum(arg.func, csum=csum)
+            checksum(arg.func, csum=csum, _seen=_seen)
             for x in arg.args:
-                checksum(x, csum=csum)
+                checksum(x, csum=csum, _seen=_seen)
             for k in sorted(arg.keywords.keys()):
                 csum.update(k.encode())
-                checksum(arg.keywords[k], csum=csum)
+                checksum(arg.keywords[k], csum=csum, _seen=_seen)
         elif isinstance(arg, np.ndarray):
             csum.update(arg.tobytes())
         else:
```
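Threading a `_seen` set of object ids through the recursion guards against reference cycles: the previous `if v is not arg` check only caught direct self-reference, so mutually recursive functions would recurse without bound. The switch to `sorted(merged)` additionally fixes the iteration order of closure variables, so equal code hashes equally regardless of how the mapping was built. A standalone illustration of the kind of cycle the guard breaks:

```python
import inspect

# Mutually recursive helpers: each function appears among the other's globals,
# so checksum(is_even) -> checksum(is_odd) -> checksum(is_even) -> ... would
# never terminate without the _seen guard.
def is_even(n):
    return n == 0 or is_odd(n - 1)

def is_odd(n):
    return n != 0 and is_even(n - 1)

print("is_odd" in inspect.getclosurevars(is_even).globals)  # True
print("is_even" in inspect.getclosurevars(is_odd).globals)  # True
```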
```diff
@@ -1,6 +1,6 @@
 from functools import partial, wraps
 from copy import copy
-from .logging import logger
+from .logging_util import logger
 from typing import Optional, Callable, List, Tuple
 
 import numpy as np
@@ -434,6 +434,34 @@ def center_of_masses(
         ]
     ).T[mask]
     return np.array(positions)
+
+
+@map_coordinates
+def center_of_atoms(
+    frame: CoordinateFrame, atom_indices=None, shear: bool = False
+) -> NDArray:
+    if atom_indices is None:
+        atom_indices = list(range(len(frame)))
+    res_ids = frame.residue_ids[atom_indices]
+    if shear:
+        coords = frame[atom_indices]
+        box = frame.box
+        sort_ind = res_ids.argsort(kind="stable")
+        i = np.concatenate([[0], np.where(np.diff(res_ids[sort_ind]) > 0)[0] + 1])
+        coms = coords[sort_ind[i]][res_ids - min(res_ids)]
+        cor = pbc_diff(coords, coms, box)
+        coords = coms + cor
+    else:
+        coords = frame.whole[atom_indices]
+    mask = np.bincount(res_ids)[1:] != 0
+    positions = np.array(
+        [
+            np.bincount(res_ids, weights=c)[1:]
+            / np.bincount(res_ids)[1:]
+            for c in coords.T
+        ]
+    ).T[mask]
+    return np.array(positions)
 
 
 @map_coordinates
```
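The new `center_of_atoms` mirrors `center_of_masses` but without mass weighting, i.e. it returns the geometric center of each residue. The per-residue averaging relies on `np.bincount`: with coordinates as weights it sums each column per residue id, and dividing by the unweighted counts yields the mean (the `[1:]` drops the empty id-0 bin). A standalone demo of that trick:

```python
import numpy as np

res_ids = np.array([1, 1, 2, 2, 2])
coords = np.array([[0.0, 0.0, 0.0],
                   [2.0, 0.0, 0.0],
                   [1.0, 1.0, 1.0],
                   [1.0, 2.0, 1.0],
                   [1.0, 3.0, 1.0]])

centers = np.array([
    np.bincount(res_ids, weights=c)[1:] / np.bincount(res_ids)[1:]
    for c in coords.T
]).T
print(centers)  # [[1. 0. 0.]  (residue 1)
                #  [1. 2. 1.]] (residue 2)
```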
```diff
@@ -18,7 +18,7 @@ def log_indices(first: int, last: int, num: int = 100) -> np.ndarray:
     return np.unique(np.int_(ls) - 1 + first)
 
 
-@autosave_data(2)
+@autosave_data(nargs=2, kwargs_keys=('selector', 'segments', 'skip', 'window', 'average', 'points',), version=1.0)
 def shifted_correlation(
     function: Callable,
     frames: Coordinates,
@@ -147,7 +147,8 @@ def shifted_correlation(
     num_frames = int(len(frames) * window)
     ls = np.logspace(0, np.log10(num_frames + 1), num=points)
     idx = np.unique(np.int_(ls) - 1)
-    t = np.array([frames[i].time for i in idx]) - frames[0].time
+    dt = round(frames[1].time - frames[0].time, 6)  # round to avoid bad floats
+    t = idx * dt
 
     result = np.array(
         [
```
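Two things change here. The expanded `@autosave_data(...)` call names the keyword arguments that enter the cache key and adds a version, presumably so that changed keywords or a version bump invalidate stale autosave files. And the time axis is now built from a single frame spacing rather than per-frame timestamps; this assumes equally spaced frames, with `round(..., 6)` guarding against float noise in stored timestamps. A standalone illustration of the rounding:

```python
import numpy as np

# Trajectory times are often stored in float32, so the naive difference of two
# timestamps carries noise that would otherwise leak into the whole time axis.
times = np.float32(0.0199999) * np.arange(5)  # nominally 0.02 ps spacing
dt = round(float(times[1] - times[0]), 6)     # -> 0.02
idx = np.array([0, 1, 2, 4])
print(idx * dt)  # [0.   0.02 0.04 0.08]
```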
```diff
@@ -199,7 +200,7 @@ def msd(
         raise ValueError('Parameter axis has to be ether "all", "x", "y", or "z"!')
 
 
-def isf(
+def isf_raw(
     start_frame: CoordinateFrame,
     end_frame: CoordinateFrame,
     q: float = 22.7,
@@ -216,29 +217,59 @@ def isf(
         displacements = displacements_without_drift(start_frame, end_frame, trajectory)
     if axis == "all":
         distance = (displacements**2).sum(axis=1) ** 0.5
-        return np.sinc(distance * q / np.pi).mean()
+        return np.sinc(distance * q / np.pi)
     elif axis == "xy" or axis == "yx":
         distance = (displacements[:, [0, 1]]**2).sum(axis=1) ** 0.5
-        return np.real(jn(0, distance * q)).mean()
+        return np.real(jn(0, distance * q))
     elif axis == "xz" or axis == "zx":
         distance = (displacements[:, [0, 2]]**2).sum(axis=1) ** 0.5
-        return np.real(jn(0, distance * q)).mean()
+        return np.real(jn(0, distance * q))
     elif axis == "yz" or axis == "zy":
         distance = (displacements[:, [1, 2]]**2).sum(axis=1) ** 0.5
-        return np.real(jn(0, distance * q)).mean()
+        return np.real(jn(0, distance * q))
     elif axis == "x":
         distance = np.abs(displacements[:, 0])
-        return np.mean(np.cos(np.abs(q * distance)))
+        return np.cos(np.abs(q * distance))
     elif axis == "y":
         distance = np.abs(displacements[:, 1])
-        return np.mean(np.cos(np.abs(q * distance)))
+        return np.cos(np.abs(q * distance))
     elif axis == "z":
         distance = np.abs(displacements[:, 2])
-        return np.mean(np.cos(np.abs(q * distance)))
+        return np.cos(np.abs(q * distance))
     else:
         raise ValueError('Parameter axis has to be ether "all", "x", "y", or "z"!')
 
 
+def isf(
+    start_frame: CoordinateFrame,
+    end_frame: CoordinateFrame,
+    q: float = 22.7,
+    trajectory: Coordinates = None,
+    axis: str = "all",
+) -> float:
+    """
+    Incoherent intermediate scattering function averaged over all particles.
+    See isf_raw for details.
+    """
+    return isf_raw(start_frame, end_frame, q=q, trajectory=trajectory, axis=axis).mean()
+
+
+def isf_mean_var(
+    start_frame: CoordinateFrame,
+    end_frame: CoordinateFrame,
+    q: float = 22.7,
+    trajectory: Coordinates = None,
+    axis: str = "all",
+) -> float:
+    """
+    Incoherent intermediate scattering function averaged over all particles and the
+    variance.
+    See isf_raw for details.
+    """
+    values = isf_raw(start_frame, end_frame, q=q, trajectory=trajectory, axis=axis)
+    return values.mean(), values.var()
+
+
 def rotational_autocorrelation(
     start_frame: CoordinateFrame, end_frame: CoordinateFrame, order: int = 2
 ) -> float:
```
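The old `isf` is split into `isf_raw`, which returns the per-particle values, plus two thin wrappers: `isf` averages them (unchanged behaviour), and `isf_mean_var` returns mean and variance in one pass (its `-> float` annotation notwithstanding, it returns a pair). For the `axis="all"` branch, numpy's normalized sinc gives the isotropic orientational average sin(qd)/(qd); a quick standalone check of that identity:

```python
import numpy as np

# np.sinc is normalized: np.sinc(x) = sin(pi*x)/(pi*x). Hence
# np.sinc(d * q / np.pi) == sin(q*d)/(q*d), the 3D isotropic average of
# exp(i q.r) for |r| = d.
q = 22.7
d = np.array([0.05, 0.1, 0.2])
print(np.allclose(np.sinc(d * q / np.pi), np.sin(q * d) / (q * d)))  # True
```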
```diff
@@ -430,10 +461,11 @@ def non_gaussian_parameter(
     end_frame: CoordinateFrame,
     trajectory: Coordinates = None,
     axis: str = "all",
+    full_output = False,
 ) -> float:
-    """
+    r"""
     Calculate the non-Gaussian parameter.
-    ..math:
+    .. math:
       \alpha_2 (t) =
         \frac{3}{5}\frac{\langle r_i^4(t)\rangle}{\langle r_i^2(t)\rangle^2} - 1
     """
@@ -442,27 +474,41 @@ def non_gaussian_parameter(
     else:
         vectors = displacements_without_drift(start_frame, end_frame, trajectory)
     if axis == "all":
-        r = (vectors**2).sum(axis=1)
+        r2 = (vectors**2).sum(axis=1)
         dimensions = 3
     elif axis == "xy" or axis == "yx":
-        r = (vectors[:, [0, 1]]**2).sum(axis=1)
+        r2 = (vectors[:, [0, 1]]**2).sum(axis=1)
         dimensions = 2
     elif axis == "xz" or axis == "zx":
-        r = (vectors[:, [0, 2]]**2).sum(axis=1)
+        r2 = (vectors[:, [0, 2]]**2).sum(axis=1)
         dimensions = 2
     elif axis == "yz" or axis == "zy":
-        r = (vectors[:, [1, 2]]**2).sum(axis=1)
+        r2 = (vectors[:, [1, 2]]**2).sum(axis=1)
         dimensions = 2
     elif axis == "x":
-        r = vectors[:, 0] ** 2
+        r2 = vectors[:, 0] ** 2
         dimensions = 1
     elif axis == "y":
-        r = vectors[:, 1] ** 2
+        r2 = vectors[:, 1] ** 2
        dimensions = 1
     elif axis == "z":
-        r = vectors[:, 2] ** 2
+        r2 = vectors[:, 2] ** 2
         dimensions = 1
     else:
         raise ValueError('Parameter axis has to be ether "all", "x", "y", or "z"!')
 
-    return (np.mean(r**2) / ((1 + 2 / dimensions) * (np.mean(r) ** 2))) - 1
+    m2 = np.mean(r2)
+    m4 = np.mean(r2**2)
+    if m2 == 0.0:
+        if full_output:
+            return 0.0, 0.0, 0.0
+        else:
+            return 0.0
+
+    alpha_2 = (m4 / ((1 + 2 / dimensions) * m2**2)) - 1
+    if full_output:
+        return alpha_2, m2, m4
+    else:
+        return alpha_2
+
+
```
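Besides the `r` to `r2` rename (clearer, since the variable holds squared displacements), `full_output=True` now also returns the moments m2 and m4, and the zero-displacement edge case no longer divides by zero. As a sanity check on the formula: Gaussian displacements in d dimensions satisfy m4 = (1 + 2/d) m2^2, so alpha_2 vanishes. A standalone check:

```python
import numpy as np

rng = np.random.default_rng(0)
vectors = rng.normal(size=(1_000_000, 3))  # Gaussian displacements, d = 3
r2 = (vectors**2).sum(axis=1)
m2, m4 = r2.mean(), (r2**2).mean()
alpha_2 = m4 / ((1 + 2 / 3) * m2**2) - 1
print(abs(alpha_2) < 0.02)  # True: alpha_2 ~ 0 for Gaussian statistics
```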
```diff
@@ -182,10 +182,10 @@ def tetrahedral_order(
     )
 
     # Connection vectors
-    neighbors_1 -= atoms
-    neighbors_2 -= atoms
-    neighbors_3 -= atoms
-    neighbors_4 -= atoms
+    neighbors_1 = pbc_diff(neighbors_1, atoms, box=atoms.box)
+    neighbors_2 = pbc_diff(neighbors_2, atoms, box=atoms.box)
+    neighbors_3 = pbc_diff(neighbors_3, atoms, box=atoms.box)
+    neighbors_4 = pbc_diff(neighbors_4, atoms, box=atoms.box)
 
     # Normed Connection vectors
     neighbors_1 /= np.linalg.norm(neighbors_1, axis=-1).reshape(-1, 1)
```
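Replacing the plain `neighbors_i -= atoms` subtraction with `pbc_diff` matters whenever a neighbor sits across the periodic boundary: the raw difference then points almost a full box length the wrong way, corrupting the tetrahedral order parameter. A simplified minimum-image sketch for an orthorhombic box (mdevaluate's `pbc_diff` is the general version):

```python
import numpy as np

def minimum_image_diff(a, b, box_lengths):
    # Fold each component of a - b into [-L/2, L/2).
    d = a - b
    return d - box_lengths * np.round(d / box_lengths)

box = np.array([10.0, 10.0, 10.0])
neighbor = np.array([9.9, 5.0, 5.0])
atom = np.array([0.1, 5.0, 5.0])
print(neighbor - atom)                          # [9.8 0.  0. ]  (wrong, spans the box)
print(minimum_image_diff(neighbor, atom, box))  # [-0.2 0.  0. ] (physical vector)
```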
```diff
@@ -4,7 +4,6 @@ from typing import Optional
 import numpy as np
 from numpy.typing import ArrayLike, NDArray
 from numpy.polynomial.polynomial import Polynomial as Poly
-import math
 from scipy.spatial import KDTree
 import pandas as pd
 import multiprocessing as mp
@@ -49,7 +48,7 @@ def _pbc_points_reduced(
 
 def _build_tree(points, box, r_max, pore_geometry):
     if np.all(np.diag(np.diag(box)) == box):
-        tree = KDTree(points, boxsize=box)
+        tree = KDTree(points % box, boxsize=box)
         points_pbc_index = None
     else:
         points_pbc, points_pbc_index = _pbc_points_reduced(
@@ -79,8 +78,7 @@ def occupation_matrix(
     z_bins = np.arange(0, box[2][2] + edge_length, edge_length)
     bins = [x_bins, y_bins, z_bins]
     # Trajectory is split for parallel computing
-    size = math.ceil(len(frame_indices) / nodes)
-    indices = [frame_indices[i : i + size] for i in range(0, len(frame_indices), size)]
+    indices = np.array_split(frame_indices, nodes)
     pool = mp.Pool(nodes)
     results = pool.map(
         partial(_calc_histogram, trajectory=trajectory, bins=bins), indices
@@ -274,7 +272,11 @@ def distance_resolved_energies(
 
 
 def find_energy_maxima(
-    energy_df: pd.DataFrame, r_min: float, r_max: float
+    energy_df: pd.DataFrame,
+    r_min: float,
+    r_max: float,
+    r_eval: float = None,
+    degree: int = 2,
 ) -> pd.DataFrame:
     distances = []
     energies = []
@@ -283,6 +285,9 @@ def find_energy_maxima(
         x = np.array(data_d["r"])
         y = np.array(data_d["energy"])
         mask = (x >= r_min) * (x <= r_max)
-        p3 = Poly.fit(x[mask], y[mask], deg=2)
-        energies.append(np.max(p3(np.linspace(r_min, r_max, 1000))))
+        p3 = Poly.fit(x[mask], y[mask], deg=degree)
+        if r_eval is None:
+            energies.append(np.max(p3(np.linspace(r_min, r_max, 1000))))
+        else:
+            energies.append(p3(r_eval))
     return pd.DataFrame({"d": distances, "energy": energies})
```
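Three independent fixes here. `np.array_split(frame_indices, nodes)` always yields exactly `nodes` chunks, whereas the manual slicing could produce fewer for uneven lengths, leaving pool workers idle. `find_energy_maxima` gains a polynomial `degree` and an optional `r_eval` at which to evaluate the fit instead of taking the window maximum. And `KDTree(points % box, boxsize=box)` folds coordinates back into the box, because SciPy's periodic KDTree requires every coordinate in [0, L) and raises a ValueError otherwise, while trajectory frames can contain slightly out-of-box positions. For example:

```python
import numpy as np
from scipy.spatial import KDTree

box = np.array([10.0, 10.0, 10.0])
points = np.array([[10.2, 5.0, 5.0],   # slightly outside the box
                   [-0.1, 5.0, 5.0]])
# KDTree(points, boxsize=box) would raise ValueError here.
tree = KDTree(points % box, boxsize=box)
dist, _ = tree.query([0.0, 5.0, 5.0])
print(dist)  # 0.1 -- nearest image found across the boundary
```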
```diff
@@ -7,7 +7,7 @@ from numpy.typing import ArrayLike, NDArray
 
 from itertools import product
 
-from .logging import logger
+from .logging_util import logger
 
 if TYPE_CHECKING:
     from mdevaluate.coordinates import CoordinateFrame
@@ -149,32 +149,21 @@ def nojump(frame: CoordinateFrame, usecache: bool = True) -> CoordinateFrame:
             i0 = 0
             delta = 0
 
-        delta = (
-            delta
-            + np.array(
-                np.vstack(
-                    [m[i0 : abstep + 1].sum(axis=0) for m in reader.nojump_matrices]
-                ).T
-            )
-            @ frame.box
-        )
+        delta = (delta
+            + np.vstack(
+                [m[i0 : abstep + 1].sum(axis=0) for m in reader.nojump_matrices]
+            ).T)
 
         reader._nojump_cache[abstep] = delta
         while len(reader._nojump_cache) > NOJUMP_CACHESIZE:
             reader._nojump_cache.popitem(last=False)
-        delta = delta[selection, :]
     else:
-        delta = (
-            np.array(
-                np.vstack(
-                    [
-                        m[: frame.step + 1, selection].sum(axis=0)
-                        for m in reader.nojump_matrices
-                    ]
-                ).T
-            )
-            @ frame.box
-        )
+        delta = np.vstack(
+                [m[: frame.step + 1, selection].sum(axis=0) for m in reader.nojump_matrices]
+                ).T
+
+    delta = delta[selection, :]
+    delta = np.array(delta @ frame.box)
     return frame - delta
 
 
```
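The restructuring defers the `@ frame.box` multiplication to a single place after the branch: the cached quantity is now the accumulated jump-count matrix, which is independent of the (possibly fluctuating) box, and only the final conversion to Cartesian displacements uses the current frame's box. Schematically:

```python
import numpy as np

# Jump counts are per-axis integers summed over nojump matrices; the Cartesian
# displacement is obtained once at the end via the box matrix of the frame.
jump_counts = np.array([[1, 0, 0],
                        [0, -2, 1]])   # (n_atoms, 3)
box = np.diag([3.0, 3.0, 4.5])         # hypothetical orthorhombic box matrix
print(jump_counts @ box)               # [[ 3.   0.   0. ]
                                       #  [ 0.  -6.   4.5]]
```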
```diff
@@ -19,13 +19,13 @@ import MDAnalysis
 from scipy import sparse
 
 from .checksum import checksum
-from .logging import logger
+from .logging_util import logger
 from . import atoms
 from .coordinates import Coordinates
 
 CSR_ATTRS = ("data", "indices", "indptr")
 NOJUMP_MAGIC = 2016
-Group_RE = re.compile("\[ ([-+\w]+) \]")
+Group_RE = re.compile(r"\[ ([-+\w]+) \]")
 
 
 class NojumpError(Exception):
@@ -275,7 +275,7 @@ def load_nojump_matrices(reader: BaseReader):
                 "Loaded Nojump matrices: {}".format(nojump_load_filename(reader))
             )
         else:
-            logger.info("Invlaid Nojump Data: {}".format(nojump_load_filename(reader)))
+            logger.info("Invalid Nojump Data: {}".format(nojump_load_filename(reader)))
     except KeyError:
         logger.info("Removing zip-File: %s", zipname)
         os.remove(nojump_load_filename(reader))
```
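The regex change matters because `"\["` and `"\]"` are invalid escape sequences in a plain string literal: Python keeps the backslash but emits a DeprecationWarning (a SyntaxWarning since 3.12). The raw string is the correct spelling and matches the same text:

```python
import re

Group_RE = re.compile(r"\[ ([-+\w]+) \]")
print(Group_RE.search("[ SOL ]").group(1))  # SOL
```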
```diff
@@ -14,7 +14,7 @@ from scipy.ndimage import uniform_filter1d
 from scipy.interpolate import interp1d
 from scipy.optimize import curve_fit
 
-from .logging import logger
+from .logging_util import logger
 from .functions import kww, kww_1e
 
 
@@ -357,6 +357,37 @@ def quick1etau(t: ArrayLike, C: ArrayLike, n: int = 7) -> float:
     return tau_est
 
 
+def quicknongaussfit(t, C, width=2):
+    """
+    Estimates the time and height of the peak in the non-Gaussian function.
+    C is C(t) the correlation function
+    """
+    def ffunc(t,y0,A_main,log_tau_main,sig_main):
+        main_peak = A_main*np.exp(-(t - log_tau_main)**2 / (2 * sig_main**2))
+        return y0 + main_peak
+
+    # first rough estimate, the closest time. This is returned if the interpolation fails!
+    tau_est = t[np.argmax(C)]
+    nG_max = np.amax(C)
+    try:
+        with np.errstate(invalid='ignore'):
+            corr = C[t > 0]
+            time = np.log10(t[t > 0])
+            tau = time[np.argmax(corr)]
+            mask = (time>tau-width/2) & (time<tau+width/2)
+            time = time[mask] ; corr = corr[mask]
+            nG_min = C[t > 0].min()
+            guess = [nG_min, nG_max-nG_min, tau, 0.6]
+            popt = curve_fit(ffunc, time, corr, p0=guess, maxfev=10000)[0]
+            tau_est = 10**popt[-2]
+            nG_max = popt[0] + popt[1]
+    except:
+        pass
+    if np.isnan(tau_est):
+        tau_est = np.inf
+    return tau_est, nG_max
+
+
 def susceptibility(
     time: NDArray, correlation: NDArray, **kwargs
 ) -> tuple[NDArray, NDArray]:
```
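`quicknongaussfit` locates the peak of the non-Gaussian parameter by fitting a Gaussian in log10(t) within a window of `width` decades around the raw maximum, falling back to the raw estimate if the fit fails. A synthetic check (assumes `quicknongaussfit` from the hunk above is defined):

```python
import numpy as np

# Gaussian bump in log-time: peak at tau = 10, height 0.05 + 0.25 = 0.30.
t = np.logspace(-1, 3, 200)
C = 0.05 + 0.25 * np.exp(-(np.log10(t) - 1.0) ** 2 / (2 * 0.6**2))
tau_est, nG_max = quicknongaussfit(t, C)
print(round(tau_est, 1), round(nG_max, 2))  # 10.0 0.3
```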