Merge pull request #10 from glotzerlab/fix/tests
Fix/tests
b-butler authored Jan 16, 2024
2 parents b9f5045 + c91c87b commit 3a0afa7
Showing 32 changed files with 295 additions and 204 deletions.
24 changes: 11 additions & 13 deletions .github/workflows/ci-oldest-reqs.txt
@@ -1,13 +1,11 @@
-click==7.1.2
-coverage==5.3.1
-filelock==3.0.0
-h5py==2.10.0
-numpy==1.19.0
-packaging==15.0
-pandas==1.0.0
-pymongo==3.10.0
-pytest==6.2.1
-pytest-cov==2.10.1
-pytest-xdist==2.5.0
-tables==3.6.1
-tqdm==4.10.0
+bottleneck == 1.3.4
+freud-analysis == 2.7.0
+coverage[toml] == 6.2
+kneed == 0.8.2
+numpy == 1.21.0
+ruptures == 1.1.5
+scikit-learn == 0.24.0
+pandas == 1.1.3
+hypothesis
+pytest == 6.2.1
+pytest-cov[toml] == 2.12.1
6 changes: 6 additions & 0 deletions conftest.py
@@ -15,6 +15,12 @@ def seed():
     return seed


+@pytest.fixture(autouse=True, scope="session")
+def _np_seed():
+    """Set the NumPy random seed for external code."""
+    np.random.seed(3857834)  # noqa: NPY002
+
+
 @pytest.fixture(scope="session")
 def rng():
     """A random number generator for tests that have need for random numbers."""
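The fixture added here seeds NumPy's legacy global RNG once per session, so third-party code that still calls np.random.* behaves deterministically without any test opting in. A minimal sketch of the effect (a hypothetical test, not part of this commit):

import numpy as np


def test_legacy_global_rng_is_seeded():
    # The autouse, session-scoped fixture has already called
    # np.random.seed(3857834), so draws from the legacy global RNG are
    # reproducible across runs for a fixed test order.
    draw = np.random.random()  # noqa: NPY002
    assert 0.0 <= draw < 1.0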
4 changes: 4 additions & 0 deletions dupin/__init__.py
@@ -3,3 +3,7 @@
 __author__ = """Brandon Butler"""
 __email__ = "[email protected]"
 __version__ = "0.0.1"
+
+from . import data, detect, errors, postprocessing, preprocessing
+
+__all__ = ("data", "detect", "errors", "postprocessing", "preprocessing")
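With these eager imports, the subpackages become attributes of dupin at import time; a usage sketch:

import dupin

# No explicit ``import dupin.data`` is needed any longer.
assert hasattr(dupin, "data") and hasattr(dupin, "detect")
print(dupin.__version__)  # 0.0.1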
13 changes: 13 additions & 0 deletions dupin/data/__init__.py
@@ -59,3 +59,16 @@ def generator():
     pass
 """
 from . import aggregate, base, freud, logging, map, reduce, spatial
+from .base import make_generator
+
+__all__ = (
+    "aggregate",
+    "base",
+    "freud",
+    "logging",
+    "map",
+    "reduce",
+    "spatial",
+    "make_generator",
+)
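make_generator is now re-exported from dupin.data. A hedged sketch of promoting a plain function with it (the decorator-style usage and the snapshot attributes are assumptions for illustration, not shown in this diff):

from dupin.data import make_generator


@make_generator
def density(snapshot):
    # ``snapshot`` stands in for whatever per-frame object the pipeline
    # passes through; the attribute names here are placeholders.
    return {"density": len(snapshot.positions) / snapshot.volume}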
8 changes: 4 additions & 4 deletions dupin/data/aggregate.py
@@ -5,12 +5,12 @@
 """


-from typing import Any, Dict, Iterator, Optional, Tuple
+from collections.abc import Iterator
+from typing import Any, Optional

 import numpy as np

-from dupin import errors
-
+from .. import errors
 from . import base, logging

 try:
@@ -58,7 +58,7 @@ def __init__(
         self.logger = logger

     def compute(
-        self, iterator: Iterator[Tuple[Tuple[Any, ...], Dict[str, Any]]]
+        self, iterator: Iterator[tuple[tuple[Any, ...], dict[str, Any]]]
     ):
         """Compute signals from generator across the iterator.
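The signature change is part of the PEP 585 migration (typing.Tuple/typing.Dict to builtin tuple/dict) that runs through this commit. The annotation reads as an iterator of (args, kwargs) pairs, one per frame; a sketch with placeholder names:

from typing import Any

# Hypothetical per-frame inputs: positional and keyword arguments that
# ``compute`` forwards to the wrapped generator, one pair per frame.
trajectory = ["frame0", "frame1", "frame2"]  # placeholder frame objects
frames: list[tuple[tuple[Any, ...], dict[str, Any]]] = [
    ((frame,), {"frame_index": i}) for i, frame in enumerate(trajectory)
]
# aggregator.compute(iter(frames))  # ``aggregator``: an instance of this module's aggregator class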
19 changes: 7 additions & 12 deletions dupin/data/base.py
@@ -3,7 +3,7 @@
 import typing
 from abc import abstractmethod
 from collections.abc import Callable, Sequence
-from typing import Any, Dict, Optional, Union
+from typing import Any, Optional, Union

 import numpy as np
 import numpy.typing as npt
@@ -12,7 +12,7 @@
 GeneratorLike = Union[
     "Generator",
     "DataMap",
-    typing.Callable[..., Dict[str, Union[float, npt.ArrayLike]]],
+    typing.Callable[..., dict[str, Union[float, npt.ArrayLike]]],
 ]
 GeneratorLike.__doc__ = """
 A type hint for objects that act like data generators for dupin.
@@ -213,7 +213,7 @@ class DataReducer(DataModifier):
     """

     @abstractmethod
-    def compute(self, distribution: npt.ArrayLike) -> Dict[str, float]:
+    def compute(self, distribution: npt.ArrayLike) -> dict[str, float]:
         """Turn a distribution into scalar features.

         Parameters
@@ -284,7 +284,7 @@ class Generator(Callable, PipeComponent):
     @abstractmethod
     def __call__(
         self, *args, **kwargs
-    ) -> Dict[str, Union[float, npt.ArrayLike]]:
+    ) -> dict[str, Union[float, npt.ArrayLike]]:
         """Return the output signal(s) for given inputs.

         This method can have an arbitrary signature in subclasses.
@@ -318,8 +318,6 @@ class CustomMap(DataMap):
     Parameters
     ----------
-    generator : :py:obj:`~.GeneratorLike`
-        A generator like object to transform.
     custom_function : ``callable`` [`numpy.ndarray`, `dict` ]
         A custom callable that takes in a NumPy array and returns a dictionary
         with keys indicating the tranformation and values the transformed
@@ -333,12 +331,11 @@

     def __init__(
         self,
-        generator: GeneratorLike,
         custom_function: typing.Callable[
-            [npt.ArrayLike], Dict[str, np.ndarray]
+            [npt.ArrayLike], dict[str, np.ndarray]
         ],
     ):
-        super().__init__(generator)
+        super().__init__()
         self.function = custom_function

     def compute(self, data: npt.ArrayLike) -> npt.ArrayLike:
@@ -351,8 +348,6 @@ class CustomReducer(DataReducer):
     Parameters
     ----------
-    generator: :py:obj:`~.GeneratorLike`
-        A generator like object to reduce.
     custom_function: ``callable`` [`numpy.ndarray`, `dict` [`str`, `float` ]
         A custom callable that takes in a NumPy array and returns a
         dictionary with keys indicating the reduction and values the reduced
@@ -366,7 +361,7 @@ class CustomReducer(DataReducer):

     def __init__(
         self,
-        custom_function: typing.Callable[[npt.ArrayLike], Dict[str, float]],
+        custom_function: typing.Callable[[npt.ArrayLike], dict[str, float]],
     ):
         super().__init__()
         self.function = custom_function
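Besides the same Dict-to-dict migration, CustomMap.__init__ drops its generator parameter, matching the no-argument super().__init__() call that CustomReducer already uses. A sketch of a callable satisfying the updated GeneratorLike hint (feature names invented):

from typing import Union

import numpy as np
import numpy.typing as npt


def toy_generator() -> dict[str, Union[float, npt.ArrayLike]]:
    # Any callable returning a dict of scalar or array features matches
    # the GeneratorLike alias defined above.
    return {"density": 0.8, "orientations": np.zeros(10)}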
6 changes: 3 additions & 3 deletions dupin/data/freud.py
@@ -13,7 +13,7 @@
 """

 from collections.abc import Sequence
-from typing import Any, Dict, List, Union
+from typing import Any, Union

 import numpy.typing as npt

@@ -70,7 +70,7 @@ class FreudDescriptor(base.Generator):
     def __init__(
         self,
         compute: "freud.util._Compute",
-        attrs: Union[str, List[str], Dict[str, str]],
+        attrs: Union[str, list[str], dict[str, str]],
         compute_method: str = "compute",
     ) -> None:
         if not hasattr(compute, compute_method):
@@ -86,7 +86,7 @@ def __init__(

     def __call__(
         self, *args: Any, **kwargs: Any
-    ) -> Dict[str, Union[float, npt.ArrayLike]]:
+    ) -> dict[str, Union[float, npt.ArrayLike]]:
         """Return computed attributes specified in a dictionary.

         The keys of the dictionary are the attributes specified, unless a `dict`
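Only annotations change here. For context, a hedged usage sketch of FreudDescriptor with a dict-valued attrs, which renames the computed attribute in the output (the freud Steinhardt parameters are illustrative, not taken from this commit):

import freud

from dupin.data.freud import FreudDescriptor

# Expose freud's ``particle_order`` attribute under the feature name "q6".
descriptor = FreudDescriptor(
    compute=freud.order.Steinhardt(l=6),
    attrs={"particle_order": "q6"},
)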
18 changes: 12 additions & 6 deletions dupin/data/logging.py
@@ -23,7 +23,7 @@

 import numpy as np

-from dupin import errors
+from .. import errors

 try:
     import pandas as pd
@@ -71,8 +71,7 @@ def end_frame(self):
         self._current_frame.setdefault(
             self._context_key, self._current_context
         )
-        if self._current_frame:
-            self._data.append(self._current_frame)
+        self._data.append(self._current_frame)
         self._reset()

     def _reset(self):
@@ -104,19 +103,26 @@ def to_dataframe(self):
         -------
         This only works for floating point logged values.
         """
-        # TODO: Extend to other dtypes?
-        if len(self._data) == 0:
+        frame_data = self._first_non_empty(self._data)
+        if frame_data is None:
             return pd.DataFrame()
-        frame_data = self._data[0]
         column_index = pd.MultiIndex.from_tuples(
             _create_column_index(frame_data)
         )
+        # TODO: Extend to other dtypes?
         data_arr = _log_data_to_array(
             self._data,
             np.empty((len(self._data), len(column_index)), dtype=float),
         )
         return pd.DataFrame(data_arr, columns=column_index)
+
+    @staticmethod
+    def _first_non_empty(data: list[dict]):
+        for d in data:
+            if len(d) > 0:
+                return d
+        return None


 def _create_column_index(log_data):
     """Yield tuples of keys for creating a multi-index from a nested dict."""
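This is the behavioral fix in the file: end_frame no longer drops empty frames (keeping logged rows aligned with the trajectory), and to_dataframe keys its columns off the first non-empty frame rather than blindly using the first one. A standalone sketch of the helper's contract:

def first_non_empty(data: list[dict]):
    # Mirrors the ``_first_non_empty`` staticmethod added above.
    for d in data:
        if len(d) > 0:
            return d
    return None


frames = [{}, {}, {"feature": {"mean": 1.0}}]
assert first_non_empty(frames) == {"feature": {"mean": 1.0}}
assert first_non_empty([{}, {}]) is None  # to_dataframe then returns an empty DataFrame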
9 changes: 6 additions & 3 deletions dupin/data/map.py
@@ -8,7 +8,7 @@
 map can be found in `dupin.data.spatial.NeighborAveraging`.
 """

-from typing import Dict, List, Union
+from typing import Union

 import numpy as np
 import numpy.typing as npt
@@ -72,14 +72,17 @@ class Tee(base.DataMap):

     def __init__(
         self,
-        maps: List[base.DataMap],
+        maps: list[base.DataMap],
     ):
+        if len(maps) == 0:
+            msg = "Cannot have empty maps sequence."
+            raise ValueError(msg)
         self._maps = maps
         super().__init__()

     def compute(
         self, distribution: npt.ArrayLike
-    ) -> Dict[str, Union[float, np.ndarray]]:
+    ) -> dict[str, Union[float, np.ndarray]]:
         """Run all composed map computes."""
         processed_data = {}
         for map_ in self._maps:
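Tee now fails fast on an empty maps sequence instead of silently producing a map that emits nothing. A usage sketch:

from dupin.data import map as dmap

try:
    dmap.Tee(maps=[])
except ValueError as err:
    print(err)  # Cannot have empty maps sequence.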
[Diffs for the remaining 23 changed files are not shown here.]
