diff --git a/.cruft.json b/.cruft.json index 6c44a3f1..7b9f5b51 100644 --- a/.cruft.json +++ b/.cruft.json @@ -1,7 +1,7 @@ { "template": "https://github.com/wpk-nist-gov/cookiecutter-pypackage.git", - "commit": "10a9fe1a4f0b341184e41953433c344020f92c72", - "checkout": "feature/markdown", + "commit": "49d100c749e6cfa2cba3460ed06e4a06be07cc63", + "checkout": "feature/nox", "skip": [ "src/thermoextrap/__init__.py", "docs/examples/usage/demo.ipynb", @@ -14,7 +14,7 @@ "email": "wpk@nist.gov", "github_username": "usnistgov", "pypi_username": "wpk-nist", - "conda_channel": "wpk-nist", + "conda_channel": "conda-forge", "project_name": "thermoextrap", "project_slug": "thermoextrap", "_copy_without_render": [ diff --git a/.gitignore b/.gitignore index 1b15b112..9fcae012 100644 --- a/.gitignore +++ b/.gitignore @@ -38,6 +38,7 @@ pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ +.nox/ .coverage .coverage.* .cache @@ -106,7 +107,10 @@ ENV/ pyrightconfig.json .autoenv.zsh .autoenv_leave.zsh +.noxconfig.toml +cruft.patch /docs/**/generated/ /monkeytype.sqlite3 -/dist-conda/* -!/dist-conda/Makefile +/dist-conda/ +/tmp/ +/thermoextrap-feedstock*/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2366eeb8..ef42b3c5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -52,7 +52,7 @@ repos: - id: blacken-docs additional_dependencies: - black==23.3.0 - exclude: ^README.md + # exclude: ^README.md - repo: https://github.com/nbQA-dev/nbQA rev: 1.7.0 hooks: diff --git a/CHANGELOG.md b/CHANGELOG.md index 67d5833f..33e0e1f5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,5 @@ + + # Changelog Changelog for `thermoextrap` @@ -9,6 +11,23 @@ See the fragment files in +## v0.4.0 — 2023-06-15 + +### Added + +- Package now available on conda-forge + +- Now support python3.11 +- Bumped pymbar version to pymbar>=4.0 + +### Changed + +- Switched from tox to nox for testing. + +### Deprecated + +- No longer support pymbar < 4.0 + ## v0.3.0 — 2023-05-03 ### Changed diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7d3332b4..574b33b1 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -43,7 +43,7 @@ If you are proposing a feature: - Explain in detail how it would work. - Keep the scope as narrow as possible, to make it easier to implement. - Remember that this is a volunteer-driven project, and that contributions are - welcome :) + welcome! ## Get Started @@ -55,15 +55,17 @@ If you are proposing a feature: [conda-fast-setup]: https://www.anaconda.com/blog/a-faster-conda-for-a-growing-community [pre-commit]: https://pre-commit.com/ +[nox]: https://github.com/wntrblm/nox +[noxopt]: https://github.com/rmorshea/noxopt [tox]: https://tox.wiki/en/latest/ -[tox-conda]: https://github.com/tox-dev/tox-conda [cruft]: https://github.com/cruft/cruft -[conda-merge]: https://github.com/amitbeka/conda-merge +[cog]: https://github.com/nedbat/cog [git-flow]: https://github.com/nvie/gitflow [scriv]: https://github.com/nedbat/scriv [conventional-style]: https://www.conventionalcommits.org/en/v1.0.0/ [commitizen]: https://github.com/commitizen-tools/commitizen [nb_conda_kernels]: https://github.com/Anaconda-Platform/nb_conda_kernels +[pyproject2conda]: https://github.com/wpk-nist-gov/pyproject2conda This project uses a host of tools to (hopefully) make development easier. We recommend installing some of these tools system wide. For this, we recommend @@ -75,27 +77,39 @@ details. 
Additional tools are:

- [pre-commit]
-- [tox] and [tox-conda]
+- [nox] with [noxopt]
- [cruft]
-- [conda-merge]
- [scriv]
+- [commitizen] (optional)
+- [pyproject2conda] (optional)
+- [cog] (optional)

These are set up using the following:

```console
-condax install pre-commit
-condax install tox
-condax inject tox tox-conda
-condax install cruft
-condax install conda-merge
-condax install commitizen
+condax/pipx install pre-commit
+condax/pipx install cruft
+condax/pipx install commitizen # optional
pipx install scriv
+pipx install pyproject2conda # optional
+condax/pipx install cogapp # optional
```

-Alternatively, you can install these dependencies using:
+If using pipx, nox can be installed with:

-```console
-conda env update -n {env-name} environment/tools.yaml
+```bash
+pipx install nox
+pipx inject nox ruamel.yaml
+pipx inject nox noxopt
+```
+
+If using condax, you'll need to use:
+
+```bash
+condax install nox
+condax inject nox ruamel.yaml
+conda activate ~/.condax/nox
+pip install noxopt
```

### Getting the repo

@@ -127,49 +141,32 @@ Ready to contribute? Here's how to set up `thermoextrap` for local development.

- Create development environment. There are two options to create the
development environment.

- - The recommended method is to use tox by using either:
+ - The recommended method is to use nox. First you'll need to create the
+ environment files using:

```bash
- tox -e dev
+ nox -e pyproject2conda
```

- or
+ Then run:

```bash
- make dev-env
+ nox -e dev
```

- These create a development environment located at `.tox/dev`.
-
- ```bash
- make tox-ipykernel-display-name
- ```
-
- This will add a meaningful display name for the kernel (assuming you're
- using [nb_conda_kernels])
+ This creates a development environment located at `.nox/dev`.

- Alternatively, you can create a centrally located conda environment using
the command:

```bash
- make mamba-dev
+ conda/mamba env create -n {env-name} -f environment/dev.yaml
```

- This will create a conda environment 'thermoextrap-env' in the default
- location.
-
- To install an editable version of the current package:

```bash
pip install -e . --no-deps
```

- or
-
- ```bash
- make install-dev
- ```
-
- Initiate [pre-commit] with:

```bash
@@ -210,16 +207,10 @@ Ready to contribute? Here's how to set up `thermoextrap` for local development.
pytest
```

- To test against multiple python versions, use tox:
-
- ```bash
- tox
- ```
-
- or using the `make`:
+ To test against multiple python versions, use [nox]:

```bash
- make test-all
+ nox -s test
```

Additionally, you should run the following:

@@ -255,26 +246,15 @@ Ready to contribute? Here's how to set up `thermoextrap` for local development.

### Dependency management

-Dependencies need to be placed in a few locations, which depend on the nature of
-the dependency.
-
-- Package dependency: `environment.yaml` and `dependencies` section of
- `pyproject.toml`
-- Documentation dependency: `environment/docs-extras.yaml` and `test` section of
- `pyproject.toml`
-- Development dependency: `environment/dev-extras.yaml` and `dev` section of
- `pyproject.toml`
-
-Note that total yaml files are build using [conda-merge]. For example,
-`environment.yaml` is combined with `environment/docs-extras.yaml` to produce
-`environment/docs.yaml`. This is automated in the `Makefile`. You can also run,
-after doing any updates,
+We use [pyproject2conda] to handle conda `environment.yaml` files. This extracts
+the dependencies from `pyproject.toml`. See [pyproject2conda] for more info. To make
+the `environment.yaml` files, run:

```bash
-make environment-files-build
+nox -s pyproject2conda -- [--pyproject2conda-force]
```

-which will rebuild all the needed yaml files.
+The option in brackets is optional; pass it to force rebuilding the files.

## Pull Request Guidelines

@@ -286,117 +266,137 @@ Before you submit a pull request, check that it meets these guidelines:
 list in CHANGELOG.md. You should use [scriv] for this.
-- The pull request should work for Python 3.8, 3.9, 3.10.
+- The pull request should work for Python 3.8, 3.9, 3.10, and 3.11.

-## Building the docs
+## ipykernel

-We use [tox] to isolate the documentation build. Useful commands are as follows.
+The environments created by nox `dev` and `docs` will try to add meaningful
+display names for ipykernel (assuming you're using [nb_conda_kernels]).

-- Build the docs:
+## Building the docs

- ```bash
- tox -e docs -- build
- ```
+We use [nox] to isolate the documentation build. Specific tasks can be run with

-- Spellcheck the docs:
+```bash
+nox -s docs -- -d [commands]
+```

- ```bash
- tox -e docs -- spelling
- ```
+where `commands` can be one of:

-- Create a release of the docs:
+- clean : remove old doc build
+- build/html : build html documentation
+- spelling : check spelling
+- linkcheck : check the links
+- symlink : rebuild symlinks from `examples` to `docs/examples`
+- release : make pages branch for documentation hosting (using
+ [ghp-import](https://github.com/c-w/ghp-import))
+- livehtml : live documentation updates
+- open : open the documentation in a web browser

- ```bash
- tox -e docs -- release
- ```
+## Testing with nox

- If you make any changes to `docs/examples`, you should run:
+The basic command is:

- ```bash
- make docs-examples-symlink
- ```
+```bash
+nox -s test -- [--test-opts] [--no-cov]
+```

- to update symlinks from `docs/examples` to `examples`.
+where you can pass in additional pytest options (properly escaped) via
+`--test-opts`. For example:

- After this, the docs can be pushed to the correct branch for distribution.
+```bash
+nox -s test -- --test-opts "'-v'"
+# or
+nox -s test -- --test-opts "\-v"
+```

-- Live documentation updates using
+## Building distribution for conda

- ```bash
- make docs-livehtml
- ```
+[grayskull]: https://github.com/conda/grayskull

-## Using tox
+For the most part, we use [grayskull] to create the conda recipe. However, I've
+had issues getting it to play nice with `pyproject.toml` for some of the 'extra'
+variables. So, we use grayskull to build the majority of the recipe, and append
+the file `.recipe-append.yaml`. For some edge cases (install name different from
+package name, etc), you'll need to manually edit this file to create the final
+recipe.

-The package is setup to use tox to test, build and release pip and conda
-distributions, and release the docs. Most of these tasks have a command in the
-`Makefile`. To test against multiple versions, use:
+The basic command is:

```bash
-make test-all
+nox -s dist-conda -- -c [command]
```

-To build the documentation in an isolated environment, use:
+Where `command` is one of:

-```bash
-make docs-build
-```
+- clean
+- recipe : create recipe via [grayskull]
+- build : build the distribution

-To release the documentation use:
+To upload the recipe, you'll need to run an external command like:

```bash
-make docs-release release_args='-m "commit message" -r origin -p'
+nox -s dist-conda -- --dist-conda-run "anaconda upload PATH-TO-TARBALL"
```

-Where posargs is are passed to ghp-import. Note that the branch created is
-called `nist-pages`. This can be changed in `tox.ini`.
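For reference, the `test` session used above is defined in `noxfile.py` via [noxopt], which turns annotated session parameters into CLI flags. Below is a minimal sketch of such a session; it mirrors the `--test-opts` flag above, but it is illustrative only, not this project's actual noxfile:

```python
# Minimal noxopt session sketch (illustrative; not this repo's noxfile.py).
# Assumes nox and noxopt are installed in the environment that runs nox.
from typing import Annotated

from noxopt import NoxOpt, Option, Session

group = NoxOpt()


@group.session
def test(
    session: Session,
    # noxopt exposes this parameter as the CLI flag `--test-opts`
    test_opts: Annotated[list[str], Option(nargs="*", type=str)] = [],
) -> None:
    """Install the package and run pytest, forwarding any --test-opts."""
    session.install("-e", ".", "pytest")
    session.run("pytest", *test_opts)
```

This is also why pytest flags have to be escaped as shown above: everything after the `--` is first parsed by noxopt's argparse-based CLI before it reaches pytest.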
+## Building distribution for pypi

-To build the distribution, use:
+The basic command is:

```bash
-make dist-pypi-[build-testrelease-release]
+nox -s dist-pypi -- -p [command]
```

-where `build` build to distro, `testrelease` tests putting on `testpypi` and
-release puts the distro on pypi.
+where `command` is one of:
+
+- clean : clean out old distribution
+- build : build distribution (if you specify only this, clean will be called first)
+- testrelease : upload to testpypi
+- release : upload to pypi
+
+## Testing pypi or conda installs

-To build the conda distribution, use:
+Run:

```bash
-make dist-conda-[recipe, build]
+nox -s testdist-pypi -- --version [version]
```

-where `recipe` makes the conda recipe (using grayskull), and `build` makes the
-distro. This can be manually added to a channel.
-
-To test the created distributions, you can use one of:
+to test a specific version from pypi and

```bash
-tox -e test-dist-[pypi, conda]-[local,remote]-py[38,39,...]
+nox -s testdist-conda -- --version [version]
```

-or
+to do likewise from conda.
+
+## Type checking
+
+Run:

```bash
-make test-dist-[pypi, conda]-[local,remote] py=[38, 39, 310]
+nox -s typing -- -m [commands] [options]
```

-where one options in the brackets should be choosen.
-
## Package version

[setuptools_scm]: https://github.com/pypa/setuptools_scm

-Versioning is handled with [setuptools_scm].The pacakge version is set by the
-git tag. For convenience, you can override the version in the makefile (calling
-tox) by setting `version=v{major}.{minor}.{micro}`. This is useful for updating
-the docs, etc.
+Versioning is handled with [setuptools_scm]. The package version is set by the
+git tag. For convenience, you can override the version by passing
+`--version ...` to nox. This is useful for updating the docs, etc.

-## Creating conda recipe
+## Notes on [nox]

-[grayskull]: https://github.com/conda/grayskull
+One downside of using [tox] with this particular workflow is the need for
+multiple scripts/makefiles, while with [nox], most everything is self-contained
+in the file `noxfile.py`. [nox] also allows for a mix of conda and virtualenv
+environments.

-For the most part, we use [grayskull] to create the conda recipe. However, I've
-had issues getting it to play nice with `pyproject.toml` for some of the 'extra'
-variables. So, we use grayskull to build the majority of the recipe, and append
-the file `.recipe-append.yaml`. For some edge cases (install name different from
-package name, etc), you'll need to manually edit this file to create the final
-recipe.
+## Serving the documentation
+
+To view the documentation with the js headers/footers, you'll need to serve it:
+
+```bash
+python -m http.server -d docs/_build/html
+```
+
+Then open the address `localhost:8000` in a web browser.
diff --git a/Makefile b/Makefile
index d875f50d..8da53d37 100644
--- a/Makefile
+++ b/Makefile
@@ -43,7 +43,7 @@ clean-pyc: ## remove Python file artifacts
 find . -name '__pycache__' -exec rm -fr {} +

 clean-test: ## remove test and coverage artifacts
-	rm -fr .tox/
+	rm -fr .nox/
 rm -f .coverage
 rm -fr htmlcov/
 rm -fr .pytest_cache
@@ -103,7 +103,7 @@ pre-commit-codespell: ## run codespell.
Note that this imports allowed words fro ################################################################################ .PHONY: user-venv user-autoenv-zsh user-all user-venv: ## create .venv file with name of conda env - echo $${PWD}/.tox/dev > .venv + echo $${PWD}/.nox/dev > .venv user-autoenv-zsh: ## create .autoenv.zsh files echo conda activate $$(cat .venv) > .autoenv.zsh @@ -119,6 +119,9 @@ user-all: user-venv user-autoenv-zsh ## runs user scripts test: ## run tests quickly with the default Python pytest -x -v +test-accept: ## run tests and accept doctest results. (using pytest-accept) + DOCFILLER_SUB=False pytest -v --accept + coverage: ## check code coverage quickly with the default Python coverage run --source thermoextrap -m pytest coverage report -m @@ -141,159 +144,109 @@ version: version-scm version-import ################################################################################ # Environment files ################################################################################ -ENVIRONMENTS = $(addsuffix .yaml,$(addprefix environment/, dev docs test)) -PRETTIER = bash scripts/run-prettier.sh - -environment/%.yaml: environment.yaml environment/%-extras.yaml ## create combined environment/{dev,docs,test}.yaml - conda-merge $^ > $@ - $(PRETTIER) $@ - -environment/dev.yaml: ## development environment yaml file -environment/test.yaml: ## testing environment yaml file -enviornment/docs.yaml: ## docs environment yaml file - - -# special for linters -environment/lint.yaml: environment.yaml $(addsuffix .yaml, $(addprefix environment/, test-extras lint-extras)) ## mypy environment - echo $^ - conda-merge $^ > $@ - $(PRETTIER) $@ - -ENVIRONMENTS += environment/lint.yaml - .PHONY: environment-files-clean environment-files-clean: ## clean all created environment/{dev,docs,test}.yaml -rm $(ENVIRONMENTS) 2> /dev/null || true .PHONY: environment-files-build -environment-files-build: $(ENVIRONMENTS) ## rebuild all environment files +environment-files-build: pyproject.toml ## rebuild all environment files + nox -s pyproject2conda + +environment/%.yaml: pyproject.toml + nox -s pyproject2conda ################################################################################ # virtual env ################################################################################ -.PHONY: mamba-env mamba-dev mamba-env-update mamba-dev-update +.PHONY: mamba-env-update mamba-dev-update -mamba-env: environment.yaml ## create base environment +mamba-dev: environment/dev.yaml environment-files-build ## create development environment mamba env create -f $< -mamba-env-update: environment.yaml ## update base environment - mamba env update -f $< - -mamba-dev: environment/dev.yaml ## create development environment - mamba env create -f $< - -mamba-dev-update: environment/dev.yaml ## update development environment +mamba-dev-update: environment/dev.yaml environment-files-build ## update development environment mamba env update -f $< ################################################################################ -# TOX +# NOX ############################################################################### -tox_args?=-v -version?= -TOX=CONDA_EXE=mamba SETUPTOOLS_SCM_PRETEND_VERSION=$(version) tox $(tox_args) - -.PHONY: tox-ipykernel-display-name -tox-ipykernel-display-name: ## Update display-name for any tox env with ipykernel - bash ./scripts/tox-ipykernel-display-name.sh thermoextrap - ## dev env +NOX=nox .PHONY: dev-env -dev-env: environment/dev.yaml ## create development environment using tox - tox -e dev 
+dev-env: environment/dev.yaml ## create development environment using nox + $(NOX) -e dev ## testing .PHONY: test-all -test-all: environment/test.yaml ## run tests on every Python version with tox. can pass posargs=... - $(TOX) -- $(posargs) +test-all: environment/test.yaml ## run tests on every Python version with nox. + $(NOX) -s test ## docs -.PHONY: docs-examples-symlink -docs-examples-symlink: ## create symlinks to notebooks from /examples/ to /docs/examples. - bash ./scripts/docs-examples-symlinks.sh - - .PHONY: docs-build docs-release docs-clean docs-command docs-build: ## build docs in isolation - $(TOX) -e docs -- build + $(NOX) -s docs -- -d build docs-clean: ## clean docs rm -rf docs/_build/* rm -rf docs/generated/* rm -rf docs/reference/generated/* docs-clean-build: docs-clean docs-build ## clean and build -docs-release: ## release docs. use release_args=... to override stuff - $(TOX) -e docs -- release -docs-command: ## run command with command=... - $(TOX) -e docs -- command +docs-release: ## release docs. + $(NOX) -s docs -- release +docs-command: ## run arbitrary command with command=... + $(NOX) -s docs -- --docs-run $(command) .PHONY: .docs-spelling docs-nist-pages docs-open docs-livehtml docs-clean-build docs-linkcheck docs-spelling: ## run spell check with sphinx - $(TOX) -e docs -- spelling + $(NOX) -s docs -- -d spelling docs-livehtml: ## use autobuild for docs - $(TOX) -e docs -- livehtml + $(NOX) -s docs -- -d livehtml docs-open: ## open the build - $(BROWSER) docs/_build/html/index.html + $(NOX) -s docs -- -d open docs-linkcheck: ## check links - $(TOX) -e docs -- linkcheck + $(NOX) -s docs -- -d linkcheck docs-build docs-release docs-command docs-clean docs-livehtml docs-linkcheck: environment/docs.yaml -## linting -.PHONY: lint-mypy lint-pyright lint-pytype lint-all lint-command -lint-mypy: ## run mypy mypy_args=... - $(TOX) -e lint -- mypy -lint-pyright: ## run pyright pyright_args=... - $(TOX) -e lint -- pyright -lint-pytype: ## run pytype pytype_args=... - $(TOX) -e lint -- pytype -lint-all: - $(TOX) -e lint -- all -lint-command: - $(TOX) -e lint -- command - -lint-mypy lint-pyright lint-pytype lint-all lint-command: environment/lint.yaml +## typing +.PHONY: typing-mypy typing-pyright typing-pytype typing-all typing-command +typing-mypy: ## run mypy mypy_args=... + $(NOX) -s typing -- -m mypy +typing-pyright: ## run pyright pyright_args=... + $(NOX) -s typing -- -m pyright +typing-pytype: ## run pytype pytype_args=... + $(NOX) -s typing -- -m pytype +typing-all: + $(NOX) -s typing -- -m mypy pyright pytype +typing-command: + $(NOX) -s typing -- --typing-run $(command) +typing-mypy typing-pyright typing-pytype typing-all typing-command: environment/typing.yaml ## distribution .PHONY: dist-pypi-build dist-pypi-testrelease dist-pypi-release dist-pypi-command dist-pypi-build: ## build dist - $(TOX) -e dist-pypi -- build + $(NOX) -s dist-pypi -- -p build dist-pypi-testrelease: ## test release on testpypi - $(TOX) -e dist-pypi -- testrelease + $(NOX) -s dist-pypi -- -p testrelease dist-pypi-release: ## release to pypi, can pass posargs=... - $(TOX) -e dist-pypi -- release + $(NOX) -s dist-pypi -- -p release dist-pypi-command: ## run command with command=... - $(TOX) -e dist-pypi -- command + $(NOX) -s dist-pypi -- --dist-pypi-run $(command) dist-pypi-build dist-pypi-testrelease dist-pypi-release dist-pypi-command: environment/dist-pypi.yaml .PHONY: dist-conda-recipe dist-conda-build dist-conda-command dist-conda-recipe: ## build conda recipe can pass posargs=... 
-	$(TOX) -e dist-conda -- recipe
+	$(NOX) -s dist-conda -- -c recipe
dist-conda-build: ## build conda recipe can pass posargs=...
-	$(TOX) -e dist-conda -- build
+	$(NOX) -s dist-conda -- -c build
dist-conda-command: ## run command with command=...
-	$(TOX) -e dist-conda -- command
+	$(NOX) -s dist-conda -- --dist-conda-run $(command)

dist-conda-build dist-conda-recipe dist-conda-command: environment/dist-conda.yaml

-## test distribution
-.PHONY: testdist-pypi-remote testdist-conda-remote testdist-pypi-local testdist-conda-local
-
-py?=310
-testdist-pypi-remote: ## testdist-pypi-remote: ## test pypi install, can run as `make test-dist-pypi-remote py=39` to run test-dist-pypi-local-py39. Can specify version setting, eg, TEST_VERSION='==0.1.0'. Note that the the format should be '=={version}'.
-	$(TOX) -e $@-py$(py) -- $(posargs)
-testdist-conda-remote: ## test conda install, can run as `make test-dist-conda-remote py=39` to run test-dist-conda-local-py39
-	$(TOX) -e $@-py$(py) -- $(poasargs)
-testdist-pypi-local: ## test pypi install, can run as `make test-dist-pypi-local py=39` to run test-dist-pypi-local-py39
-	$(TOX) -e $@-py$(py) -- $(posargs)
-testdist-conda-local: ## test conda install, can run as `make test-dist-conda-local py=39` to run test-dist-conda-local-py39
-	$(TOX) -e $@-py$(py) -- $(poasargs)
-
-testdist-pypi-remote testdist-conda-remote testdist-pypi-local testdist-conda-local: environment/test.yaml
-

## list all options
-.PHONY: tox-list
-tox-list:
-	$(TOX) -a
+.PHONY: nox-list
+nox-list:
+	$(NOX) --list


################################################################################
diff --git a/README.md b/README.md
index a4174c43..b320eb0f 100644
--- a/README.md
+++ b/README.md
@@ -20,8 +20,8 @@ https://naereen.github.io/badges/
 [docs-link]: https://pages.nist.gov/thermoextrap/
 [repo-badge]: https://img.shields.io/badge/--181717?logo=github&logoColor=ffffff
 [repo-link]: https://github.com/usnistgov/thermoextrap
-[conda-badge]: https://img.shields.io/conda/v/wpk-nist/thermoextrap
-[conda-link]: https://anaconda.org/wpk-nist/thermoextrap
+[conda-badge]: https://img.shields.io/conda/v/conda-forge/thermoextrap
+[conda-link]: https://anaconda.org/conda-forge/thermoextrap
 [license-badge]: https://img.shields.io/pypi/l/cmomy?color=informational
 [license-link]: https://github.com/usnistgov/thermoextrap/blob/main/LICENSE
@@ -82,7 +82,7 @@ request for wanted features and suggestions!
 `thermoextrap` may be installed with either (recommended)

 ```bash
-conda install -c conda-forge thermoextrap
+conda install -c conda-forge thermoextrap
 ```

 or
diff --git a/docs/installation.md b/docs/installation.md
index ca43cee1..d524e602 100644
--- a/docs/installation.md
+++ b/docs/installation.md
@@ -11,7 +11,7 @@ pip install thermoextrap
 or, if you use conda, run:

 ```bash
-conda install c wpk-nist thermoextrap
+conda install -c conda-forge thermoextrap
 ```

 ## Additional dependencies
@@ -27,7 +27,7 @@ If using conda, then you'll have to manually install some dependencies. For
 example, you can run:

 ```bash
-conda install bottleneck dask pymbar<4.0
+conda install bottleneck dask "pymbar>=4.0"
 ```

 At this time, it is recommended to install the Gaussian Process Regression (GPR)
@@ -39,7 +39,7 @@ pip install tensorflow tensorflow-probability gpflow
 ## From sources

-The sources for thermodynamic-extrapolation can be downloaded from the [Github
+The sources for thermodynamic-extrapolation can be downloaded from the [github
 repo].
You can either clone the public repository: diff --git a/environment.yaml b/environment.yaml deleted file mode 100644 index 23bc2014..00000000 --- a/environment.yaml +++ /dev/null @@ -1,22 +0,0 @@ -name: thermoextrap-env -channels: - - conda-forge - - wpk-nist -dependencies: - - python>=3.8 - - sympy - - numpy - - scipy - - xarray - # - openmm - - cmomy>=0.4 - - custom-inherit - - attrs - - module-utilities>=0.2 - - pymbar<4.0 - - pip - - pip: - - tensorflow - - tensorflow-probability - - gpflow<2.6.0 -# prefix: /Users/wpk/miniforge3/envs/gpflow_test diff --git a/environment/base.yaml b/environment/base.yaml new file mode 100644 index 00000000..991dd739 --- /dev/null +++ b/environment/base.yaml @@ -0,0 +1,17 @@ +# +# This file is autogenerated by pyrpoject2conda. +# You should not manually edit this file. +# Instead edit the corresponding pyproject.toml file. +# +channels: + - conda-forge +dependencies: + - python>=3.8 + - numpy >= 1.20 + - xarray >= 0.16 + - sympy + - scipy + - cmomy >= 0.5 + - custom-inherit + - attrs + - module-utilities >= 0.2 diff --git a/environment/conda-spec.txt b/environment/conda-spec.txt deleted file mode 100644 index a15481c3..00000000 --- a/environment/conda-spec.txt +++ /dev/null @@ -1 +0,0 @@ -pymbar<4.0 diff --git a/environment/dev-extras.yaml b/environment/dev-extras.yaml deleted file mode 100644 index fb2f6cd6..00000000 --- a/environment/dev-extras.yaml +++ /dev/null @@ -1,56 +0,0 @@ -dependencies: - # numerics/tests - - matplotlib - # - pandas - # development - # - isort - # - black - # - blackdoc - # - flake8 - # testing - - pytest - - pytest-cov - - pytest-xdist - # - tox-conda - # specials - # - pre-commit - # repl - - ipython - - ipykernel - - nbconvert - # build - - setuptools-scm - # - setuptools - # - twine - # - setuptools_scm_git_archive - # - build - # conda specific - # - conda-build - # - anaconda-client - # - greyskull - # doc stuff - # - sphinx - # - sphinx_rtd_theme - # - recommonmark # if want markdown - # - nbsphinx # if want notebooks - # - sphinxcontrib-spelling - # - sphinx-toolbox - # lsp stuff - # - autoflake - # - pyls-mypy - # - pyls-black - # - pyls-isort - # mypy - # - mypy - # - pytest-mypy - # # Monkeytype: autocreate type hints - # - MonkeyType - # - pip - # - pip: - # - pytest-accept - # - mypy-extensions - # - pytest-monkeytype - # - flake8-mypy - # - attr-utils - # spelling? - # - pyenchant diff --git a/environment/dev.yaml b/environment/dev.yaml index 9361d5dc..192b8ce9 100644 --- a/environment/dev.yaml +++ b/environment/dev.yaml @@ -1,28 +1,38 @@ +# +# This file is autogenerated by pyrpoject2conda. +# You should not manually edit this file. +# Instead edit the corresponding pyproject.toml file. 
+# channels: - conda-forge - - wpk-nist dependencies: - - attrs - - cmomy>=0.4 + - python>=3.8 + - numpy >= 1.20 + - xarray >= 0.16 + - sympy + - scipy + - cmomy >= 0.5 - custom-inherit - - ipykernel - - ipython - - matplotlib - - module-utilities>=0.2 - - nbconvert - - numpy - - pip - - pymbar<4.0 + - attrs + - module-utilities >= 0.2 - pytest - - pytest-cov - pytest-xdist - - python>=3.8 - - scipy + - pytest-cov + - pytest-sugar + - pandas + - pymbar>=4.0 + - mypy - setuptools-scm - - sympy - - xarray + - ipython + - ipykernel + - bottleneck + - matplotlib + - nox + - ruamel.yaml + - pip - pip: - - gpflow<2.6.0 - tensorflow - tensorflow-probability -name: thermoextrap-env + - gpflow < 2.6.0 + - pytest-accept + - noxopt diff --git a/environment/dist-conda.yaml b/environment/dist-conda.yaml index 51b50da8..b941e85b 100644 --- a/environment/dist-conda.yaml +++ b/environment/dist-conda.yaml @@ -1,4 +1,12 @@ +# +# This file is autogenerated by pyrpoject2conda. +# You should not manually edit this file. +# Instead edit the corresponding pyproject.toml file. +# +channels: + - conda-forge dependencies: + - python>=3.8 - anaconda-client - grayskull - conda-build diff --git a/environment/dist-pypi.yaml b/environment/dist-pypi.yaml index ca53c882..76f669ac 100644 --- a/environment/dist-pypi.yaml +++ b/environment/dist-pypi.yaml @@ -1,5 +1,11 @@ +# +# This file is autogenerated by pyrpoject2conda. +# You should not manually edit this file. +# Instead edit the corresponding pyproject.toml file. +# +channels: + - conda-forge dependencies: - - setuptools>=61.2 - - setuptools-scm>=7.0 + - python>=3.8 - twine - build diff --git a/environment/docs-extras.yaml b/environment/docs-extras.yaml deleted file mode 100644 index 1d1fc9a3..00000000 --- a/environment/docs-extras.yaml +++ /dev/null @@ -1,30 +0,0 @@ -channels: - - conda-forge -dependencies: - - setuptools-scm - - ipython - ## package deps - - matplotlib - # - pandas - - pip - - pip: - - pyenchant - # TODO: something goes wonky with sphinx-book-theme and higher versions of sphinx - - ghp-import - - sphinx - ## themes - - sphinx-book-theme - ## error with == 0.13.3 - - pydata-sphinx-theme==0.13.1 - ## optionals - # sphinx-design - - sphinx-copybutton - - sphinxcontrib-spelling - # sphinxext-rediraffe - ## autobuild - - sphinx-autobuild - ## myst - # myst-parser - - myst-nb - ## others - - autodocsumm diff --git a/environment/docs.yaml b/environment/docs.yaml index a44d91b7..0beb0f83 100644 --- a/environment/docs.yaml +++ b/environment/docs.yaml @@ -1,33 +1,35 @@ +# +# This file is autogenerated by pyrpoject2conda. +# You should not manually edit this file. +# Instead edit the corresponding pyproject.toml file. 
+# channels: - conda-forge - - wpk-nist dependencies: - - attrs - - cmomy>=0.4 + - python>=3.8 + - numpy >= 1.20 + - xarray >= 0.16 + - sympy + - scipy + - cmomy >= 0.5 - custom-inherit + - attrs + - module-utilities >= 0.2 + - setuptools-scm - ipython + - pyenchant + - ghp-import + - sphinx + - sphinx-copybutton + - sphinxcontrib-spelling + - sphinx-autobuild + - myst-nb + - sphinx-book-theme + - autodocsumm - matplotlib - - module-utilities>=0.2 - - numpy + - pymbar>=4.0 - pip - - pymbar<4.0 - - python>=3.8 - - scipy - - setuptools-scm - - sympy - - xarray - pip: - - autodocsumm - - ghp-import - - gpflow<2.6.0 - - myst-nb - - pydata-sphinx-theme==0.13.1 - - pyenchant - - sphinx - - sphinx-autobuild - - sphinx-book-theme - - sphinx-copybutton - - sphinxcontrib-spelling - tensorflow - tensorflow-probability -name: thermoextrap-env + - gpflow < 2.6.0 diff --git a/environment/lint-extras.yaml b/environment/lint-extras.yaml deleted file mode 100644 index 728cfdd0..00000000 --- a/environment/lint-extras.yaml +++ /dev/null @@ -1,4 +0,0 @@ -dependencies: - - mypy - ## stubs - # - pandas-stubs diff --git a/environment/lint.yaml b/environment/lint.yaml deleted file mode 100644 index db6a9352..00000000 --- a/environment/lint.yaml +++ /dev/null @@ -1,23 +0,0 @@ -channels: - - conda-forge - - wpk-nist -dependencies: - - attrs - - cmomy>=0.4 - - custom-inherit - - module-utilities>=0.2 - - mypy - - numpy - - pandas - - pip - - pymbar<4.0 - - pytest - - python>=3.8 - - scipy - - sympy - - xarray - - pip: - - gpflow<2.6.0 - - tensorflow - - tensorflow-probability -name: thermoextrap-env diff --git a/environment/lock/py310.yaml b/environment/lock/py310.yaml new file mode 100644 index 00000000..57a9f0ca --- /dev/null +++ b/environment/lock/py310.yaml @@ -0,0 +1,5 @@ +channels: + - conda-forge +dependencies: + - python=3.10 + - pip diff --git a/environment/lock/py311.yaml b/environment/lock/py311.yaml new file mode 100644 index 00000000..cf102f67 --- /dev/null +++ b/environment/lock/py311.yaml @@ -0,0 +1,5 @@ +channels: + - conda-forge +dependencies: + - python=3.11 + - pip diff --git a/environment/lock/py38.yaml b/environment/lock/py38.yaml new file mode 100644 index 00000000..ebf67bd1 --- /dev/null +++ b/environment/lock/py38.yaml @@ -0,0 +1,5 @@ +channels: + - conda-forge +dependencies: + - python=3.8 + - pip diff --git a/environment/lock/py39.yaml b/environment/lock/py39.yaml new file mode 100644 index 00000000..b25fb97e --- /dev/null +++ b/environment/lock/py39.yaml @@ -0,0 +1,5 @@ +channels: + - conda-forge +dependencies: + - python=3.9 + - pip diff --git a/environment/test-extras.txt b/environment/test-extras.txt new file mode 100644 index 00000000..94124d3b --- /dev/null +++ b/environment/test-extras.txt @@ -0,0 +1,14 @@ +# +# This file is autogenerated by pyrpoject2conda. +# You should not manually edit this file. +# Instead edit the corresponding pyproject.toml file. +# +pytest +pytest-xdist +pytest-cov +pytest-sugar +pandas +pymbar>=4.0 +tensorflow +tensorflow-probability +gpflow < 2.6.0 diff --git a/environment/test-extras.yaml b/environment/test-extras.yaml index 9f1d0414..071a9435 100644 --- a/environment/test-extras.yaml +++ b/environment/test-extras.yaml @@ -1,3 +1,20 @@ +# +# This file is autogenerated by pyrpoject2conda. +# You should not manually edit this file. +# Instead edit the corresponding pyproject.toml file. 
+# +channels: + - conda-forge dependencies: + - python>=3.8 - pytest + - pytest-xdist + - pytest-cov + - pytest-sugar - pandas + - pymbar>=4.0 + - pip + - pip: + - tensorflow + - tensorflow-probability + - gpflow < 2.6.0 diff --git a/environment/test.yaml b/environment/test.yaml index 613dbbed..c8a060c9 100644 --- a/environment/test.yaml +++ b/environment/test.yaml @@ -1,22 +1,28 @@ +# +# This file is autogenerated by pyrpoject2conda. +# You should not manually edit this file. +# Instead edit the corresponding pyproject.toml file. +# channels: - conda-forge - - wpk-nist dependencies: - - attrs - - cmomy>=0.4 + - python>=3.8 + - numpy >= 1.20 + - xarray >= 0.16 + - sympy + - scipy + - cmomy >= 0.5 - custom-inherit - - module-utilities>=0.2 - - numpy + - attrs + - module-utilities >= 0.2 + - pytest + - pytest-xdist + - pytest-cov + - pytest-sugar - pandas + - pymbar>=4.0 - pip - - pymbar<4.0 - - pytest - - python>=3.8 - - scipy - - sympy - - xarray - pip: - - gpflow<2.6.0 - tensorflow - tensorflow-probability -name: thermoextrap-env + - gpflow < 2.6.0 diff --git a/environment/tools.yaml b/environment/tools.yaml deleted file mode 100644 index 5efd3087..00000000 --- a/environment/tools.yaml +++ /dev/null @@ -1,11 +0,0 @@ -# Additional tools for development. It is recommended to install these with -# condax/pipx, but you can install them in this environment if you'd prefer. -dependencies: - - pre-commit - - tox - - tox-conda - - cruft - - conda-merge - - scriv - # this isn't needed, but can be helpful - # - commitizen diff --git a/environment/typing.yaml b/environment/typing.yaml new file mode 100644 index 00000000..854996d5 --- /dev/null +++ b/environment/typing.yaml @@ -0,0 +1,29 @@ +# +# This file is autogenerated by pyrpoject2conda. +# You should not manually edit this file. +# Instead edit the corresponding pyproject.toml file. 
+# +channels: + - conda-forge +dependencies: + - python>=3.8 + - numpy >= 1.20 + - xarray >= 0.16 + - sympy + - scipy + - cmomy >= 0.5 + - custom-inherit + - attrs + - module-utilities >= 0.2 + - mypy + - pytest + - pytest-xdist + - pytest-cov + - pytest-sugar + - pandas + - pymbar>=4.0 + - pip + - pip: + - tensorflow + - tensorflow-probability + - gpflow < 2.6.0 diff --git a/examples/gpr_active_learning/analysis_polymer.ipynb b/examples/gpr_active_learning/analysis_polymer.ipynb index 9aac61bd..d9358ebd 100644 --- a/examples/gpr_active_learning/analysis_polymer.ipynb +++ b/examples/gpr_active_learning/analysis_polymer.ipynb @@ -4630,7 +4630,7 @@ ], "source": [ "for i in range(len(data_list)):\n", - " this_g = timeseries.statisticalInefficiency(cv_vals[i])\n", + " this_g = timeseries.statistical_inefficiency(cv_vals[i])\n", " print(this_g)" ] }, @@ -6667,7 +6667,7 @@ ], "source": [ "for i in range(len(data_list)):\n", - " this_g = timeseries.statisticalInefficiency(pot_vals[i])\n", + " this_g = timeseries.statistical_inefficiency(pot_vals[i])\n", " print(this_g)" ] }, @@ -6695,7 +6695,7 @@ ], "source": [ "for i in range(len(data_list)):\n", - " this_g = timeseries.statisticalInefficiency(cv_vals[i], pot_vals[i])\n", + " this_g = timeseries.statistical_inefficiency(cv_vals[i], pot_vals[i])\n", " print(this_g)" ] }, @@ -6723,7 +6723,7 @@ ], "source": [ "for i in range(len(data_list)):\n", - " this_g = timeseries.statisticalInefficiency(cv_vals[i] * pot_vals[i])\n", + " this_g = timeseries.statistical_inefficiency(cv_vals[i] * pot_vals[i])\n", " print(this_g)" ] }, @@ -6751,7 +6751,7 @@ ], "source": [ "for i in range(len(data_list)):\n", - " this_g = timeseries.statisticalInefficiency(\n", + " this_g = timeseries.statistical_inefficiency(\n", " (cv_vals[i] - np.average(cv_vals[i])) * (pot_vals[i] - np.average(pot_vals[i]))\n", " )\n", " print(this_g)" diff --git a/examples/gpr_active_learning/environment_active.yml b/examples/gpr_active_learning/environment_active.yml index fdb73988..9caca3d1 100644 --- a/examples/gpr_active_learning/environment_active.yml +++ b/examples/gpr_active_learning/environment_active.yml @@ -15,7 +15,7 @@ dependencies: - xarray - openmm - cmomy - - pymbar<4.0 + - pymbar>=4.0 - netcdf4 - parmed - mdtraj diff --git a/examples/gpr_active_learning/run_active_LJ_NPT.py b/examples/gpr_active_learning/run_active_LJ_NPT.py index 2e3121e2..339f6eec 100644 --- a/examples/gpr_active_learning/run_active_LJ_NPT.py +++ b/examples/gpr_active_learning/run_active_LJ_NPT.py @@ -133,8 +133,8 @@ def update(self, beta, sim_info_files, cv_bias_files, x_files): mu = [] var = [] for k in range(dens_info.shape[1]): - this_g = timeseries.statisticalInefficiency(dens_info[:, k]) - timeseries.subsampleCorrelatedData(np.arange(dens_info.shape[0]), this_g) + this_g = timeseries.statistical_inefficiency(dens_info[:, k]) + timeseries.subsample_correlated_data(np.arange(dens_info.shape[0]), this_g) # Take logarithm to ensure density cannot go negative # Note that default transformation function handles this, modeling log(dens) # and transforming back to density for prediction diff --git a/noxfile.py b/noxfile.py new file mode 100644 index 00000000..2e170950 --- /dev/null +++ b/noxfile.py @@ -0,0 +1,1029 @@ +"""Config file for nox""" +from __future__ import annotations + +import shutil +from dataclasses import replace # noqa +from pathlib import Path +from textwrap import dedent +from typing import Annotated, Any, Callable, Collection, Literal, TypeVar, cast + +import nox +from noxopt import NoxOpt, 
Option, Session + +from tools.noxtools import ( + combine_list_str, + prepend_flag, + session_install_envs, + session_install_envs_lock, + # session_install_package, + session_install_pip, + session_run_commands, + # session_skip_install, + sort_like, + update_target, +) + +# --- nox options ---------------------------------------------------------------------- +ROOT = Path(__file__).parent + +nox.options.reuse_existing_virtualenvs = True +nox.options.sessions = ["test"] + +# --- Options -------------------------------------------------------------------------- + +PACKAGE_NAME = "thermoextrap" +IMPORT_NAME = "thermoextrap" +KERNEL_BASE = "thermoextrap" + +PYTHON_ALL_VERSIONS = ["3.8", "3.9", "3.10", "3.11"] +PYTHON_DEFAULT_VERSION = "3.10" + +# conda/mamba +if shutil.which("mamba"): + CONDA_BACKEND = "mamba" +elif shutil.which("conda"): + CONDA_BACKEND = "conda" +else: + raise ValueError("neigher conda or mamba found") + +SESSION_DEFAULT_KWS = {"python": PYTHON_DEFAULT_VERSION, "venv_backend": CONDA_BACKEND} +SESSION_ALL_KWS = {"python": PYTHON_ALL_VERSIONS, "venv_backend": CONDA_BACKEND} + + +# --- Set PATH to find all python versions --------------------------------------------- + +DEFAULTS: dict[str, Any] = {} + + +def load_nox_config(): + path = Path(".") / ".noxconfig.toml" + if not path.exists(): + return + + import os + from glob import glob + + import tomli + + with path.open("rb") as f: + data = tomli.load(f) + + # python paths + try: + paths = [] + for p in data["nox"]["python"]["paths"]: + paths.extend(glob(os.path.expanduser(p))) + + paths_str = ":".join(map(str, paths)) + os.environ["PATH"] = paths_str + ":" + os.environ["PATH"] + except KeyError: + pass + + # extras: + extras = {"dev": ["nox", "dev"]} + try: + for k, v in data["nox"]["extras"].items(): + extras[k] = v + except KeyError: + pass + + DEFAULTS["environment-extras"] = extras + + # for py in PYTHON_ALL_VERSIONS: + # print(f"which python{py}", shutil.which(f"python{py}")) + + return + + +load_nox_config() + + +# --- noxopt --------------------------------------------------------------------------- +group = NoxOpt(auto_tag=True) + +F = TypeVar("F", bound=Callable[..., Any]) + +DEFAULT_SESSION = cast(Callable[[F], F], group.session(**SESSION_DEFAULT_KWS)) # type: ignore +ALL_SESSION = cast(Callable[[F], F], group.session(**SESSION_ALL_KWS)) # type: ignore + +OPTS_OPT = Option(nargs="*", type=str) +SET_KERNEL_OPT = Option(type=bool, help="If True, try to set the kernel name") +RUN_OPT = Option( + nargs="*", + type=str, + action="append", + help="run passed command_demo using `external=True` flag", +) + +CMD_OPT = Option(nargs="*", type=str, help="cmd to be run") +LOCK_OPT = Option(type=bool, help="If True, use conda-lock") + + +def opts_annotated(**kwargs): + return Annotated[list[str], replace(OPTS_OPT, **kwargs)] + + +def cmd_annotated(**kwargs): + return Annotated[list[str], replace(CMD_OPT, **kwargs)] + + +def run_annotated(**kwargs): + return Annotated[list[list[str]], replace(RUN_OPT, **kwargs)] + + +LOCK_CLI = Annotated[bool, LOCK_OPT] +RUN_CLI = Annotated[list[list[str]], RUN_OPT] +TEST_OPTS_CLI = opts_annotated(help="extra arguments/flags to pytest") + +# CMD_CLI = Annotated[list[str], CMD_OPT] + +FORCE_REINSTALL_CLI = Annotated[ + bool, + Option(type=bool, help="If True, force reinstall even if environment unchanged"), +] + +VERSION_CLI = Annotated[ + str, Option(type=str, help="Version to substitute or check against") +] + +LOG_SESSION_CLI = Annotated[ + bool, + Option( + type=bool, + help="If flag included, 
log python and package (if installed) version", + ), +] + +# --- installer ------------------------------------------------------------------------ + + +def install_requirements( + session: nox.Session, + name: str, + style: Literal["conda", "conda-lock", "pip"] | None = None, + lock: bool = False, + display_name: str | None = None, + set_kernel: bool = True, + install_package: bool = True, + force_reinstall: bool = False, + log_session: bool = False, + deps: Collection[str] | None = None, + reqs: Collection[str] | None = None, + extras: str | Collection[str] | None = None, + channels: Collection[str] | None = None, + **kwargs, +): + """Install requirements. If need fine control, do it in calling func""" + + if display_name is None and set_kernel: + display_name = f"{KERNEL_BASE}-{name}" + + style = style or ("conda-lock" if lock else "conda") + + if style == "pip": + session_install_pip( + session=session, + extras=extras, + display_name=display_name, + force_reinstall=force_reinstall, + reqs=reqs, + install_package=install_package, + **kwargs, + ) + + elif style == "conda-lock": + py = session.python.replace(".", "") # type: ignore + session_install_envs_lock( + session=session, + lockfile=f"./environment/lock/py{py}-{name}-conda-lock.yml", + display_name=display_name, + force_reinstall=force_reinstall, + install_package=install_package, + **kwargs, + ) + + elif style == "conda": + session_install_envs( + session, + f"./environment/{name}.yaml", + display_name=display_name, + force_reinstall=force_reinstall, + deps=deps, + reqs=reqs, + channels=channels, + install_package=install_package, + **kwargs, + ) + else: + raise ValueError(f"style={style} not recognized") + + if log_session: + session_log_session(session, install_package) + + +def session_log_session(session, has_package=False): + session.run("python", "--version") + if has_package: + session.run( + "python", + "-c", + dedent( + f""" + import {IMPORT_NAME} + print({IMPORT_NAME}.__version__) + """ + ), + ) + + +@DEFAULT_SESSION +def dev( + session: Session, + # set_kernel: SET_KERNEL_CLI = True, + dev_run: RUN_CLI = [], # noqa + lock: LOCK_CLI = False, + force_reinstall: FORCE_REINSTALL_CLI = False, + log_session: bool = False, +): + """Create dev env""" + # using conda + + install_requirements( + session=session, + name="dev", + lock=lock, + set_kernel=True, + install_package=True, + force_reinstall=force_reinstall, + log_session=log_session, + ) + session_run_commands(session, dev_run) + + +@group.session(python=PYTHON_DEFAULT_VERSION) # type: ignore +def pyproject2conda( + session: Session, + force_reinstall: FORCE_REINSTALL_CLI = False, + pyproject2conda_force: bool = False, +): + """Create environment.yaml files from pyproject.toml using pyproject2conda.""" + session_install_envs( + session, + reqs=["pyproject2conda>=0.4.0"], + force_reinstall=force_reinstall, + ) + + def create_env(output, extras=None, python="get", base=True, cmd="yaml"): + def _to_args(flag, val): + if val is None: + return [] + if isinstance(val, str): + val = [val] + return prepend_flag(flag, val) + + if pyproject2conda_force or update_target(output, "pyproject.toml"): + args = [cmd, "-o", output] + _to_args("-e", extras) + + if python and cmd == "yaml": + args.extend(["--python-include", python]) + + if not base: + args.append("--no-base") + + session.run("pyproject2conda", *args) + else: + session.log( + f"{output} up to data. 
Pass --pyproject2conda-force to force recreation" + ) + + # create root environment + create_env("environment/base.yaml") + + extras = DEFAULTS["environment-extras"] + for k in ["test", "typing", "docs", "dev"]: + create_env(f"environment/{k}.yaml", extras=extras.get(k, k), base=True) + + # isolated + for k in ["dist-pypi", "dist-conda"]: + create_env(f"environment/{k}.yaml", extras=k, base=False) + + # need an isolated set of test requirements + create_env("environment/test-extras.yaml", extras="test", base=False) + create_env( + "environment/test-extras.txt", extras="test", base=False, cmd="requirements" + ) + + +@DEFAULT_SESSION +def conda_lock( + session: Session, + force_reinstall: FORCE_REINSTALL_CLI = False, + conda_lock_channel: cmd_annotated(help="conda channels to use") = (), # type: ignore + conda_lock_platform: cmd_annotated( # type: ignore + help="platforms to build lock files for", + choices=["osx-64", "linux-64", "win-64", "all"], + ) = (), + conda_lock_cmd: cmd_annotated( # type: ignore + help="lock files to create", + choices=["test", "typing", "dev", "dist-pypi", "dist-conda", "all"], + ) = (), + conda_lock_run: RUN_CLI = [], # noqa + conda_lock_mamba: bool = False, + conda_lock_force: bool = False, +): + """Create lock files using conda-lock""" + + session_install_envs( + session, + # reqs=["git+https://github.com/conda/conda-lock.git"], + reqs=["conda-lock>=2.0.0"], + force_reinstall=force_reinstall, + ) + + session.run("conda-lock", "--version") + + platform = conda_lock_platform + if not platform: + platform = ["osx-64"] + elif "all" in platform: + platform = ["linux-64", "osx-64", "win-64"] + channel = conda_lock_channel + if not channel: + channel = ["conda-forge"] + + lock_dir = ROOT / "environment" / "lock" + + def create_lock( + py, + name, + env_path=None, + ): + py = "py" + py.replace(".", "") + + if env_path is None: + env_path = f"environment/{name}.yaml" + + lockfile = lock_dir / f"{py}-{name}-conda-lock.yml" + + deps = [env_path] + # make sure this is last to make python version last + deps.append(lock_dir / f"{py}.yaml") + + if conda_lock_force or update_target(lockfile, *deps): + session.log(f"creating {lockfile}") + # insert -f for each arg + if lockfile.exists(): + lockfile.unlink() + session.run( + "conda-lock", + "--mamba" if conda_lock_mamba else "--no-mamba", + *prepend_flag("-c", *channel), + *prepend_flag("-p", *platform), + *prepend_flag("-f", *deps), + f"--lockfile={lockfile}", + ) + + session_run_commands(session, conda_lock_run) + if not conda_lock_run and not conda_lock_cmd: + conda_lock_cmd = ["all"] + if "all" in conda_lock_cmd: + conda_lock_cmd = ["test", "typing", "dev", "dist-pypi", "dist-conda"] + conda_lock_cmd = list(set(conda_lock_cmd)) + + for c in conda_lock_cmd: + if c == "test": + for py in PYTHON_ALL_VERSIONS: + create_lock(py, "test") + elif c == "typing": + for py in PYTHON_ALL_VERSIONS: + create_lock(py, "typing") + elif c == "dev": + create_lock(PYTHON_DEFAULT_VERSION, "dev") + elif c == "dist-pypi": + create_lock( + PYTHON_DEFAULT_VERSION, + "dist-pypi", + ) + elif c == "dist-conda": + create_lock( + PYTHON_DEFAULT_VERSION, + "dist-conda", + ) + + +@ALL_SESSION +def test( + session: Session, + test_no_pytest: bool = False, + test_opts: TEST_OPTS_CLI = (), # type: ignore + test_run: RUN_CLI = [], # noqa + lock: LOCK_CLI = False, + force_reinstall: FORCE_REINSTALL_CLI = False, + log_session: bool = False, + no_cov: bool = False, +): + """Test environments with conda installs""" + + install_requirements( + session=session, + 
name="test", + lock=lock, + set_kernel=False, + install_package=True, + force_reinstall=force_reinstall, + log_session=log_session, + ) + + run = test_run + session_run_commands(session, run) + + if not test_no_pytest: + opts = combine_list_str(test_opts) + + if not no_cov: + session.env["COVERAGE_FILE"] = str(Path(session.create_tmp()) / ".coverage") + if "--cov" not in opts: + opts.append("--cov") + session.run("pytest", *opts) + + +@group.session(python=PYTHON_ALL_VERSIONS) # type: ignore +def test_venv( + session: Session, + test_no_pytest: bool = False, + test_opts: TEST_OPTS_CLI = (), # type: ignore + test_run: RUN_CLI = [], # noqa + lock: LOCK_CLI = False, + force_reinstall: FORCE_REINSTALL_CLI = False, + log_session: bool = False, + no_cov: bool = False, +): + """Test environments virtualenv and pip installs""" + + install_requirements( + session=session, + name="test-pip", + extras="test", + install_package=True, + force_reinstall=force_reinstall, + log_session=log_session, + style="pip", + ) + + run = test_run + session_run_commands(session, run) + + if not test_no_pytest: + opts = combine_list_str(test_opts) + + if not no_cov: + session.env["COVERAGE_FILE"] = str(Path(session.create_tmp()) / ".coverage") + if "--cov" not in opts: + opts.append("--cov") + session.run("pytest", *opts) + + +@group.session(python=PYTHON_DEFAULT_VERSION) # type: ignore +def coverage( + session: Session, + coverage_cmd: cmd_annotated( # type: ignore + choices=["erase", "combine", "report", "html", "open"] + ) = (), + coverage_run: RUN_CLI = [], # noqa + coverage_run_internal: run_annotated( # type: ignore + help="Arbitrary commands to run within the session" + ) = [], # noqa + force_reinstall: FORCE_REINSTALL_CLI = False, +): + session_install_envs( + session, + reqs=["coverage[toml]"], + force_reinstall=force_reinstall, + ) + session_run_commands(session, coverage_run) + + if not coverage_cmd and not coverage_run and not coverage_run_internal: + coverage_cmd = ["combine", "report"] + + session.log(f"{coverage_cmd}") + + for cmd in coverage_cmd: + if cmd == "combine": + paths = list(Path(".nox").glob("test*/tmp/.coverage")) + if update_target(".coverage", *paths): + session.run("coverage", "combine", "--keep", "-a", *map(str, paths)) + elif cmd == "open": + _open_webpage(path="htmlcov/index.html") + + else: + session.run("coverage", cmd) + + session_run_commands(session, coverage_run_internal, external=False) + + +@DEFAULT_SESSION +def docs( + session: nox.Session, + docs_cmd: cmd_annotated( # type: ignore + choices=[ + "html", + "build", + "symlink", + "clean", + "livehtml", + "linkcheck", + "spelling", + "showlinks", + "release", + "open", + ], + flags=("--docs-cmd", "-d"), + ) = (), + docs_run: RUN_CLI = [], # noqa + lock: LOCK_CLI = False, + force_reinstall: FORCE_REINSTALL_CLI = False, + version: VERSION_CLI = "", + log_session: bool = False, +): + """Runs make in docs directory. For example, 'nox -s docs -- --docs-cmd html' -> 'make -C docs html'. With 'release' option, you can set the message with 'message=...' 
in posargs.""" + install_requirements( + session=session, + name="docs", + lock=lock, + set_kernel=True, + install_package=True, + force_reinstall=force_reinstall, + log_session=log_session, + ) + + if version: + session.env["SETUPTOOLS_SCM_PRETEND_VERSION"] = version + + cmd = docs_cmd + run = docs_run + + session_run_commands(session, run) + + if not run and not cmd: + cmd = ["html"] + + if "symlink" in cmd: + cmd.remove("symlink") + _create_doc_examples_symlinks(session) + + if open_page := "open" in cmd: + cmd.remove("open") + + if cmd: + args = ["make", "-C", "docs"] + combine_list_str(cmd) + # if version: + # args.append( f"SETUPTOOLS_SCM_PRETEND_VERSION={version}" ) + session.run(*args, external=True) + + if open_page: + _open_webpage(path="./docs/_build/html/index.html") + + +@DEFAULT_SESSION +def dist_pypi( + session: nox.Session, + dist_pypi_run: RUN_CLI = [], # noqa + dist_pypi_cmd: cmd_annotated( # type: ignore + choices=["clean", "build", "testrelease", "release"], + flags=("--dist-pypi-cmd", "-p"), + ) = (), + lock: LOCK_CLI = False, + force_reinstall: FORCE_REINSTALL_CLI = False, + version: VERSION_CLI = "", + log_session: bool = False, +): + """Run 'nox -s dist_pypi -- {clean, build, testrelease, release}'""" + # conda + + install_requirements( + session=session, + name="dist-pypi", + set_kernel=False, + install_package=False, + force_reinstall=force_reinstall, + log_session=log_session, + ) + + if version: + session.env["SETUPTOOLS_SCM_PRETEND_VERSION"] = version + + run, cmd = dist_pypi_run, dist_pypi_cmd + + session_run_commands(session, run) + + if not run and not cmd: + cmd = ["build"] + + if cmd: + if "build" in cmd: + cmd.append("clean") + cmd = sort_like(cmd, ["clean", "build", "testrelease", "release"]) + + session.log(f"cmd={cmd}") + + for command in cmd: + if command == "clean": + session.run("rm", "-rf", "dist", external=True) + elif command == "build": + session.run("python", "-m", "build", "--outdir", "dist/") + + elif command == "testrelease": + session.run("twine", "upload", "--repository", "testpypi", "dist/*") + + elif command == "release": + session.run("twine", "upload", "dist/*") + + +@DEFAULT_SESSION +def dist_conda( + session: nox.Session, + dist_conda_run: RUN_CLI = [], # noqa + dist_conda_cmd: cmd_annotated( # type: ignore + choices=[ + "recipe", + "build", + "clean", + "clean-recipe", + "clean-build", + "recipe-cat-full", + ], + flags=("--dist-conda-cmd", "-c"), + ) = (), + lock: LOCK_CLI = False, + sdist_path: str = "", + force_reinstall: FORCE_REINSTALL_CLI = False, + log_session: bool = False, + version: VERSION_CLI = "", +): + """Runs make -C dist-conda posargs""" + install_requirements( + session=session, + name="dist-conda", + set_kernel=False, + install_package=False, + force_reinstall=force_reinstall, + log_session=log_session, + ) + + run, cmd = dist_conda_run, dist_conda_cmd + session_run_commands(session, run) + if not run and not cmd: + cmd = ["recipe"] + + if cmd: + if "recipe" in cmd: + cmd.append("clean-recipe") + if "build" in cmd: + cmd.append("clean-build") + if "clean" in cmd: + cmd.extend(["clean-recipe", "clean-build"]) + cmd.remove("clean") + + cmd = sort_like( + cmd, ["recipe-cat-full", "clean-recipe", "recipe", "clean-build", "build"] + ) + + if not sdist_path: + sdist_path = PACKAGE_NAME + if version: + sdist_path = f"{sdist_path}=={version}" + + for command in cmd: + if command == "clean-recipe": + session.run("rm", "-rf", f"dist-conda/{PACKAGE_NAME}", external=True) + elif command == "clean-build": + session.run("rm", 
"-rf", "dist-conda/build", external=True) + elif command == "recipe": + session.run( + "grayskull", + "pypi", + sdist_path, + "--sections", + "package", + "source", + "build", + "requirements", + "-o", + "dist-conda", + ) + _append_recipe( + f"dist-conda/{PACKAGE_NAME}/meta.yaml", ".recipe-append.yaml" + ) + session.run( + "cat", f"dist-conda/{PACKAGE_NAME}/meta.yaml", external=True + ) + elif command == "recipe-cat-full": + import tempfile + + with tempfile.TemporaryDirectory() as d: + session.run( + "grayskull", + "pypi", + sdist_path, + "-o", + d, + ) + session.run( + "cat", str(Path(d) / PACKAGE_NAME / "meta.yaml"), external=True + ) + + elif command == "build": + session.run( + "conda", + "mambabuild", + "--output-folder=dist-conda/build", + "--no-anaconda-upload", + "dist-conda", + ) + + +def _append_recipe(recipe_path, append_path): + with open(recipe_path) as f: + recipe = f.readlines() + + with open(append_path) as f: + append = f.readlines() + + with open(recipe_path, "w") as f: + f.writelines(recipe + ["\n"] + append) + + +@ALL_SESSION +def typing( + session: nox.Session, + typing_cmd: cmd_annotated( # type: ignore + choices=["mypy", "pyright", "pytype"], + flags=("--typing-cmd", "-m"), + ) = (), + typing_run: RUN_CLI = [], # noqa + typing_run_internal: run_annotated( # type: ignore + help="run arbitrary (internal) commands. For example, --typing-run-internal 'mypy --some-option'", + ) = [], # noqa + lock: LOCK_CLI = False, + force_reinstall: FORCE_REINSTALL_CLI = False, + log_session: bool = False, +): + """Run type checkers (mypy, pyright, pytype)""" + + install_requirements( + session=session, + name="typing", + lock=lock, + set_kernel=False, + install_package=True, + force_reinstall=force_reinstall, + log_session=log_session, + ) + + run, cmd = typing_run, typing_cmd + + session_run_commands(session, run) + + if not run and not typing_run_internal and not cmd: + cmd = ["mypy"] + + for c in cmd: + if c == "mypy": + session.run("mypy", "--color-output") + elif c == "pyright": + session.run("pyright", external=True) + elif c == "pytype": + session.run("pytype") + + session_run_commands(session, typing_run_internal, external=False) + + +@ALL_SESSION +def testdist_conda( + session: Session, + test_no_pytest: bool = False, + test_opts: TEST_OPTS_CLI = (), # type: ignore + testdist_conda_run: RUN_CLI = [], # noqa + force_reinstall: FORCE_REINSTALL_CLI = False, + version: VERSION_CLI = "", + log_session: bool = False, +): + """Test conda distribution""" + + install_str = PACKAGE_NAME + if version: + install_str = f"{install_str}=={version}" + + session_install_envs( + session, + "environment/test-extras.yaml", + deps=[install_str], + channels=["conda-forge"], + force_reinstall=force_reinstall, + install_package=False, + ) + + if log_session: + session_log_session(session, False) + + session_run_commands(session, testdist_conda_run) + + if not test_no_pytest: + opts = combine_list_str(test_opts) + session.run("pytest", *opts) + + +@ALL_SESSION +def testdist_pypi( + session: Session, + test_no_pytest: bool = False, + test_opts: TEST_OPTS_CLI = (), # type: ignore + testdist_pypi_run: RUN_CLI = [], # noqa + testdist_pypi_extras: cmd_annotated(help="extras to install") = (), # type: ignore + force_reinstall: FORCE_REINSTALL_CLI = False, + version: VERSION_CLI = "", + log_session: bool = False, +): + """Test pypi distribution""" + extras = testdist_pypi_extras + install_str = PACKAGE_NAME + + if extras: + install_str = "{}[{}]".format(install_str, ",".join(extras)) + + if version: + 
+@ALL_SESSION
+def testdist_pypi(
+    session: Session,
+    test_no_pytest: bool = False,
+    test_opts: TEST_OPTS_CLI = (),  # type: ignore
+    testdist_pypi_run: RUN_CLI = [],  # noqa
+    testdist_pypi_extras: cmd_annotated(help="extras to install") = (),  # type: ignore
+    force_reinstall: FORCE_REINSTALL_CLI = False,
+    version: VERSION_CLI = "",
+    log_session: bool = False,
+):
+    """Test pypi distribution."""
+    extras = testdist_pypi_extras
+    install_str = PACKAGE_NAME
+
+    if extras:
+        install_str = "{}[{}]".format(install_str, ",".join(extras))
+
+    if version:
+        install_str = f"{install_str}=={version}"
+
+    session_install_envs(
+        session,
+        "environment/test-extras.yaml",
+        reqs=[install_str],
+        channels=["conda-forge"],
+        force_reinstall=force_reinstall,
+        install_package=False,
+    )
+
+    if log_session:
+        session_log_session(session, False)
+
+    session_run_commands(session, testdist_pypi_run)
+    if not test_no_pytest:
+        opts = combine_list_str(test_opts)
+        session.run("pytest", *opts)
+
+
+@group.session(python=PYTHON_ALL_VERSIONS)  # type: ignore
+def testdist_pypi_venv(
+    session: Session,
+    test_no_pytest: bool = False,
+    test_opts: TEST_OPTS_CLI = (),  # type: ignore
+    testdist_pypi_run: RUN_CLI = [],  # noqa
+    testdist_pypi_extras: cmd_annotated(help="extras to install") = (),  # type: ignore
+    force_reinstall: FORCE_REINSTALL_CLI = False,
+    version: VERSION_CLI = "",
+    log_session: bool = False,
+):
+    """Test pypi distribution in a venv (pip-only install)."""
+    extras = testdist_pypi_extras
+    install_str = PACKAGE_NAME
+
+    if extras:
+        install_str = "{}[{}]".format(install_str, ",".join(extras))
+
+    if version:
+        install_str = f"{install_str}=={version}"
+
+    install_requirements(
+        session=session,
+        name="testdist-pypi-venv",
+        set_kernel=False,
+        install_package=False,
+        force_reinstall=force_reinstall,
+        style="pip",
+        reqs=["-r", "environment/test-extras.txt", install_str],
+    )
+
+    if log_session:
+        session_log_session(session, False)
+
+    session_run_commands(session, testdist_pypi_run)
+    if not test_no_pytest:
+        opts = combine_list_str(test_opts)
+        session.run("pytest", *opts)
+
+
+# --- Utilities ------------------------------------------------------------------------
+def _create_doc_examples_symlinks(session, clean=True):
+    """Create symlinks from docs/examples/*.md files to /examples/usage/..."""
+
+    import os
+
+    def usage_paths(path):
+        with path.open("r") as f:
+            for line in f:
+                if line.startswith("usage/"):
+                    yield Path(line.strip())
+
+    def get_target_path(usage_path, prefix_dir="./examples", exts=(".md", ".ipynb")):
+        path = Path(prefix_dir) / Path(usage_path)
+
+        if path.exists():
+            return path
+        else:
+            for ext in exts:
+                p = path.with_suffix(ext)
+                if p.exists():
+                    return p
+
+        raise ValueError(f"no path found for base {path}")
+
+    root = Path("./docs/examples/")
+    if clean:
+        import shutil
+
+        shutil.rmtree(root / "usage", ignore_errors=True)
+
+    # get all md files
+    paths = list(root.glob("*.md"))
+
+    # read usage lines
+    for path in paths:
+        for usage_path in usage_paths(path):
+            target = get_target_path(usage_path)
+            link = root / usage_path.parent / target.name
+
+            if link.exists():
+                link.unlink()
+
+            link.parent.mkdir(parents=True, exist_ok=True)
+
+            target_rel = os.path.relpath(target, start=link.parent)
+            session.log(f"linking {target_rel} -> {link}")
+
+            os.symlink(target_rel, link)
+
+
+def _open_webpage(path=None, url=None):
+    import webbrowser
+    from urllib.request import pathname2url
+
+    if path:
+        url = "file://" + pathname2url(str(Path(path).absolute()))
+    if url:
+        webbrowser.open(url)
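
`_open_webpage` accepts either a URL or a filesystem path; a path is converted to a `file://` URL before being handed to the standard-library `webbrowser` module. A minimal standalone sketch of that conversion:

```python
# Sketch of the path -> file:// URL conversion used by _open_webpage above.
from pathlib import Path
from urllib.request import pathname2url

path = Path("docs/_build/html/index.html")  # built by 'nox -s docs'
url = "file://" + pathname2url(str(path.absolute()))
print(url)  # e.g. file:///home/user/repo/docs/_build/html/index.html
```
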
env_dir / f"{name}.yaml" + +# deps = [] +# if base: +# deps.append(str(env_base)) +# for extra in extras: +# deps.append(str(env_dir / f"{extra}-extras.yaml")) + +# if conda_merge_force or update_target(env, *deps): +# session.log(f"creating {env}") + +# args = ["conda-merge"] + deps +# with tempfile.TemporaryDirectory() as d: +# tmp_path = Path(d) / "tmp_env.yaml" + +# with tmp_path.open("w") as f: +# session.run(*args, stdout=f) + +# run_str = dedent( +# f""" +# from ruamel.yaml import YAML; from pathlib import Path; +# pin, pout = Path("{tmp_path}"), Path("{env}") +# y = YAML(); y.indent(mapping=2, sequence=4, offset=2) +# y.dump(y.load(pin.open("r")), pout.open("w")) +# """ +# ) + +# session.run("python", "-c", run_str, silent=True) + +# for extra in ["test", "docs"]: +# create_env(extra, base=True) + +# create_env("test", "typing", name="typing", base=True) +# create_env("dev", "test", "typing", "nox", name="dev", base=True) diff --git a/pyproject.toml b/pyproject.toml index 4ca5cfe0..adbe32ae 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -23,6 +23,7 @@ classifiers = [ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Topic :: Scientific/Engineering", ] dynamic = ["readme", "version"] @@ -32,7 +33,7 @@ dependencies = [ "xarray >= 0.16", "sympy", "scipy", - "cmomy >= 0.4", + "cmomy >= 0.5", "custom-inherit", "attrs", "module-utilities >= 0.2", @@ -44,10 +45,18 @@ documentation = "https://pages.nist.gov/thermoextrap/" [project.optional-dependencies] accel = ["bottleneck"] -parallel = ["dask[complete]"] +parallel = [ + "dask[complete]", # p2c: -s dask + +] viz = ["matplotlib"] -mbar = ["pymbar<4.0"] -gpr = ["tensorflow", "tensorflow-probability", "gpflow"] +mbar = ["pymbar>=4.0"] +gpr = [ + "tensorflow", # p2c: -p + "tensorflow-probability", # p2c: -p + "gpflow < 2.6.0", # p2c: -p + +] openmm = ["openmm"] all = [ "thermoextrap[mbar]", @@ -56,10 +65,81 @@ all = [ "thermoextrap[parallel]", "thermoextrap[viz]", ] -test = ["pytest"] +test = [ + "pytest", # + "pytest-xdist", + "pytest-cov", + "pytest-sugar", + "pandas", + "thermoextrap[mbar]", + "thermoextrap[gpr]", +] +dev-extras = [ + "pytest-accept", # p2c: -p + "setuptools-scm", + "ipython", + "ipykernel", +] +typing-extras = [ + # "pytype; python_version < '3.11'", + "mypy", +] +typing = [ + "thermoextrap[typing-extras]", # + "thermoextrap[test]", +] +nox = [ + "nox", + "noxopt", # p2c: -p + "ruamel.yaml", +] +dev = [ + "thermoextrap[test]", # + "thermoextrap[typing-extras]", + "thermoextrap[dev-extras]", + "thermoextrap[accel]", + "thermoextrap[viz]", +] +tools = [ + "pre-commit", # + "cruft", + "scriv", +] +dev-complete = [ + "thermoextrap[dev]", # + "thermoextrap[tools]", + "thermoextrap[nox]", +] +docs = [ + "setuptools-scm", # + "ipython", + "pyenchant", + "ghp-import", + "sphinx", + "sphinx-copybutton", + "sphinxcontrib-spelling", + "sphinx-autobuild", + "myst-nb", + "sphinx-book-theme", + "autodocsumm", + "thermoextrap[viz]", + "thermoextrap[mbar]", + "thermoextrap[gpr]", +] +# to be parsed with pyproject2conda with --no-base option +dist-pypi = ["twine", "build"] +dist-conda = [ + "anaconda-client", # + "grayskull", + "conda-build", + "conda-verify", + "boa", + "setuptools-scm", +] + +[tool.pyproject2conda] +channels = ["conda-forge"] -# dev = [] -# docs = [] ## grayskull still messes some things up, but use scripts/recipe-append.sh for this [tool.setuptools] zip-safe = true # if using mypy, must be False 
@@ -72,7 +152,6 @@ where = ["src"] ## include = [] ## exclude = [] -## [tool.setuptools.dynamic] readme = { file = [ "README.md", @@ -87,8 +166,8 @@ fallback_version = "999" test = "pytest" [tool.pytest.ini_options] -addopts = "--verbose" -testpaths = ["tests"] +addopts = "--doctest-modules --doctest-glob='*.md'" +testpaths = ["tests", "README.md"] [tool.isort] profile = "black" @@ -255,7 +334,7 @@ show_error_codes = true warn_unused_ignores = true warn_return_any = true warn_unused_configs = true -exclude = [".eggs", ".tox", "doc", "docs"] +exclude = [".eggs", ".tox", "doc", "docs", ".nox"] check_untyped_defs = true [[tool.mypy.overrides]] diff --git a/scripts/dist-conda.mk b/scripts/dist-conda.mk deleted file mode 100644 index 3dda1bdb..00000000 --- a/scripts/dist-conda.mk +++ /dev/null @@ -1,32 +0,0 @@ - -project_name?=thermoextrap -sdist_path?=$(project_name) - -.PHONY: help clean-recipe clean-build grayskull recipe-append recipe build command - -help: - @echo Makefile for building conda dist -clean-recipe: - rm -rf dist-conda/$(project_name) - -clean-build: - rm -rf build - -# by default, only use a few sections -grayskull_args ?= --maintainers wpk-nist-gov --sections package source build requirements -grayskull: clean-recipe - grayskull pypi $(sdist_path) $(grayskull_args) -o dist-conda - -# append the rest -recipe_base_path ?= dist-conda/$(project_name)/meta.yaml -recipe_append_path ?= .recipe-append.yaml -recipe-append: - bash scripts/recipe-append.sh $(recipe_base_path) $(recipe_append_path) - -recipe: grayskull recipe-append - -build: clean-build - conda mambabuild --output-folder=dist-conda/build --no-anaconda-upload dist-conda - -command: - $(command) diff --git a/scripts/dist-pypi.mk b/scripts/dist-pypi.mk deleted file mode 100644 index b358db33..00000000 --- a/scripts/dist-pypi.mk +++ /dev/null @@ -1,18 +0,0 @@ -.PHONY: help clean build release testrelease command -help: - @echo Makefile for building pypi dist -clean: - -rm -rf dist/* - -build: clean - python -m build --outdir dist/ - -testrelease: - twine upload --repository testpypi dist/* - -release: - twine upload dist/* - -command?= @echo "pass command=..." -command: - $(command) diff --git a/scripts/docs-examples-symlinks.sh b/scripts/docs-examples-symlinks.sh deleted file mode 100644 index ce6b59de..00000000 --- a/scripts/docs-examples-symlinks.sh +++ /dev/null @@ -1,52 +0,0 @@ -# this creates symlinks to filees in /examples/ directory -# you can link to files using (for example) -# ```{eval-rst} -# usage/notebook -# ``` -# the script will creat a link -# /docs/examples/usage/notebook.ipynb -> /examples/usage/notebook.ipynb -# - -exts=(ipynb md) - -rm -rf docs/examples/usage -for path in $(cat docs/examples/*.md | grep '^usage/'); do - - target="examples/"${path} - name=$(basename $target) - - if [ -f $target ]; then - # has extension - - base=${name%.*} - ext=${name##*.} - - else - # no extension. 
Try to add one - for ext in ${exts[@]}; do - tmp=${target}.${ext} - if [ -f "${tmp}" ] ; then - base=$name - target=$tmp - break - fi - done - fi - - - new_dir=docs/examples/$(dirname $path) - mkdir -p $new_dir - - total_target=$(realpath --relative-to=${new_dir} $target) - - # echo "target $target" - # echo "base $base" - # echo "ext $ext" - echo "target $total_target" - echo "new_dir $new_dir" - echo "" - - - ln -s $total_target $new_dir - -done diff --git a/scripts/lint.mk b/scripts/lint.mk deleted file mode 100644 index 06d3c10d..00000000 --- a/scripts/lint.mk +++ /dev/null @@ -1,19 +0,0 @@ -.PHONY: help mypy pyright pytype all - -help: - @echo Makefile for linting - -mypy: - -mypy --color-output $(mypy_args) - -pyright: - -pyright $(pyright_args) - -pytype: - -pytype $(pytype_args) - -all: mypy pyright pytype - -command?= @echo 'pass command=...' -command: - $(command) diff --git a/scripts/recipe-append.sh b/scripts/recipe-append.sh deleted file mode 100644 index f8b8c7c6..00000000 --- a/scripts/recipe-append.sh +++ /dev/null @@ -1,34 +0,0 @@ -# This is to fix issues using grayskull with pyproject.toml only projects -# We fall back to using grayskull to create the majority of the recipe -# but add in the final sections -# Edit .recipe-append.yaml -if [ $# -lt 2 ]; then - echo "need recipe_base_path, recipe_append_path" - exit 1 -fi - -base_path=$1 -append_path=$2 - - -if [ ! -f $base_path ]; then - echo "no $base_path" - exit 1 -fi - -if [ ! -f $append_path ]; then - echo "no $append_path" - exit -fi - - -tmp_file=$(mktemp) -cp $base_path $tmp_file - -echo "" >> $tmp_file - -cat $append_path >> $tmp_file - -mv $tmp_file $base_path - -cat $base_path diff --git a/scripts/run-prettier.sh b/scripts/run-prettier.sh deleted file mode 100644 index b11d7098..00000000 --- a/scripts/run-prettier.sh +++ /dev/null @@ -1,2 +0,0 @@ -# interface to pre-commit prettier -pre-commit run prettier --files $@ &> /dev/null || true diff --git a/scripts/tox-ipykernel-display-name.sh b/scripts/tox-ipykernel-display-name.sh deleted file mode 100644 index 57350ade..00000000 --- a/scripts/tox-ipykernel-display-name.sh +++ /dev/null @@ -1,22 +0,0 @@ -# This adjusts the display name for ipykernels -# As we (assume) use of nb_conda_kernels, this will -# make kernels findable. 
-
-if [ $# -lt 1 ]; then
-    echo "Usage: $0 display_name_base"
-    exit 1
-fi
-
-base=$1
-
-eval "$(conda shell.bash hook)"
-
-for path in .tox/* ; do
-
-    suffix=$(basename $path)
-    display_name=${base}-${suffix}
-
-    echo $x $display_name
-    conda activate $path
-    conda activate $path && python -m ipykernel install --sys-prefix --display-name "$display_name"
-done
diff --git a/src/thermoextrap/gpr_active/active_utils.py b/src/thermoextrap/gpr_active/active_utils.py
index d004fd76..84230916 100644
--- a/src/thermoextrap/gpr_active/active_utils.py
+++ b/src/thermoextrap/gpr_active/active_utils.py
@@ -247,16 +247,16 @@ def get_data(self):
         g_x = 0.0
         g_cross = 0.0
         for k in range(x.shape[1]):
-            this_g_x = timeseries.statisticalInefficiency(x[:, k])
-            this_g_cross = timeseries.statisticalInefficiency(x[:, k], pot)
+            this_g_x = timeseries.statistical_inefficiency(x[:, k])
+            this_g_cross = timeseries.statistical_inefficiency(x[:, k], pot)
             if this_g_x > g_x:
                 g_x = this_g_x
             if this_g_cross > g_cross:
                 g_cross = this_g_cross
-        g_pot = timeseries.statisticalInefficiency(pot)
+        g_pot = timeseries.statistical_inefficiency(pot)
         g_max = np.max([g_x, g_pot, g_cross])
         # Get indices of uncorrelated data and subsample everything
-        uncorr_inds = timeseries.subsampleCorrelatedData(np.arange(x.shape[0]), g_max)
+        uncorr_inds = timeseries.subsample_correlated_data(np.arange(x.shape[0]), g_max)
         x = x[uncorr_inds, :]
         bias = bias[uncorr_inds]
         pot = pot[uncorr_inds]
diff --git a/src/thermoextrap/legacy/old_scripts.py b/src/thermoextrap/legacy/old_scripts.py
index 8dce62b9..01752b1e 100644
--- a/src/thermoextrap/legacy/old_scripts.py
+++ b/src/thermoextrap/legacy/old_scripts.py
@@ -213,7 +213,7 @@ def perturbWithSamples(B, refB, x, U, useMBAR=False):
         mbarObj = mbar.MBAR(np.array([refB * U]), [U.shape[0]])
         outval = np.zeros((len(B), x.shape[1]))
         for i in range(len(B)):
-            outval[i, :] = mbarObj.computeMultipleExpectations(x.T, B[i] * U)[0]
+            outval[i, :] = mbarObj.compute_multiple_expectations(x.T, B[i] * U)["mu"]
 
     else:
         # Compute what goes in the exponent and subtract out the maximum
diff --git a/src/thermoextrap/legacy/reweight.py b/src/thermoextrap/legacy/reweight.py
index 45f44b1c..e99e1988 100644
--- a/src/thermoextrap/legacy/reweight.py
+++ b/src/thermoextrap/legacy/reweight.py
@@ -83,9 +83,9 @@ def predict(self, B, params=None, refB=None, useMBAR=False):
             mbarObj = mbar.MBAR(np.array([refB * U]), [U.shape[0]])
             predictVals = np.zeros((len(B), x.shape[1]))
             for i in range(len(B)):
-                predictVals[i, :] = mbarObj.computeMultipleExpectations(x.T, B[i] * U)[
-                    0
-                ]
+                predictVals[i, :] = mbarObj.compute_multiple_expectations(
+                    x.T, B[i] * U
+                )["mu"]
 
         else:
             # Compute what goes in the exponent and subtract out the maximum
@@ -218,6 +218,8 @@ def predict(self, B, order=None, params=None, refB=None):
         x = np.reshape(self.x, (self.x.shape[0] * self.x.shape[1], self.x.shape[2]))
 
         for i in range(len(B)):
-            predictVals[i, :] = params.computeMultipleExpectations(x.T, B[i] * allU)[0]
+            predictVals[i, :] = params.compute_multiple_expectations(x.T, B[i] * allU)[
+                "mu"
+            ]
 
         return predictVals
diff --git a/src/thermoextrap/models.py b/src/thermoextrap/models.py
index df30bfe8..24701719 100644
--- a/src/thermoextrap/models.py
+++ b/src/thermoextrap/models.py
@@ -1089,7 +1089,7 @@ def predict(self, alpha, alpha_name=None):
 
         out = []
         for b in alpha.values:
-            out.append(mbar_obj.computeMultipleExpectations(x_flat.T, b * U)[0])
+            out.append(mbar_obj.compute_multiple_expectations(x_flat.T, b * U)["mu"])
 
         out = np.array(out)
 
         # reshape
diff --git a/tools/noxtools.py b/tools/noxtools.py
new file mode 100644
index 00000000..40d2d5e4
--- /dev/null
+++ b/tools/noxtools.py
@@ -0,0 +1,785 @@
+"""Utilities to work with nox"""
+
+from __future__ import annotations
+
+import shlex
+import tempfile
+from pathlib import Path
+from typing import TYPE_CHECKING, Any, Iterable, Literal, cast
+
+from ruamel.yaml import safe_load
+
+if TYPE_CHECKING:
+    from collections.abc import Collection, Sequence
+
+    import nox
+
+
+# --- Basic utilities -------------------------------------------------------------------
+def combine_list_str(opts: list[str]) -> list[str]:
+    if opts:
+        return shlex.split(" ".join(opts))
+    else:
+        return []
+
+
+def combine_list_list_str(opts: list[list[str]]) -> Iterable[list[str]]:
+    return (combine_list_str(opt) for opt in opts)
+
+
+def sort_like(values: Collection[Any], like: Sequence[Any]) -> list[Any]:
+    """Sort `values` in order of `like`."""
+    # only unique
+    sorter = {k: i for i, k in enumerate(like)}
+    return sorted(set(values), key=lambda k: sorter[k])
+
+
+def update_target(target: str | Path, *deps: str | Path) -> bool:
+    """Check if target is older than deps."""
+
+    target_path = Path(target)
+
+    deps_path = tuple(map(Path, deps))
+
+    for d in deps_path:
+        if not d.exists():
+            raise ValueError(f"dependency {d} does not exist")
+
+    if not target_path.exists():
+        update = True
+
+    else:
+        target_time = target_path.stat().st_mtime
+        update = any(target_time < dep.stat().st_mtime for dep in deps_path)
+
+    return update
+
+
+def prepend_flag(flag: str, *args: str) -> list[str]:
+    """
+    Add in a flag before each arg.
+
+    >>> prepend_flag("-k", "a", "b")
+    ['-k', 'a', '-k', 'b']
+
+    """
+
+    if len(args) == 1 and not isinstance(args[0], str):
+        args = args[0]
+
+    return sum([[flag, _] for _ in args], [])
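
The helpers above are small enough to demo inline. With the definitions above in scope, the following usage (illustrative, not part of the patch) holds:

```python
# Usage sketch for the basic utilities defined above (illustration only).
import os
import tempfile
import time
from pathlib import Path

assert combine_list_str(["-v --cov"]) == ["-v", "--cov"]
assert sort_like(["build", "clean"], like=["clean", "build"]) == ["clean", "build"]
assert prepend_flag("-k", "a", "b") == ["-k", "a", "-k", "b"]

with tempfile.TemporaryDirectory() as d:
    dep = Path(d) / "environment.yaml"
    dep.write_text("name: test\n")
    target = Path(d) / "lock.txt"
    assert update_target(target, dep)       # target missing -> update needed
    target.write_text("locked\n")
    os.utime(dep, (time.time() + 10,) * 2)  # make the dep newer than the target
    assert update_target(target, dep)       # dep newer -> update needed again
```
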
+ """ + return session._runner.global_config.no_install and session._runner.venv._reused # type: ignore + + +def session_run_commands( + session: nox.Session, commands: list[list[str]], external: bool = True, **kws: Any +) -> None: + """Run commands command""" + + if commands: + kws.update(external=external) + for opt in combine_list_list_str(commands): + session.run(*opt, **kws) + + +def session_set_ipykernel_display_name( + session: nox.Session, display_name: str | None, check_skip_install: bool = True +) -> None: + """Rename ipython kernel display name.""" + if not display_name or (check_skip_install and session_skip_install(session)): + return + else: + command = f"python -m ipykernel install --sys-prefix --display-name {display_name}".split() + # continue if fails + session.run(*command, success_codes=[0, 1]) + + +def session_install_package( + session: nox.Session, + package: str = ".", + develop: bool = True, + no_deps: bool = True, + *args: str, + **kwargs: Any, +) -> None: + """Install package into session.""" + + if session_skip_install(session): + return + + if develop: + command = ["-e"] + else: + command = [] + + command.append(package) + + if no_deps: + command.append("--no-deps") + + session.install(*command, *args, **kwargs) + + +# --- Create env from lock ------------------------------------------------------------- + + +def session_install_envs_lock( + session: nox.Session, + lockfile: str | Path, + extras: str | list[str] | None = None, + display_name: str | None = None, + force_reinstall: bool = False, + install_package: bool = False, +) -> bool: + """Install depedencies using conda-lock""" + + if session_skip_install(session): + return True + + unchanged, hashes = env_unchanged( + session, lockfile, prefix="lock", other=dict(install_package=install_package) + ) + if unchanged and not force_reinstall: + return unchanged + + if extras: + if isinstance(extras, str): + extras = extras.split(",") + extras = cast(list[str], sum([["--extras", _] for _ in extras], [])) + else: + extras = [] + + session.run( + "conda-lock", + "install", + "--mamba", + *extras, + "-p", + str(session.virtualenv.location), + str(lockfile), + silent=True, + external=True, + ) + + if install_package: + session_install_package(session) + + session_set_ipykernel_display_name(session, display_name) + + write_hashfile(hashes, session=session, prefix="lock") + + return unchanged + + +# --- create env from yaml ------------------------------------------------------------- + + +def parse_envs( + *paths: str | Path, + remove_python: bool = True, + deps: Collection[str] | None = None, + reqs: Collection[str] | None = None, + channels: Collection[str] | None = None, +) -> tuple[set[str], set[str], set[str], str | None]: + """Parse an `environment.yaml` file.""" + import re + + def _default(x) -> set[str]: + if x is None: + return set() + elif isinstance(x, str): + x = [x] + return set(x) + + channels = _default(channels) + deps = _default(deps) + reqs = _default(reqs) + name = None + + python_match = re.compile(r"\s*(python)\s*[~<=>].*") + + def _get_context(path): + if hasattr(path, "readline"): + from contextlib import nullcontext + + return nullcontext(path) + else: + return Path(path).open("r") + + for path in paths: + with _get_context(path) as f: + data = safe_load(f) + + channels.update(data.get("channels", [])) + name = data.get("name", name) + + # check dependencies for pip + for d in data.get("dependencies", []): + if isinstance(d, dict): + reqs.update(cast(list[str], d.get("pip"))) + else: + if 
+                if remove_python and not python_match.match(d):
+                    deps.add(d)
+
+    return channels, deps, reqs, name
+
+
+def session_install_envs(
+    session: nox.Session,
+    *paths: str | Path,
+    remove_python: bool = True,
+    deps: Collection[str] | None = None,
+    reqs: Collection[str] | None = None,
+    channels: Collection[str] | None = None,
+    conda_install_kws: dict[str, Any] | None = None,
+    install_kws: dict[str, Any] | None = None,
+    display_name: str | None = None,
+    force_reinstall: bool = False,
+    install_package: bool = False,
+) -> bool:
+    """Parse and install everything. Pass an already merged yaml file."""
+
+    if session_skip_install(session):
+        return True
+
+    channels, deps, reqs, name = parse_envs(
+        *paths,
+        remove_python=remove_python,
+        deps=deps,
+        reqs=reqs,
+        channels=channels,
+    )
+
+    unchanged, hashes = env_unchanged(
+        session,
+        prefix="env",
+        other=dict(
+            deps=deps,
+            reqs=reqs,
+            channels=channels,
+            install_package=install_package,
+        ),
+    )
+    if unchanged and not force_reinstall:
+        return unchanged
+
+    if not channels:
+        channels = ""
+    if deps:
+        conda_install_kws = conda_install_kws or {}
+        conda_install_kws.update(channel=channels)
+        session.conda_install(*deps, **(conda_install_kws or {}))
+
+    if reqs:
+        session.install(*reqs, **(install_kws or {}))
+
+    if install_package:
+        session_install_package(session)
+
+    session_set_ipykernel_display_name(session, display_name)
+
+    write_hashfile(hashes, session=session, prefix="env")
+
+    return unchanged
+
+
+def session_install_pip(
+    session: nox.Session,
+    requirement_paths: Collection[str] | None = None,
+    constraint_paths: Collection[str] | None = None,
+    extras: str | Collection[str] | None = None,
+    reqs: Collection[str] | None = None,
+    display_name: str | None = None,
+    force_reinstall: bool = False,
+    install_package: bool = False,
+    no_deps: bool = False,
+):
+    if session_skip_install(session):
+        return True
+
+    if extras:
+        install_package = True
+        if not isinstance(extras, str):
+            extras = ",".join(extras)
+        install_package_args = ["-e", f".[{extras}]"]
+    elif install_package:
+        install_package_args = ["-e", "."]
+
+    if install_package and no_deps:
+        install_package_args.append("--no-deps")
+
+    requirement_paths = requirement_paths or ()
+    constraint_paths = constraint_paths or ()
+    reqs = reqs or ()
+    paths = requirement_paths + constraint_paths
+
+    unchanged, hashes = env_unchanged(
+        session,
+        *paths,
+        prefix="pip",
+        other=dict(
+            reqs=reqs, extras=extras, install_package=install_package, no_deps=no_deps
+        ),
+    )
+
+    if unchanged and not force_reinstall:
+        return unchanged
+
+    install_args = (
+        prepend_flag("-r", *requirement_paths)
+        + prepend_flag("-c", *constraint_paths)
+        + list(reqs)
+    )
+
+    if install_args:
+        session.install(*install_args)
+
+    if install_package:
+        session.install(*install_package_args)
+
+    session_set_ipykernel_display_name(session, display_name)
+
+    write_hashfile(hashes, session=session, prefix="pip")
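
All of the installers above share one caching pattern: hash the input files plus any extra options, compare against the hashes recorded on the previous run, and skip the install when nothing changed (`env_unchanged` / `write_hashfile`, defined below). A simplified, self-contained sketch of the mechanism; the real implementation splits the read and write steps:

```python
# Simplified sketch of the env-change detection used by the installers above.
import hashlib
import json
from pathlib import Path

def file_hash(path, buff_size=65536):
    """md5 a file in chunks, as _get_file_hash below does."""
    md5 = hashlib.md5()
    with open(path, "rb") as f:
        while data := f.read(buff_size):
            md5.update(data)
    return md5.hexdigest()

def needs_install(hashfile: Path, paths) -> bool:
    hashes = {str(p): file_hash(p) for p in paths}
    if hashfile.exists() and json.loads(hashfile.read_text()) == hashes:
        return False                         # nothing changed since last run
    hashfile.write_text(json.dumps(hashes))  # record state for next time
    return True
```
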
+def session_install_envs_merge(
+    session,
+    *paths,
+    remove_python=True,
+    deps=None,
+    reqs=None,
+    channels=None,
+    conda_install_kws=None,
+    install_kws=None,
+    display_name=None,
+    force_reinstall=False,
+) -> bool:
+    """Merge files (using conda-merge) and then create env"""
+
+    if session_skip_install(session):
+        return True
+
+    unchanged, hashes = env_unchanged(
+        session, *paths, prefix="env", other=dict(deps=deps, reqs=reqs)
+    )
+    if unchanged and not force_reinstall:
+        return unchanged
+
+    # first create a temporary file for the environment
+    with tempfile.TemporaryDirectory() as d:
+        yaml = Path(d) / "tmp_env.yaml"
+        with yaml.open("w") as f:
+            session.run("conda-merge", *paths, stdout=f, external=True)
+        session.run("cat", str(yaml), external=True, silent=True)
+
+        channels, deps, reqs, _ = parse_envs(
+            yaml, remove_python=remove_python, deps=deps, reqs=reqs, channels=channels
+        )
+
+        if deps:
+            if conda_install_kws is None:
+                conda_install_kws = {}
+            conda_install_kws.update(channel=channels)
+            session.conda_install(*deps, **conda_install_kws)
+
+        if reqs:
+            if install_kws is None:
+                install_kws = {}
+            session.install(*reqs, **install_kws)
+
+    session_set_ipykernel_display_name(session, display_name)
+
+    write_hashfile(hashes, session=session, prefix="env")
+
+    return unchanged
+
+
+# --- Hash environment -------------------------------------------------------------------
+
+PREFIX_HASH_EXTS = Literal["env", "lock", "pip"]
+
+
+def env_unchanged(
+    session: nox.Session,
+    *paths: str | Path,
+    prefix: PREFIX_HASH_EXTS,
+    verbose: bool = True,
+    hashes: dict[str, str] | None = None,
+    other: dict[str, Any] | None = None,
+) -> tuple[bool, dict[str, str]]:
+    hashfile = hashfile_path(session, prefix)
+
+    if hashes is None:
+        hashes = get_hashes(*paths, other=other)
+
+    if hashfile.exists():
+        if verbose:
+            session.log(f"hash file {hashfile} exists")
+        unchanged = hashes == read_hashfile(hashfile)
+    else:
+        unchanged = False
+
+    if unchanged:
+        session.log(f"session {session.name} unchanged")
+    else:
+        session.log(f"session {session.name} changed")
+
+    return unchanged, hashes
+
+
+def get_hashes(
+    *paths: str | Path,
+    other: dict[str, Any] | None = None,
+) -> dict[str, str]:
+    """Get md5 hashes for paths"""
+
+    out = {"path": {str(path): _get_file_hash(path) for path in paths}}
+
+    if other:
+        import hashlib
+
+        other_hashes = {}
+        for k, v in other.items():
+            if not isinstance(v, str):
+                try:
+                    v = str(sorted(v))
+                except Exception:
+                    v = str(v)
+            other_hashes[k] = hashlib.md5(v.encode("utf-8")).hexdigest()
+
+        out["other"] = other_hashes
+
+    return out
+
+
+def hashfile_path(session: nox.Session, prefix: PREFIX_HASH_EXTS) -> Path:
+    """Path for hashfile for this session"""
+    return Path(session.create_tmp()) / f"{prefix}.json"
+
+
+def write_hashfile(
+    hashes: dict[str, str],
+    session: nox.Session,
+    prefix: PREFIX_HASH_EXTS,
+) -> None:
+    import json
+
+    path = hashfile_path(session, prefix)
+
+    with open(path, "w") as f:
+        json.dump(hashes, f)
+
+
+def read_hashfile(
+    path: str | Path,
+) -> dict[str, str]:
+    import json
+
+    with open(path) as f:
+        data = json.load(f)
+    return cast(dict[str, str], data)
+
+
+def _get_file_hash(path: str | Path, buff_size=65536) -> str:
+    import hashlib
+
+    md5 = hashlib.md5()
+    with open(path, "rb") as f:
+        while True:
+            data = f.read(buff_size)
+            if not data:
+                break
+            md5.update(data)
+    return md5.hexdigest()
+
+
+# from contextlib import contextmanager
+# @contextmanager
+# def check_hashed_env(
+#     session: nox.Session,
+#     *paths: str | Path,
+#     prefix: Literal["env", "lock"],
+#     verbose: bool=True,
+#     recreate_session=False,
+# ):
+
+#     changed, hashes = env_hashes_changed(session, *paths, prefix, verbose=verbose, return_hashes=True)
+
+
+#     if changed and recreate_session:
+#         if verbose:
+#             session.log("env changed. 
Recreating {session.virtualenv.location_name}") +# _reuse_original = session.virtualenv.reuse_existing +# _no_install_original = session._runner.global_config.no_install + +# session.virtualenv.reuse_existing = False +# session._runner.global_config.no_install = False + +# session.virtualenv.create() +# env_hashes_changed(session, *paths, prefix, verbose=verbose, hashes=hashes) + +# session.virtualenv.reuse_existing = _reuse_original + + +# def _remove_python_from_yaml(path): +# from yaml import safe_dump + +# path = Path(path) + +# with path.open("r") as f: +# data = safe_load(f) + +# from copy import deepcopy + +# out = deepcopy(data) + +# for dep in list(out["dependencies"]): +# if isinstance(dep, str) and dep[: len("python")] == "python": +# out["dependencies"].remove(dep) + +# path_out = path.with_suffix(".final.yaml") + +# with path_out.open("w") as f: +# safe_dump(out, f) + +# return path_out + + +# def session_install_envs_update( +# session: nox.Session, +# conda_backend: str, +# *paths: str | Path, +# remove_python: bool = True, +# deps: Sequence[str] | None = None, +# reqs: Sequence[str] | None = None, +# conda_install_kws: Mapping[str, str] | None = None, +# install_kws: Mapping[str, str] | None = None, +# display_name: str | None = None, +# ) -> None: +# """Install multiple 'environment.yaml' files.""" + +# if session_skip_install(session): +# return + +# from shutil import which + +# if not which("conda-merge"): +# session.conda_install("conda-merge") + +# # pin the python version + +# with tempfile.TemporaryDirectory() as d: +# yaml = Path(d) / "tmp_env.yaml" +# with yaml.open("w") as f: +# session.run("conda-merge", *paths, stdout=f, external=True) + +# if remove_python: +# yaml = _remove_python_from_yaml(yaml) + +# session.run("cat", str(yaml), external=True, silent=False) + +# session.run( +# conda_backend, +# "env", +# "update", +# "--prefix", +# session.virtualenv.location, +# "--file", +# str(yaml), +# silent=True, +# external=True, +# ) + +# session_set_ipykernel_display_name(session, display_name) + + +# def pin_python_version(session: nox.Session): +# path = Path(session.virtualenv.location) / "conda-meta" / "pinned" + +# with path.open("w") as f: +# session.run( +# "python", +# "-c", +# """import sys; print("python=={v.major}.{v.minor}.{v.micro}".format(v=sys.version_info))""", +# stdout=f, +# ) + +# def session_install_envs_update_pin( +# session: nox.Session, +# conda_backend: str, +# *paths: str | Path, +# display_name: str | None = None, +# **kws, +# ) -> None: +# """Install multiple 'environment.yaml' files.""" + +# if session_skip_install(session): +# return + +# from shutil import which + +# if not which("conda-merge"): +# session.conda_install("conda-merge") + +# # pin the python version +# pin_python_version(session) + +# with tempfile.TemporaryDirectory() as d: +# yaml = Path(d) / "tmp_env.yaml" +# with yaml.open("w") as f: +# session.run("conda-merge", *paths, stdout=f, external=True) + +# session.run("cat", str(yaml), external=True, silent=False) + +# session.run( +# conda_backend, +# "env", +# "update", +# "--prefix", +# session.virtualenv.location, +# "--file", +# str(yaml), +# silent=True, +# external=True, +# **kws, +# ) + +# session_set_ipykernel_display_name(session, display_name) + + +# def parse_args_for_flag(args, flag, action="value"): +# """ +# Parse args for flag and pop it off args + +# Parameters +# ---------- +# args : iterable +# For example, session.posargs. 
+# flag : string +# For example, `flag='--run-external' +# action : {'value', 'values', 'store_true', 'store_false'} + +# If flag can take multiple values, they should be separated by commas + +# If multiples, return a tuple, else return a string. +# """ +# flag = flag.strip() +# n = len(flag) + +# def process_value(arg): +# if action == "store_true": +# value = True +# elif action == "store_false": +# value = False +# else: +# s = arg.split("=") +# if len(s) != 2: +# raise ValueError(f"must supply {flag}=value") +# if action == "value": +# value = s[-1].strip() +# else: +# value = tuple(_.strip() for _ in s[-1].split(",")) + +# return value + +# def check_for_flag(arg): +# s = arg.strip() +# if action.startswith("value"): +# return s[:n] == f"{flag}" +# else: +# return s == flag + +# # initial value +# if action == "store_true": +# value = False +# elif action == "store_false": +# value = True +# elif action in ["value", "values"]: +# value = None +# else: +# raise ValueError( +# f"action {action} must be one of [store_true, store_false, value, values]" +# ) + +# out = [] +# for arg in args: +# if check_for_flag(arg): +# value = process_value(arg) +# else: +# out.append(arg) + +# return value, out + + +# def parse_args_run_external(args): +# """Parse (and pop) for --run-external flag""" +# return parse_args_for_flag(args, flag="--run-external", action="store_true") + + +# def parse_args_test_version(args): +# """Parse for flag --test-version=...""" +# return parse_args_for_flag(args, flag="--test-version", action="value") + + +# def parse_args_pip_extras(args, default=None, join=True): +# """Parse for flag '--pip-extras=...""" +# extras, args = parse_args_for_flag(args, flag="--pip-extras", action="values") + +# if extras: +# extras = set(extras) + +# if default: +# if extras is None: +# extras = set() +# if isinstance(default, str): +# default = (default,) +# for d in default: +# extras.update(d.split(",")) + +# if extras and join: +# extras = ",".join(extras) + +# return extras, args + + +# def check_args_with_default(args, default=None): +# """If no args and have a default, place it in args.""" +# if not args and default: +# if isinstance(default, str): +# default = default.split() +# args = default +# return args + + +# def run_with_external_check( +# session, args=None, default=None, check_run_external=True, **kws +# ): +# """ +# Use session.run with session.posargs. +# Perform `seesion.run(*args)`, where `args` comes from posargs. +# If no posargs, then use default. +# Also, check for flag '--run-external'. 
If present, +# call `session.run(*args, external=True)` +# """ + +# if args is None: +# args = session.posargs + +# if check_run_external: +# external, args = parse_args_run_external(args) +# else: +# external = False + +# args = check_args_with_default(args, default=default) + +# # session.log(f"args {args}") +# # session.log(f"external {external}") +# # session.run(*args, external=external, **kws) diff --git a/tox.ini b/tox.ini deleted file mode 100644 index d7dab231..00000000 --- a/tox.ini +++ /dev/null @@ -1,149 +0,0 @@ -[tox] -isolated_build = True -requires = tox-conda -envlist = - # test - test-py3{8, 9, 10} - -[base] -package_name = thermoextrap -import_name = thermoextrap -build_python = python3.10 -conda_env = {toxinidir}/environment.yaml -conda_env_dev = {toxinidir}/environment/dev.yaml -conda_env_test = {toxinidir}/environment/test.yaml -conda_env_docs = {toxinidir}/environment/docs.yaml -conda_env_dist_pypi = {toxinidir}/environment/dist-pypi.yaml -conda_env_dist_conda = {toxinidir}/environment/dist-conda.yaml -conda_spec = {toxinidir}/environment/conda-spec.txt -conda_env_lint = {toxinidir}/environment/lint.yaml -conda_channels = - wpk-nist - conda-forge -conda_deps_test = -allowlist_externals = - bash - make -commands_test_check = - python --version - python -c 'import {[base]import_name}; print( {[base]import_name}.__version__)' - bash -ec 'echo $PWD' - -[testenv] -passenv = - SETUPTOOLS_SCM_PRETEND_VERSION - TEST_VERSION - # general command - command - # linting - mypy_args - pyright_args - pytype_args - release_args - # dist-conda stuff - project_name - sdist_path - grayskull_args - recipe_base_path - recipe_append_path -usedevelop = - test: True -conda_env = - test: {[base]conda_env_test} -allowlist_externals = - {[base]allowlist_externals} -commands = - {[base]commands_test_check} - {posargs:pytest} - -[testenv:dev] -description = - Create development environment. -usedevelop = True -basepython = {[base]build_python} -conda_env = {[base]conda_env_dev} -envdir = {toxworkdir}/dev -commands = - {posargs:bash -ec 'conda list'} - -[testenv:docs] -description = - Runs make in docs directory. - For example, 'tox -e docs -- html' -> 'make -C docs html'. - With 'release' option, you can set the message with 'message=...' in posargs. 
-usedevelop = True -envdir = {toxworkdir}/docs -basepython = {[base]build_python} -conda_env = {[base]conda_env_docs} -conda_spec = {[base]conda_spec} -changedir = {toxinidir}/docs -commands = - make {posargs:html} - -[testenv:dist-pypi] -description = - Runs make -f scrips/dist-pypi.mk posargs - For example, 'tox -e dist-pypi -- build' -> 'make -f scripts/dist-pypi.mk build' -skip_install = True -envdir = {toxworkdir}/dist-pypi -basepython = {[base]build_python} -conda_env = {[base]conda_env_dist_pypi} -changedir = {toxinidir} -commands = - make -f {toxinidir}/scripts/dist-pypi.mk {posargs:build} - -[testenv:dist-conda] -description = - Runs make -C dist-conda posargs - recipe: build conda recipe using grayskull (can optionally pass a local sdist) - build: build conda distribution - command: run arbitrary command -skip_install = True -envdir = {toxworkdir}/dist-conda -basepython = {[base]build_python} -conda_env = {[base]conda_env_dist_conda} -changedir = {toxinidir} -commands = - make -f {toxinidir}/scripts/dist-conda.mk {posargs} project_name={env:project_name:{[base]package_name}} - -[testenv:testdist-{pypi, conda}-{local,remote}-py3{8, 9, 10, 11}] -conda_channels = - {[base]conda_channels} -description = - Test install from - pypi: pypi - conda: conda - using either - local: local - remote: remote - versions. -skip_install = True -conda_env = {toxinidir}/environment/test-extras.yaml -conda_deps = - conda-remote: {[base]package_name}{env:TEST_VERSION:''} - conda-local: {posargs} - conda: pymbar<4.0 -deps = - pypi-remote: {[base]package_name}[mbar,gpr]{env:TEST_VERSION:''} - pypi-local: {posargs}[mbar,gpr] - conda: tensorflow - conda: tensorflow-probability - conda: gpflow - -[testenv:testpip-py3{8, 9, 10, 11}] -description = - Test package against pip installed packages -usedevelop = True -extras = test -conda_env = {toxinidir}/environment/test-extras.yaml - -[testenv:lint] -description = - Run linters - For example, 'tox -e lint -- mypy mypy_args=...' runs 'mypy $mypy_args' -conda_env = {[base]conda_env_lint} -usedevelop = True -envdir = {toxworkdir}/lint -basepython = {[base]build_python} -commands = - make -f {toxinidir}/scripts/lint.mk {posargs:mypy}
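
With `tox.ini` removed, the old environments map onto nox sessions defined in the `noxfile.py` added by this diff. The sketch below summarizes the rough correspondence; the option spellings follow the `cmd_annotated` flags in the noxfile (`-p`, `-c`, `-m`) and should be treated as indicative rather than definitive:

```python
# Rough tox -> nox correspondence for this repo (indicative, not exhaustive).
migration = {
    "tox -e dev":                  "nox -s dev",
    "tox -e docs -- html":         "nox -s docs",  # 'html' is the default command
    "tox -e dist-pypi -- build":   "nox -s dist_pypi -- -p build",
    "tox -e dist-conda -- recipe": "nox -s dist_conda -- -c recipe",
    "tox -e lint -- mypy":         "nox -s typing -- -m mypy",
    "tox (test-py3{8,9,10})":      "nox -s test",
}
for old, new in migration.items():
    print(f"{old:30} -> {new}")
```
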