diff --git a/.lib/git-fleximod/.github/workflows/pre-commit b/.lib/git-fleximod/.github/workflows/pre-commit new file mode 100644 index 0000000000..1a6ad0082a --- /dev/null +++ b/.lib/git-fleximod/.github/workflows/pre-commit @@ -0,0 +1,13 @@ +name: pre-commit +on: + pull_request: + push: + branches: [main] + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v3 + - uses: pre-commit/action@v3.0.0 diff --git a/.lib/git-fleximod/.github/workflows/pytest.yaml b/.lib/git-fleximod/.github/workflows/pytest.yaml new file mode 100644 index 0000000000..6cb8102b94 --- /dev/null +++ b/.lib/git-fleximod/.github/workflows/pytest.yaml @@ -0,0 +1,80 @@ +# Run this job on pushes to `main`, and for pull requests. If you don't specify +# `branches: [main]`, then this action runs _twice_ on pull requests, which is +# annoying. + +on: + push: + branches: [main] + pull_request: + branches: [main] + +jobs: + test: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + # If you wanted to use multiple Python versions, you'd have to specify a matrix in the job and + # reference the matrix Python version here. + - uses: actions/setup-python@v5 + with: + python-version: '3.12' + + # Cache the installation of Poetry itself, e.g. the next step. This prevents the workflow + # from installing Poetry every time, which can be slow. Note the use of the Poetry version + # number in the cache key, and the "-0" suffix: this allows you to invalidate the cache + # manually if/when you want to upgrade Poetry, or if something goes wrong. This could be + # mildly cleaner by using an environment variable, but I don't really care. + - name: cache poetry install + uses: actions/cache@v4 + with: + path: ~/.local + key: poetry-1.8.2 + + # Install Poetry. You could do this manually, or there are several actions that do this. + # `snok/install-poetry` seems to be minimal yet complete, and really just calls out to + # Poetry's default install script, which feels correct. I pin the Poetry version here + # because Poetry does occasionally change APIs between versions and I don't want my + # actions to break if it does. + # + # The key configuration value here is `virtualenvs-in-project: true`: this creates the + # venv as a `.venv` in your testing directory, which allows the next step to easily + # cache it. + - uses: snok/install-poetry@v1 + with: + version: 1.8.2 + virtualenvs-create: true + virtualenvs-in-project: true + + # Cache your dependencies (i.e. all the stuff in your `pyproject.toml`). Note the cache + # key: if you're using multiple Python versions, or multiple OSes, you'd need to include + # them in the cache key. I'm not, so it can be simple and just depend on the poetry.lock. + - name: cache deps + id: cache-deps + uses: actions/cache@v4 + with: + path: .venv + key: pydeps-${{ hashFiles('**/poetry.lock') }} + + # Install dependencies. `--no-root` means "install all dependencies but not the project + # itself", which is what you want to avoid caching _your_ code. The `if` statement + # ensures this only runs on a cache miss. + - run: poetry install --no-interaction --no-root + if: steps.cache-deps.outputs.cache-hit != 'true' + + # Now install _your_ project. This isn't necessary for many types of projects -- particularly + # things like Django apps don't need this. But it's a good idea since it fully-exercises the + # pyproject.toml and makes sure that if you add things like console-scripts at some point, + # they'll be installed and working.
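+      # For example (hypothetical entry shown here; the project's actual console scripts, if any,
+      # are declared in its pyproject.toml), a declaration such as:
+      #   [tool.poetry.scripts]
+      #   git-fleximod = "git_fleximod.git_fleximod:main"
+      # would mean that after the full install below, `poetry run git-fleximod --version`
+      # exercises that entry point end-to-end.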
+ - run: poetry install --no-interaction + + # And finally run tests. I'm using pytest and all my pytest config is in my `pyproject.toml` + # so this line is super-simple. But it could be as complex as you need. + - run: | + git config --global user.name "${GITHUB_ACTOR}" + git config --global user.email "${GITHUB_ACTOR_ID}+${GITHUB_ACTOR}@users.noreply.github.com" + poetry run pytest + - name: Setup tmate session + if: ${{ failure() }} + uses: mxschmitt/action-tmate@v3 + diff --git a/.lib/git-fleximod/.pre-commit-config.yaml b/.lib/git-fleximod/.pre-commit-config.yaml new file mode 100644 index 0000000000..2f6089da72 --- /dev/null +++ b/.lib/git-fleximod/.pre-commit-config.yaml @@ -0,0 +1,18 @@ +exclude: ^utils/.*$ + +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: end-of-file-fixer + - id: trailing-whitespace + - repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black + - repo: https://github.com/PyCQA/pylint + rev: v2.11.1 + hooks: + - id: pylint + args: + - --disable=I,C,R,logging-not-lazy,wildcard-import,unused-wildcard-import,fixme,broad-except,bare-except,eval-used,exec-used,global-statement,logging-format-interpolation,no-name-in-module,arguments-renamed,unspecified-encoding,protected-access,import-error,no-member diff --git a/.lib/git-fleximod/CODE_OF_CONDUCT.md b/.lib/git-fleximod/CODE_OF_CONDUCT.md new file mode 100644 index 0000000000..84f2925bba --- /dev/null +++ b/.lib/git-fleximod/CODE_OF_CONDUCT.md @@ -0,0 +1,107 @@ +# Contributor Code of Conduct +_The Contributor Code of Conduct is for participants in our software projects and community._ + +## Our Pledge +We, as contributors, creators, stewards, and maintainers (participants), of **git-fleximod** pledge to make participation in +our software, system or hardware project and community a safe, productive, welcoming and inclusive experience for everyone. +All participants are required to abide by this Code of Conduct. +This includes respectful treatment of everyone regardless of age, body size, disability, ethnicity, gender identity or expression, +level of experience, nationality, political affiliation, veteran status, pregnancy, genetic information, physical appearance, race, +religion, or sexual orientation, as well as any other characteristic protected under applicable US federal or state law. + +## Our Standards +Examples of behaviors that contribute to a positive environment include: + +* All participants are treated with respect and consideration, valuing a diversity of views and opinions +* Be considerate, respectful, and collaborative +* Communicate openly with respect for others, critiquing ideas rather than individuals and gracefully accepting criticism +* Acknowledging the contributions of others +* Avoid personal attacks directed toward other participants +* Be mindful of your surroundings and of your fellow participants +* Alert UCAR staff and suppliers/vendors if you notice a dangerous situation or someone in distress +* Respect the rules and policies of the project and venue + +Examples of unacceptable behavior include, but are not limited to: + +* Harassment, intimidation, or discrimination in any form +* Physical, verbal, or written abuse by anyone to anyone, including repeated use of pronouns other than those requested +* Unwelcome sexual attention or advances +* Personal attacks directed at other guests, members, participants, etc. 
+* Publishing others' private information, such as a physical or electronic address, without explicit permission +* Alarming, intimidating, threatening, or hostile comments or conduct +* Inappropriate use of nudity and/or sexual images +* Threatening or stalking anyone, including a participant +* Other conduct which could reasonably be considered inappropriate in a professional setting + +## Scope +This Code of Conduct applies to all spaces managed by the Project whether they be physical, online or face-to-face. +This includes project code, code repository, associated web pages, documentation, mailing lists, project websites and wiki pages, +issue tracker, meetings, telecons, events, project social media accounts, and any other forums created by the project team which the +community uses for communication. +In addition, violations of this Code of Conduct outside these spaces may affect a person's ability to participate within them. +Representation of a project may be further defined and clarified by project maintainers. + +## Community Responsibilities +Everyone in the community is empowered to respond to people who are showing unacceptable behavior. +They can talk to them privately or publicly. +Anyone requested to stop unacceptable behavior is expected to comply immediately. +If the behavior continues concerns may be brought to the project administrators or to any other party listed in the +[Reporting](#reporting) section below. + +## Project Administrator Responsibilities +Project administrators are responsible for clarifying the standards of acceptable behavior and are encouraged to model appropriate +behavior and provide support when people in the community point out inappropriate behavior. +Project administrator(s) are normally the ones that would be tasked to carry out the actions in the [Consequences](#consequences) +section below. + +Project administrators are also expected to keep this Code of Conduct updated with the main one housed at UCAR, as listed below in +the [Attribution](#attribution) section. + +## Reporting +Instances of unacceptable behavior can be brought to the attention of the project administrator(s) who may take any action as +outlined in the [Consequences](#consequences) section below. +However, making a report to a project administrator is not considered an 'official report' to UCAR. + +Instances of unacceptable behavior may also be reported directly to UCAR pursuant to [UCAR's Harassment Reporting and Complaint +Procedure](https://www2.fin.ucar.edu/procedures/hr/harassment-reporting-and-complaint-procedure), or anonymously through [UCAR's +EthicsPoint Hotline](https://www2.fin.ucar.edu/ethics/anonymous-reporting). + +Complaints received by UCAR will be handled pursuant to the procedures outlined in UCAR's Harassment Reporting and Complaint +Procedure. +Complaints to UCAR will be held as confidential as practicable under the circumstances, and retaliation against a person who +initiates a complaint or an inquiry about inappropriate behavior will not be tolerated. + +Any Contributor can use these reporting methods even if they are not directly affiliated with UCAR. +The Frequently Asked Questions (FAQ) page for reporting is [here](https://www2.fin.ucar.edu/procedures/hr/reporting-faqs). + +## Consequences +Upon receipt of a complaint, the project administrator(s) may take any action deemed necessary and appropriate under the +circumstances. 
+Such action can include things such as: removing, editing, or rejecting comments, commits, code, wiki edits, email, issues, and +other contributions that are not aligned to this Code of Conduct, or banning temporarily or permanently any contributor for other +behaviors that are deemed inappropriate, threatening, offensive, or harmful. +Project administrators also have the right to report violations to UCAR HR and/or UCAR's Office of Diversity, Equity and Inclusion +(ODEI), as well as a participant's home institution and/or law enforcement. +In the event an incident is reported to UCAR, UCAR will follow its Harassment Reporting and Complaint Procedure. + +## Process for Changes +All UCAR managed projects are required to adopt this Contributor Code of Conduct. +Adoption is assumed even if not expressly stated in the repository. +Projects should fill in sections where prompted with project-specific information, including, project name and adoption date. + +Projects that adopt this Code of Conduct need to stay up to date with UCAR's Contributor Code of Conduct, linked with a DOI in the +[Attribution](#attribution) section below. +Projects can make limited substantive changes to the Code of Conduct, however, the changes must be limited in scope and may not +contradict the UCAR Contributor Code of Conduct. + +## Attribution +This Code of Conduct was originally adapted from the [Contributor Covenant](http://contributor-covenant.org/version/1/4), version +1.4. +We then aligned it with the UCAR Participant Code of Conduct, which also borrows from the American Geophysical Union (AGU) Code of +Conduct. +The UCAR Participant Code of Conduct applies to both UCAR employees as well as participants in activities run by UCAR. +The original version of this for all software projects that have strong management from UCAR or UCAR staff is available on the UCAR +website at https://doi.org/10.5065/6w2c-a132. +The date that it was adopted by this project was **Feb/13/2018**. +When responding to complaints, UCAR HR and ODEI will do so based on the latest published version. +Therefore, any project-specific changes should follow the [Process for Changes](#process-for-changes) section above. diff --git a/.lib/git-fleximod/License b/.lib/git-fleximod/License new file mode 100644 index 0000000000..88bc22515e --- /dev/null +++ b/.lib/git-fleximod/License @@ -0,0 +1,20 @@ +Copyright 2024 NSF National Center for Atmospheric Sciences (NCAR) + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +“Software”), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/.lib/git-fleximod/README.md b/.lib/git-fleximod/README.md new file mode 100644 index 0000000000..53917da400 --- /dev/null +++ b/.lib/git-fleximod/README.md @@ -0,0 +1,108 @@ +# git-fleximod + +Flexible, Enhanced Submodule Management for Git + +## Overview + +Git-fleximod is a Python-based tool that extends Git's submodule and sparse checkout capabilities, offering additional features for managing submodules in a more flexible and efficient way. + +## Installation + + If you choose to locate git-fleximod in your path, you can access it via the command: git fleximod + +## Usage + + Basic Usage: + git fleximod [options] + Available Commands: + status: Display the status of submodules. + update: Update submodules to the tag indicated in .gitmodules variable fxtag. + test: Make sure that fxtags and submodule hashes are consistent, + make sure that official urls (as defined by fxDONOTUSEurl) are set + make sure that fxtags are defined for all submodules + Additional Options: + See git fleximod --help for more details. + +## Supported .gitmodules Variables + + fxtag: Specify a specific tag or branch to checkout for a submodule. + fxrequired: Mark a submodule's checkout behavior, with allowed values: + - ToplevelRequired: Top-level and required (checked out only when this is the Toplevel module). + - ToplevelOptional: Top-level and optional (checked out with --optional flag if this is the Toplevel module). + - AlwaysRequired: Always required (always checked out). + - AlwaysOptional: Always optional (checked out with --optional flag). + fxsparse: Enable sparse checkout for a submodule, pointing to a file containing sparse checkout paths. + fxDONOTUSEurl: This is the url used in the test subcommand to ensure that protected branches do not point to forks + **NOTE** the fxDONOTUSEurl variable is only used to identify the official project repository and should not be + changed by users. Use the url variable to change to a fork if desired. + +## Sparse Checkouts + + To enable sparse checkout for a submodule, set the fxsparse variable + in the .gitmodules file to the path of a file containing the desired + sparse checkout paths. Git-fleximod will automatically configure + sparse checkout based on this file when applicable commands are run. + See [git-sparse-checkout](https://git-scm.com/docs/git-sparse-checkout#_internalsfull_pattern_set) + for details on the format of this file. + +## Tests + + The git fleximod test action is designed to be used by, for example, GitHub workflows + to ensure that protected branches are consistent with respect to submodule hashes and fleximod fxtags + +## Examples + +Here are some common usage examples: + +Update all submodules, including optional ones: +```bash + git fleximod update --optional +``` + +Update a specific submodule to the fxtag indicated in .gitmodules: + +```bash + git fleximod update submodule-name +``` +Example .gitmodules entry: +```ini, toml + [submodule "cosp2"] + path = src/physics/cosp2/src + url = https://github.com/CFMIP/COSPv2.0 + fxsparse = ../.cosp_sparse_checkout + fxrequired = AlwaysRequired + fxtag = v2.1.4cesm +``` +Explanation: + +This entry indicates that the submodule named cosp2 at tag v2.1.4cesm +should be checked out into the directory src/physics/cosp2/src +relative to the .gitmodules directory. It should be checked out from +the URL https://github.com/CFMIP/COSPv2.0 and use sparse checkout as +described in the file ../.cosp_sparse_checkout relative to the path +directory.
It should be checked out anytime this .gitmodules entry is +read. + +Additional example: +```ini, toml + [submodule "cime"] + path = cime + url = https://github.com/jedwards4b/cime + fxrequired = ToplevelRequired + fxtag = cime6.0.198_rme01 +``` + +Explanation: + +This entry indicates that the submodule cime should be checked out +into a directory named cime at tag cime6.0.198_rme01 from the URL +https://github.com/jedwards4b/cime. This should only be done if +the .gitmodules file is at the top level of the repository clone. + +## Contributing + +We welcome contributions! Please see the CONTRIBUTING.md file for guidelines. + +## License + +Git-fleximod is released under the MIT License. diff --git a/.lib/git-fleximod/doc/Makefile b/.lib/git-fleximod/doc/Makefile new file mode 100644 index 0000000000..d4bb2cbb9e --- /dev/null +++ b/.lib/git-fleximod/doc/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/.lib/git-fleximod/doc/conf.py b/.lib/git-fleximod/doc/conf.py new file mode 100644 index 0000000000..423099eec9 --- /dev/null +++ b/.lib/git-fleximod/doc/conf.py @@ -0,0 +1,26 @@ +# Configuration file for the Sphinx documentation builder. +# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = "git-fleximod" +author = "Jim Edwards " +release = "0.4.0" + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = ["sphinx_argparse_cli"] + +templates_path = ["_templates"] +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +html_theme = "alabaster" +html_static_path = ["_static"] diff --git a/.lib/git-fleximod/doc/index.rst b/.lib/git-fleximod/doc/index.rst new file mode 100644 index 0000000000..0f9c1a7f7e --- /dev/null +++ b/.lib/git-fleximod/doc/index.rst @@ -0,0 +1,24 @@ +.. git-fleximod documentation master file, created by + sphinx-quickstart on Sat Feb 3 12:02:22 2024. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to git-fleximod's documentation! +======================================== + +.. toctree:: + :maxdepth: 2 + :caption: Contents: +.. module:: sphinxcontrib.autoprogram +.. 
sphinx_argparse_cli:: + :module: git_fleximod.cli + :func: get_parser + :prog: git-fleximod + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/.lib/git-fleximod/doc/make.bat b/.lib/git-fleximod/doc/make.bat new file mode 100644 index 0000000000..32bb24529f --- /dev/null +++ b/.lib/git-fleximod/doc/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/.lib/git-fleximod/escomp_install b/.lib/git-fleximod/escomp_install new file mode 100644 index 0000000000..ae782e72a4 --- /dev/null +++ b/.lib/git-fleximod/escomp_install @@ -0,0 +1,25 @@ +#!/usr/bin/env python +# updates git-fleximod in an ESCOMP model +# this script should be run from the model root directory, it expects +# git-fleximod to already be installed with the script in bin +# and the classes in lib/python/site-packages +import sys +import shutil +import os + +from glob import iglob + +fleximod_root = sys.argv[1] +fleximod_path = os.path.join(fleximod_root,"src","git-fleximod") +if os.path.isfile(fleximod_path): + with open(fleximod_path,"r") as f: + fleximod = f.readlines() + with open(os.path.join(".","bin","git-fleximod"),"w") as f: + for line in fleximod: + f.write(line) + if "import argparse" in line: + f.write('\nsys.path.append(os.path.join(os.path.dirname(__file__),"..","lib","python","site-packages"))\n\n') + + for file in iglob(os.path.join(fleximod_root, "src", "fleximod", "*.py")): + shutil.copy(file, + os.path.join("lib","python","site-packages","fleximod",os.path.basename(file))) diff --git a/.lib/git-fleximod/git_fleximod/__init__.py b/.lib/git-fleximod/git_fleximod/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/.lib/git-fleximod/git_fleximod/cli.py b/.lib/git-fleximod/git_fleximod/cli.py new file mode 100644 index 0000000000..208bc04684 --- /dev/null +++ b/.lib/git-fleximod/git_fleximod/cli.py @@ -0,0 +1,129 @@ +from pathlib import Path +import argparse +from git_fleximod import utils + +__version__ = "0.9.2" + +def find_root_dir(filename=".gitmodules"): + """ finds the highest directory in tree + which contains a file called filename """ + d = Path.cwd() + root = Path(d.root) + dirlist = [] + dl = d + while dl != root: + dirlist.append(dl) + dl = dl.parent + dirlist.append(root) + dirlist.reverse() + + for dl in dirlist: + attempt = dl / filename + if attempt.is_file(): + return str(dl) + return None + + +def get_parser(): + description = """ + %(prog)s manages checking out groups of gitsubmodules with additional support for Earth System Models + """ + parser = argparse.ArgumentParser( + description=description, formatter_class=argparse.RawDescriptionHelpFormatter + ) + + # + # user options + # + choices = ["update", "status", "test"] + parser.add_argument( 
+ "action", + choices=choices, + default="update", + help=f"Subcommand of git-fleximod, choices are {choices[:-1]}", + ) + + parser.add_argument( + "components", + nargs="*", + help="Specific component(s) to checkout. By default, " + "all required submodules are checked out.", + ) + + parser.add_argument( + "-C", + "--path", + default=find_root_dir(), + help="Toplevel repository directory. Defaults to top git directory relative to current.", + ) + + parser.add_argument( + "-g", + "--gitmodules", + nargs="?", + default=".gitmodules", + help="The submodule description filename. " "Default: %(default)s.", + ) + + parser.add_argument( + "-x", + "--exclude", + nargs="*", + help="Component(s) listed in the gitmodules file which should be ignored.", + ) + parser.add_argument( + "-f", + "--force", + action="store_true", + default=False, + help="Override cautions and update or checkout over locally modified repository.", + ) + + parser.add_argument( + "-o", + "--optional", + action="store_true", + default=False, + help="By default only the required submodules " + "are checked out. This flag will also checkout the " + "optional submodules relative to the toplevel directory.", + ) + + parser.add_argument( + "-v", + "--verbose", + action="count", + default=0, + help="Output additional information to " + "the screen and log file. This flag can be " + "used up to two times, increasing the " + "verbosity level each time.", + ) + + parser.add_argument( + "-V", + "--version", + action="version", + version=f"%(prog)s {__version__}", + help="Print version and exit.", + ) + + # + # developer options + # + parser.add_argument( + "--backtrace", + action="store_true", + help="DEVELOPER: show exception backtraces as extra " "debugging output", + ) + + parser.add_argument( + "-d", + "--debug", + action="store_true", + default=False, + help="DEVELOPER: output additional debugging " + "information to the screen and log file.", + ) + + return parser diff --git a/.lib/git-fleximod/git_fleximod/git_fleximod.py b/.lib/git-fleximod/git_fleximod/git_fleximod.py new file mode 100755 index 0000000000..0851e9d095 --- /dev/null +++ b/.lib/git-fleximod/git_fleximod/git_fleximod.py @@ -0,0 +1,365 @@ +#!/usr/bin/env python +import sys + +MIN_PYTHON = (3, 7) +if sys.version_info < MIN_PYTHON: + sys.exit("Python %s.%s or later is required." 
% MIN_PYTHON) + +import os +import shutil +import logging +import textwrap +from git_fleximod import utils +from git_fleximod import cli +from git_fleximod.gitinterface import GitInterface +from git_fleximod.gitmodules import GitModules +from git_fleximod.submodule import Submodule + +# logger variable is global +logger = None + + +def fxrequired_allowed_values(): + return ["ToplevelRequired", "ToplevelOptional", "AlwaysRequired", "AlwaysOptional", "TopLevelRequired", "TopLevelOptional"] + + +def commandline_arguments(args=None): + parser = cli.get_parser() + + if args: + options = parser.parse_args(args) + else: + options = parser.parse_args() + + # explicitly listing a component overrides the optional flag + if options.optional or options.components: + fxrequired = fxrequired_allowed_values() + else: + fxrequired = ["ToplevelRequired", "AlwaysRequired", "TopLevelRequired"] + + action = options.action + if not action: + action = "update" + handlers = [logging.StreamHandler()] + + if options.debug: + try: + open("fleximod.log", "w") + except PermissionError: + sys.exit("ABORT: Could not write file fleximod.log") + level = logging.DEBUG + handlers.append(logging.FileHandler("fleximod.log")) + elif options.verbose: + level = logging.INFO + else: + level = logging.WARNING + # Configure the root logger + logging.basicConfig( + level=level, format="%(name)s - %(levelname)s - %(message)s", handlers=handlers + ) + + if hasattr(options, "version"): + exit() + + return ( + options.path, + options.gitmodules, + fxrequired, + options.components, + options.exclude, + options.force, + action, + ) + + +def submodule_sparse_checkout(root_dir, name, url, path, sparsefile, tag="master"): + """ + This function performs a sparse checkout of a git submodule. It does so by first creating the .git/info/sparse-checkout fileq + in the submodule and then checking out the desired tag. If the submodule is already checked out, it will not be checked out again. + Creating the sparse-checkout file first prevents the entire submodule from being checked out and then removed. This is important + because the submodule may have a large number of files and checking out the entire submodule and then removing it would be time + and disk space consuming. + + Parameters: + root_dir (str): The root directory for the git operation. + name (str): The name of the submodule. + url (str): The URL of the submodule. + path (str): The path to the submodule. + sparsefile (str): The sparse file for the submodule. + tag (str, optional): The tag to checkout. Defaults to "master". 
+ + Returns: + None + """ + logger.info("Called sparse_checkout for {}".format(name)) + rgit = GitInterface(root_dir, logger) + superroot = git_toplevelroot(root_dir, logger) + + if superroot: + gitroot = superroot.strip() + else: + gitroot = root_dir.strip() + assert os.path.isdir(os.path.join(gitroot, ".git")) + # first create the module directory + if not os.path.isdir(os.path.join(root_dir, path)): + os.makedirs(os.path.join(root_dir, path)) + + # initialize a new git repo and set the sparse checkout flag + sprep_repo = os.path.join(root_dir, path) + sprepo_git = GitInterface(sprep_repo, logger) + if os.path.exists(os.path.join(sprep_repo, ".git")): + try: + logger.info("Submodule {} found".format(name)) + chk = sprepo_git.config_get_value("core", "sparseCheckout") + if chk == "true": + logger.info("Sparse submodule {} already checked out".format(name)) + return + except NoOptionError: + logger.debug("Sparse submodule {} not present".format(name)) + except Exception as e: + utils.fatal_error("Unexpected error {} occured.".format(e)) + + sprepo_git.config_set_value("core", "sparseCheckout", "true") + + # set the repository remote + + logger.info("Setting remote origin in {}/{}".format(root_dir, path)) + _, remotelist = sprepo_git.git_operation("remote", "-v") + if url not in remotelist: + sprepo_git.git_operation("remote", "add", "origin", url) + + topgit = os.path.join(gitroot, ".git") + + if gitroot != root_dir and os.path.isfile(os.path.join(root_dir, ".git")): + with open(os.path.join(root_dir, ".git")) as f: + gitpath = os.path.relpath( + os.path.join(root_dir, f.read().split()[1]), + start=os.path.join(root_dir, path), + ) + topgit = os.path.join(gitpath, "modules") + else: + topgit = os.path.relpath( + os.path.join(root_dir, ".git", "modules"), + start=os.path.join(root_dir, path), + ) + + with utils.pushd(sprep_repo): + if not os.path.isdir(topgit): + os.makedirs(topgit) + topgit += os.sep + name + + if os.path.isdir(os.path.join(root_dir, path, ".git")): + with utils.pushd(sprep_repo): + if os.path.isdir(os.path.join(topgit,".git")): + shutil.rmtree(os.path.join(topgit,".git")) + shutil.move(".git", topgit) + with open(".git", "w") as f: + f.write("gitdir: " + os.path.relpath(topgit)) + # assert(os.path.isdir(os.path.relpath(topgit, start=sprep_repo))) + gitsparse = os.path.abspath(os.path.join(topgit, "info", "sparse-checkout")) + if os.path.isfile(gitsparse): + logger.warning( + "submodule {} is already initialized {}".format(name, topgit) + ) + return + + with utils.pushd(sprep_repo): + if os.path.isfile(sparsefile): + shutil.copy(sparsefile, gitsparse) + + + # Finally checkout the repo + sprepo_git.git_operation("fetch", "origin", "--tags") + sprepo_git.git_operation("checkout", tag) + + print(f"Successfully checked out {name:>20} at {tag}") + rgit.config_set_value(f'submodule "{name}"', "active", "true") + rgit.config_set_value(f'submodule "{name}"', "url", url) + +def init_submodule_from_gitmodules(gitmodules, name, root_dir, logger): + path = gitmodules.get(name, "path") + url = gitmodules.get(name, "url") + assert path and url, f"Malformed .gitmodules file {path} {url}" + tag = gitmodules.get(name, "fxtag") + fxurl = gitmodules.get(name, "fxDONOTUSEurl") + fxsparse = gitmodules.get(name, "fxsparse") + fxrequired = gitmodules.get(name, "fxrequired") + return Submodule(root_dir, name, path, url, fxtag=tag, fxurl=fxurl, fxsparse=fxsparse, fxrequired=fxrequired, logger=logger) + +def submodules_status(gitmodules, root_dir, toplevel=False, depth=0): + testfails = 0 + 
localmods = 0 + needsupdate = 0 + wrapper = textwrap.TextWrapper(initial_indent=' '*(depth*10), width=120,subsequent_indent=' '*(depth*20)) + for name in gitmodules.sections(): + submod = init_submodule_from_gitmodules(gitmodules, name, root_dir, logger) + + result,n,l,t = submod.status() + if toplevel or not submod.toplevel(): + print(wrapper.fill(result)) + testfails += t + localmods += l + needsupdate += n + subdir = os.path.join(root_dir, submod.path) + if os.path.exists(os.path.join(subdir, ".gitmodules")): + gsubmod = GitModules(logger, confpath=subdir) + t,l,n = submodules_status(gsubmod, subdir, depth=depth+1) + if toplevel or not submod.toplevel(): + testfails += t + localmods += l + needsupdate += n + + return testfails, localmods, needsupdate + +def git_toplevelroot(root_dir, logger): + rgit = GitInterface(root_dir, logger) + _, superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree") + return superroot + +def submodules_update(gitmodules, root_dir, requiredlist, force): + for name in gitmodules.sections(): + submod = init_submodule_from_gitmodules(gitmodules, name, root_dir, logger) + + _, needsupdate, localmods, testfails = submod.status() + if not submod.fxrequired: + submod.fxrequired = "AlwaysRequired" + fxrequired = submod.fxrequired + allowedvalues = fxrequired_allowed_values() + assert fxrequired in allowedvalues + + superroot = git_toplevelroot(root_dir, logger) + + if ( + fxrequired + and ((superroot and "Toplevel" in fxrequired) + or fxrequired not in requiredlist) + ): + if "Optional" in fxrequired and "Optional" not in requiredlist: + if fxrequired.startswith("Always"): + print(f"Skipping optional component {name:>20}") + continue + optional = "AlwaysOptional" in requiredlist + + if fxrequired in requiredlist: + submod.update() + repodir = os.path.join(root_dir, submod.path) + if os.path.exists(os.path.join(repodir, ".gitmodules")): + # recursively handle this checkout + print(f"Recursively checking out submodules of {name}") + gitsubmodules = GitModules(submod.logger, confpath=repodir) + newrequiredlist = ["AlwaysRequired"] + if optional: + newrequiredlist.append("AlwaysOptional") + + submodules_update(gitsubmodules, repodir, newrequiredlist, force=force) + +def local_mods_output(): + text = """\ + The submodules labeled with 'M' above are not in a clean state. + The following are options for how to proceed: + (1) Go into each submodule which is not in a clean state and issue a 'git status' + Either revert or commit your changes so that the submodule is in a clean state. + (2) use the --force option to git-fleximod + (3) you can name the particular submodules to update using the git-fleximod command line + (4) As a last resort you can remove the submodule (via 'rm -fr [directory]') + then rerun git-fleximod update. +""" + print(text) + +def submodules_test(gitmodules, root_dir): + """ + This function tests the git submodules based on the provided parameters. + + It first checks that fxtags are present and in sync with submodule hashes. + Then it ensures that urls are consistent with fxurls (not forks and not ssh) + and that sparse checkout files exist. + + Parameters: + gitmodules (ConfigParser): The gitmodules configuration. + root_dir (str): The root directory for the git operation. + + Returns: + int: The number of test failures. 
+ """ + # First check that fxtags are present and in sync with submodule hashes + testfails, localmods, needsupdate = submodules_status(gitmodules, root_dir) + print("") + # Then make sure that urls are consistant with fxurls (not forks and not ssh) + # and that sparse checkout files exist + for name in gitmodules.sections(): + url = gitmodules.get(name, "url") + fxurl = gitmodules.get(name, "fxDONOTUSEurl") + fxsparse = gitmodules.get(name, "fxsparse") + path = gitmodules.get(name, "path") + fxurl = fxurl[:-4] if fxurl.endswith(".git") else fxurl + url = url[:-4] if url.endswith(".git") else url + if not fxurl or url.lower() != fxurl.lower(): + print(f"{name:>20} url {url} not in sync with required {fxurl}") + testfails += 1 + if fxsparse and not os.path.isfile(os.path.join(root_dir, path, fxsparse)): + print(f"{name:>20} sparse checkout file {fxsparse} not found") + testfails += 1 + return testfails + localmods + needsupdate + + +def main(): + ( + root_dir, + file_name, + fxrequired, + includelist, + excludelist, + force, + action, + ) = commandline_arguments() + # Get a logger for the package + global logger + logger = logging.getLogger(__name__) + + logger.info("action is {} root_dir={} file_name={}".format(action, root_dir, file_name)) + + if not root_dir or not os.path.isfile(os.path.join(root_dir, file_name)): + if root_dir: + file_path = utils.find_upwards(root_dir, file_name) + + if root_dir is None or file_path is None: + root_dir = "." + utils.fatal_error( + "No {} found in {} or any of it's parents".format(file_name, root_dir) + ) + + root_dir = os.path.dirname(file_path) + logger.info( + "root_dir is {} includelist={} excludelist={}".format( + root_dir, includelist, excludelist + ) + ) + gitmodules = GitModules( + logger, + confpath=root_dir, + conffile=file_name, + includelist=includelist, + excludelist=excludelist, + ) + if not gitmodules.sections(): + sys.exit("No submodule components found") + retval = 0 + if action == "update": + submodules_update(gitmodules, root_dir, fxrequired, force) + elif action == "status": + tfails, lmods, updates = submodules_status(gitmodules, root_dir, toplevel=True) + if tfails + lmods + updates > 0: + print( + f" testfails = {tfails}, local mods = {lmods}, needs updates {updates}\n" + ) + if lmods > 0: + local_mods_output() + elif action == "test": + retval = submodules_test(gitmodules, root_dir) + else: + utils.fatal_error(f"unrecognized action request {action}") + return retval + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/.lib/git-fleximod/git_fleximod/gitinterface.py b/.lib/git-fleximod/git_fleximod/gitinterface.py new file mode 100644 index 0000000000..fb20883cd0 --- /dev/null +++ b/.lib/git-fleximod/git_fleximod/gitinterface.py @@ -0,0 +1,92 @@ +import os +import sys +from . 
import utils +from pathlib import Path + +class GitInterface: + def __init__(self, repo_path, logger): + logger.debug("Initialize GitInterface for {}".format(repo_path)) + if isinstance(repo_path, str): + self.repo_path = Path(repo_path).resolve() + elif isinstance(repo_path, Path): + self.repo_path = repo_path.resolve() + else: + raise TypeError("repo_path must be a str or Path object") + self.logger = logger + try: + import git + + self._use_module = True + try: + self.repo = git.Repo(str(self.repo_path)) # Initialize GitPython repo + except git.exc.InvalidGitRepositoryError: + self.git = git + self._init_git_repo() + msg = "Using GitPython interface to git" + except ImportError: + self._use_module = False + if not (self.repo_path / ".git").exists(): + self._init_git_repo() + msg = "Using shell interface to git" + self.logger.info(msg) + + def _git_command(self, operation, *args): + self.logger.info(operation) + if self._use_module and operation != "submodule": + try: + return getattr(self.repo.git, operation)(*args) + except Exception as e: + sys.exit(e) + else: + return ["git", "-C", str(self.repo_path), operation] + list(args) + + def _init_git_repo(self): + if self._use_module: + self.repo = self.git.Repo.init(str(self.repo_path)) + else: + command = ("git", "-C", str(self.repo_path), "init") + utils.execute_subprocess(command) + + # pylint: disable=unused-argument + def git_operation(self, operation, *args, **kwargs): + newargs = [] + for a in args: + # Do not use ssh interface + if isinstance(a, str): + a = a.replace("git@github.com:", "https://github.com/") + newargs.append(a) + + command = self._git_command(operation, *newargs) + if isinstance(command, list): + try: + status, output = utils.execute_subprocess(command, status_to_caller=True, output_to_caller=True) + return status, output.rstrip() + except Exception as e: + sys.exit(e) + else: + return 0, command + + def config_get_value(self, section, name): + if self._use_module: + config = self.repo.config_reader() + try: + val = config.get_value(section, name) + except: + val = None + return val + else: + cmd = ("git", "-C", str(self.repo_path), "config", "--get", f"{section}.{name}") + output = utils.execute_subprocess(cmd, output_to_caller=True) + return output.strip() + + def config_set_value(self, section, name, value): + if self._use_module: + with self.repo.config_writer() as writer: + if "." in section: + section = section.replace("."," \"")+'"' + writer.set_value(section, name, value) + writer.release() # Ensure changes are saved + else: + cmd = ("git", "-C", str(self.repo_path), "config", f"{section}.{name}", value) + self.logger.info(cmd) + utils.execute_subprocess(cmd, output_to_caller=True) diff --git a/.lib/git-fleximod/git_fleximod/gitmodules.py b/.lib/git-fleximod/git_fleximod/gitmodules.py new file mode 100644 index 0000000000..cf8b350dd6 --- /dev/null +++ b/.lib/git-fleximod/git_fleximod/gitmodules.py @@ -0,0 +1,97 @@ +import shutil, os +from pathlib import Path +from configparser import RawConfigParser, ConfigParser +from .lstripreader import LstripReader + + +class GitModules(RawConfigParser): + def __init__( + self, + logger, + confpath=Path.cwd(), + conffile=".gitmodules", + includelist=None, + excludelist=None, + ): + """ + confpath: Path to the directory containing the .gitmodules file (defaults to the current working directory). + conffile: Name of the configuration file (defaults to .gitmodules). + includelist: Optional list of submodules to include. 
+ excludelist: Optional list of submodules to exclude. + """ + self.logger = logger + self.logger.debug( + "Creating a GitModules object {} {} {} {}".format( + confpath, conffile, includelist, excludelist + ) + ) + super().__init__() + self.conf_file = (Path(confpath) / Path(conffile)) + if self.conf_file.exists(): + self.read_file(LstripReader(str(self.conf_file)), source=conffile) + self.includelist = includelist + self.excludelist = excludelist + self.isdirty = False + + def reload(self): + self.clear() + if self.conf_file.exists(): + self.read_file(LstripReader(str(self.conf_file)), source=self.conf_file) + + + def set(self, name, option, value): + """ + Sets a configuration value for a specific submodule: + Ensures the appropriate section exists for the submodule. + Calls the parent class's set method to store the value. + """ + self.isdirty = True + self.logger.debug("set called {} {} {}".format(name, option, value)) + section = f'submodule "{name}"' + if not self.has_section(section): + self.add_section(section) + super().set(section, option, str(value)) + + # pylint: disable=redefined-builtin, arguments-differ + def get(self, name, option, raw=False, vars=None, fallback=None): + """ + Retrieves a configuration value for a specific submodule: + Uses the parent class's get method to access the value. + Handles potential errors if the section or option doesn't exist. + """ + self.logger.debug("git get called {} {}".format(name, option)) + section = f'submodule "{name}"' + try: + return ConfigParser.get( + self, section, option, raw=raw, vars=vars, fallback=fallback + ) + except ConfigParser.NoOptionError: + return None + + def save(self): + if self.isdirty: + self.logger.info("Writing {}".format(self.conf_file)) + with open(self.conf_file, "w") as fd: + self.write(fd) + self.isdirty = False + + def __del__(self): + self.save() + + def sections(self): + """Strip the submodule part out of section and just use the name""" + self.logger.debug("calling GitModules sections iterator") + names = [] + for section in ConfigParser.sections(self): + name = section[11:-1] + if self.includelist and name not in self.includelist: + continue + if self.excludelist and name in self.excludelist: + continue + names.append(name) + return names + + def items(self, name, raw=False, vars=None): + self.logger.debug("calling GitModules items for {}".format(name)) + section = f'submodule "{name}"' + return ConfigParser.items(section, raw=raw, vars=vars) diff --git a/.lib/git-fleximod/git_fleximod/lstripreader.py b/.lib/git-fleximod/git_fleximod/lstripreader.py new file mode 100644 index 0000000000..01d5580ee8 --- /dev/null +++ b/.lib/git-fleximod/git_fleximod/lstripreader.py @@ -0,0 +1,43 @@ +class LstripReader(object): + "LstripReader formats .gitmodules files to be acceptable for configparser" + + def __init__(self, filename): + with open(filename, "r") as infile: + lines = infile.readlines() + self._lines = list() + self._num_lines = len(lines) + self._index = 0 + for line in lines: + self._lines.append(line.lstrip()) + + def readlines(self): + """Return all the lines from this object's file""" + return self._lines + + def readline(self, size=-1): + """Format and return the next line or raise StopIteration""" + try: + line = self.next() + except StopIteration: + line = "" + + if (size > 0) and (len(line) < size): + return line[0:size] + + return line + + def __iter__(self): + """Begin an iteration""" + self._index = 0 + return self + + def next(self): + """Return the next line or raise StopIteration""" + if 
self._index >= self._num_lines: + raise StopIteration + + self._index = self._index + 1 + return self._lines[self._index - 1] + + def __next__(self): + return self.next() diff --git a/.lib/git-fleximod/git_fleximod/metoflexi.py b/.lib/git-fleximod/git_fleximod/metoflexi.py new file mode 100755 index 0000000000..cc347db2dd --- /dev/null +++ b/.lib/git-fleximod/git_fleximod/metoflexi.py @@ -0,0 +1,236 @@ +#!/usr/bin/env python +from configparser import ConfigParser +import sys +import shutil +from pathlib import Path +import argparse +import logging +from git_fleximod.gitinterface import GitInterface +from git_fleximod.gitmodules import GitModules +from git_fleximod import utils + +logger = None + +def find_root_dir(filename=".git"): + d = Path.cwd() + root = Path(d.root) + while d != root: + attempt = d / filename + if attempt.is_dir(): + return d + d = d.parent + return None + + +def get_parser(): + description = """ + %(prog)s manages checking out groups of gitsubmodules with addtional support for Earth System Models + """ + parser = argparse.ArgumentParser( + description=description, formatter_class=argparse.RawDescriptionHelpFormatter + ) + + parser.add_argument('-e', '--externals', nargs='?', + default='Externals.cfg', + help='The externals description filename. ' + 'Default: %(default)s.') + + parser.add_argument( + "-C", + "--path", + default=find_root_dir(), + help="Toplevel repository directory. Defaults to top git directory relative to current.", + ) + + parser.add_argument( + "-g", + "--gitmodules", + nargs="?", + default=".gitmodules", + help="The submodule description filename. " "Default: %(default)s.", + ) + parser.add_argument( + "-v", + "--verbose", + action="count", + default=0, + help="Output additional information to " + "the screen and log file. This flag can be " + "used up to two times, increasing the " + "verbosity level each time.", + ) + parser.add_argument( + "-d", + "--debug", + action="store_true", + default=False, + help="DEVELOPER: output additional debugging " + "information to the screen and log file.", + ) + + return parser + +def commandline_arguments(args=None): + parser = get_parser() + + options = parser.parse_args(args) + handlers = [logging.StreamHandler()] + + if options.debug: + try: + open("fleximod.log", "w") + except PermissionError: + sys.exit("ABORT: Could not write file fleximod.log") + level = logging.DEBUG + handlers.append(logging.FileHandler("fleximod.log")) + elif options.verbose: + level = logging.INFO + else: + level = logging.WARNING + # Configure the root logger + logging.basicConfig( + level=level, format="%(name)s - %(levelname)s - %(message)s", handlers=handlers + ) + + return( + options.path, + options.gitmodules, + options.externals + ) + +class ExternalRepoTranslator: + """ + Translates external repositories configured in an INI-style externals file. + """ + + def __init__(self, rootpath, gitmodules, externals): + self.rootpath = rootpath + if gitmodules: + self.gitmodules = GitModules(logger, confpath=rootpath) + self.externals = (rootpath / Path(externals)).resolve() + print(f"Translating {self.externals}") + self.git = GitInterface(rootpath, logger) + +# def __del__(self): +# if (self.rootpath / "save.gitignore"): + + + def translate_single_repo(self, section, tag, url, path, efile, hash_, sparse, protocol): + """ + Translates a single repository based on configuration details. + + Args: + rootpath (str): Root path of the main repository. + gitmodules (str): Path to the .gitmodules file. 
+ tag (str): The tag to use for the external repository. + url (str): The URL of the external repository. + path (str): The relative path within the main repository for the external repository. + efile (str): The external file or file containing submodules. + hash_ (str): The commit hash to checkout (if applicable). + sparse (str): Boolean indicating whether to use sparse checkout (if applicable). + protocol (str): The protocol to use (e.g., 'git', 'http'). + """ + assert protocol != "svn", "SVN protocol is not currently supported" + print(f"Translating repository {section}") + if efile: + file_path = Path(path) / Path(efile) + newroot = (self.rootpath / file_path).parent.resolve() + if not newroot.exists(): + newroot.mkdir(parents=True) + logger.info("Newroot is {}".format(newroot)) + newt = ExternalRepoTranslator(newroot, ".gitmodules", efile) + newt.translate_repo() + if protocol == "externals_only": + if tag: + self.gitmodules.set(section, "fxtag", tag) + if hash_: + self.gitmodules.set(section, "fxtag", hash_) + + self.gitmodules.set(section, "fxDONOTUSEurl", url) + if sparse: + self.gitmodules.set(section, "fxsparse", sparse) + self.gitmodules.set(section, "fxrequired", "ToplevelRequired") + else: + newpath = (self.rootpath / Path(path)) + if newpath.exists(): + shutil.rmtree(newpath) + logger.info("Creating directory {}".format(newpath)) + newpath.mkdir(parents=True) + if tag: + logger.info("cloning {}".format(section)) + try: + self.git.git_operation("clone", "-b", tag, "--depth", "1", url, path) + except: + self.git.git_operation("clone", url, path) + with utils.pushd(newpath): + ngit = GitInterface(newpath, logger) + ngit.git_operation("checkout", tag) + if hash_: + self.git.git_operation("clone", url, path) + git = GitInterface(newpath, logger) + git.git_operation("fetch", "origin") + git.git_operation("checkout", hash_) + if sparse: + print("setting as sparse submodule {}".format(section)) + sparsefile = (newpath / Path(sparse)) + newfile = (newpath / ".git" / "info" / "sparse-checkout") + print(f"sparsefile {sparsefile} newfile {newfile}") + shutil.copy(sparsefile, newfile) + + logger.info("adding submodule {}".format(section)) + self.gitmodules.save() + self.git.git_operation("submodule", "add", "-f", "--name", section, url, path) + self.git.git_operation("submodule","absorbgitdirs") + self.gitmodules.reload() + if tag: + self.gitmodules.set(section, "fxtag", tag) + if hash_: + self.gitmodules.set(section, "fxtag", hash_) + + self.gitmodules.set(section, "fxDONOTUSEurl", url) + if sparse: + self.gitmodules.set(section, "fxsparse", sparse) + self.gitmodules.set(section, "fxrequired", "ToplevelRequired") + + + def translate_repo(self): + """ + Translates external repositories defined within an external file. + + Args: + rootpath (str): Root path of the main repository. + gitmodules (str): Path to the .gitmodules file. + external_file (str): The path to the external file containing repository definitions. 
+ """ + econfig = ConfigParser() + econfig.read((self.rootpath / Path(self.externals))) + + for section in econfig.sections(): + if section == "externals_description": + logger.info("skipping section {}".format(section)) + return + logger.info("Translating section {}".format(section)) + tag = econfig.get(section, "tag", raw=False, fallback=None) + url = econfig.get(section, "repo_url", raw=False, fallback=None) + path = econfig.get(section, "local_path", raw=False, fallback=None) + efile = econfig.get(section, "externals", raw=False, fallback=None) + hash_ = econfig.get(section, "hash", raw=False, fallback=None) + sparse = econfig.get(section, "sparse", raw=False, fallback=None) + protocol = econfig.get(section, "protocol", raw=False, fallback=None) + + self.translate_single_repo(section, tag, url, path, efile, hash_, sparse, protocol) + + + +def _main(): + rootpath, gitmodules, externals = commandline_arguments() + global logger + logger = logging.getLogger(__name__) + with utils.pushd(rootpath): + t = ExternalRepoTranslator(Path(rootpath), gitmodules, externals) + logger.info("Translating {}".format(rootpath)) + t.translate_repo() + + +if __name__ == "__main__": + sys.exit(_main()) diff --git a/.lib/git-fleximod/git_fleximod/submodule.py b/.lib/git-fleximod/git_fleximod/submodule.py new file mode 100644 index 0000000000..c939d54bc9 --- /dev/null +++ b/.lib/git-fleximod/git_fleximod/submodule.py @@ -0,0 +1,427 @@ +import os +import textwrap +import shutil +import string +from configparser import NoOptionError +from git_fleximod import utils +from git_fleximod.gitinterface import GitInterface + +class Submodule(): + """ + Represents a Git submodule with enhanced features for flexible management. + + Attributes: + name (str): The name of the submodule. + root_dir (str): The root directory of the main project. + path (str): The relative path from the root directory to the submodule. + url (str): The URL of the submodule repository. + fxurl (str): The URL for flexible submodule management (optional). + fxtag (str): The tag for flexible submodule management (optional). + fxsparse (str): Path to the sparse checkout file relative to the submodule path, see git-sparse-checkout for details (optional). + fxrequired (str): Indicates if the submodule is optional or required (optional). + logger (logging.Logger): Logger instance for logging (optional). + """ + def __init__(self, root_dir, name, path, url, fxtag=None, fxurl=None, fxsparse=None, fxrequired=None, logger=None): + """ + Initializes a new Submodule instance with the provided attributes. + """ + self.name = name + self.root_dir = root_dir + self.path = path + self.url = url + self.fxurl = fxurl + self.fxtag = fxtag + self.fxsparse = fxsparse + if fxrequired: + self.fxrequired = fxrequired + else: + self.fxrequired = "AlwaysRequired" + self.logger = logger + + def status(self): + """ + Checks the status of the submodule and returns 4 parameters: + - result (str): The status of the submodule. + - needsupdate (bool): An indicator if the submodule needs to be updated. + - localmods (bool): An indicator if the submodule has local modifications. + - testfails (bool): An indicator if the submodule has failed a test, this is used for testing purposes. 
+ """ + + smpath = os.path.join(self.root_dir, self.path) + testfails = False + localmods = False + needsupdate = False + ahash = None + optional = "" + if "Optional" in self.fxrequired: + optional = " (optional)" + required = None + level = None + if not os.path.exists(os.path.join(smpath, ".git")): + rootgit = GitInterface(self.root_dir, self.logger) + # submodule commands use path, not name + status, tags = rootgit.git_operation("ls-remote", "--tags", self.url) + status, result = rootgit.git_operation("submodule","status",smpath) + result = result.split() + + if result: + ahash = result[0][1:] + hhash = None + atag = None + for htag in tags.split("\n"): + if htag.endswith('^{}'): + htag = htag[:-3] + if ahash and not atag and ahash in htag: + atag = (htag.split()[1])[10:] + if self.fxtag and not hhash and htag.endswith(self.fxtag): + hhash = htag.split()[0] + if hhash and atag: + break + if self.fxtag and (ahash == hhash or atag == self.fxtag): + result = f"e {self.name:>20} not checked out, aligned at tag {self.fxtag}{optional}" + needsupdate = True + elif self.fxtag: + status, ahash = rootgit.git_operation( + "submodule", "status", "{}".format(self.path) + ) + ahash = ahash[1 : len(self.fxtag) + 1] + if self.fxtag == ahash: + result = f"e {self.name:>20} not checked out, aligned at hash {ahash}{optional}" + else: + result = f"e {self.name:>20} not checked out, out of sync at tag {atag}, expected tag is {self.fxtag}{optional}" + testfails = True + needsupdate = True + else: + result = f"e {self.name:>20} has no fxtag defined in .gitmodules{optional}" + testfails = False + else: + with utils.pushd(smpath): + git = GitInterface(smpath, self.logger) + status, remote = git.git_operation("remote") + if remote == '': + result = f"e {self.name:>20} has no associated remote" + testfails = True + needsupdate = True + return result, needsupdate, localmods, testfails + status, rurl = git.git_operation("ls-remote","--get-url") + status, lines = git.git_operation("log", "--pretty=format:\"%h %d\"") + line = lines.partition('\n')[0] + parts = line.split() + ahash = parts[0][1:] + atag = None + if len(parts) > 3: + idx = 0 + while idx < len(parts)-1: + idx = idx+1 + if parts[idx] == 'tag:': + atag = parts[idx+1] + while atag.endswith(')') or atag.endswith(',') or atag.endswith("\""): + atag = atag[:-1] + if atag == self.fxtag: + break + + + #print(f"line is {line} ahash is {ahash} atag is {atag} {parts}") + # atag = git.git_operation("describe", "--tags", "--always") + # ahash = git.git_operation("rev-list", "HEAD").partition("\n")[0] + + recurse = False + if rurl != self.url: + remote = self._add_remote(git) + git.git_operation("fetch", remote) + if self.fxtag and atag == self.fxtag: + result = f" {self.name:>20} at tag {self.fxtag}" + recurse = True + testfails = False + elif self.fxtag and (ahash[: len(self.fxtag)] == self.fxtag or (self.fxtag.find(ahash)==0)): + result = f" {self.name:>20} at hash {ahash}" + recurse = True + testfails = False + elif atag == ahash: + result = f" {self.name:>20} at hash {ahash}" + recurse = True + elif self.fxtag: + result = f"s {self.name:>20} {atag} {ahash} is out of sync with .gitmodules {self.fxtag}" + testfails = True + needsupdate = True + else: + if atag: + result = f"e {self.name:>20} has no fxtag defined in .gitmodules, module at {atag}" + else: + result = f"e {self.name:>20} has no fxtag defined in .gitmodules, module at {ahash}" + testfails = False + + status, output = git.git_operation("status", "--ignore-submodules", "-uno") + if "nothing to commit" 
not in output: + localmods = True + result = "M" + textwrap.indent(output, " ") +# print(f"result {result} needsupdate {needsupdate} localmods {localmods} testfails {testfails}") + return result, needsupdate, localmods, testfails + + + def _add_remote(self, git): + """ + Adds a new remote to the submodule if it does not already exist. + + This method checks the existing remotes of the submodule. If the submodule's URL is not already listed as a remote, + it attempts to add a new remote. The name for the new remote is generated dynamically to avoid conflicts. If no + remotes exist, it defaults to naming the new remote 'origin'. + + Args: + git (GitInterface): An instance of GitInterface to perform git operations. + + Returns: + str: The name of the new remote if added, or the name of the existing remote that matches the submodule's URL. + """ + status, remotes = git.git_operation("remote", "-v") + remotes = remotes.splitlines() + upstream = None + if remotes: + status, upstream = git.git_operation("ls-remote", "--get-url") + newremote = "newremote.00" + tmpurl = self.url.replace("git@github.com:", "https://github.com/") + line = next((s for s in remotes if self.url in s or tmpurl in s), None) + if line: + newremote = line.split()[0] + return newremote + else: + i = 0 + while newremote in remotes: + i = i + 1 + newremote = f"newremote.{i:02d}" + else: + newremote = "origin" + git.git_operation("remote", "add", newremote, self.url) + return newremote + + def toplevel(self): + """ + Returns True if the submodule is Toplevel (either Required or Optional) + """ + return True if "Top" in self.fxrequired else False + + def sparse_checkout(self): + """ + Performs a sparse checkout of the submodule. + + This method optimizes the checkout process by only checking out files specified in the submodule's sparse-checkout configuration, + rather than the entire submodule content. It achieves this by first ensuring the `.git/info/sparse-checkout` file is created and + configured in the submodule's directory. Then, it proceeds to checkout the desired tag. If the submodule has already been checked out, + this method will not perform the checkout again. + + This approach is particularly beneficial for submodules with a large number of files, as it significantly reduces the time and disk space + required for the checkout process by avoiding the unnecessary checkout and subsequent removal of unneeded files. 
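+
+        As an illustrative sketch (the submodule name, paths and URL below are
+        hypothetical placeholders, not part of this changeset), a sparse submodule
+        would typically be described in .gitmodules along these lines:
+
+            [submodule "example"]
+                path = libs/example
+                url = https://github.com/org/example
+                fxtag = v1.0.0
+                fxsparse = .sparse_file_list
+
+        where the file named by fxsparse lists the sub-paths to check out, one
+        pattern per line, in the format accepted by git-sparse-checkout.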
+ + Returns: + None + """ + self.logger.info("Called sparse_checkout for {}".format(self.name)) + rgit = GitInterface(self.root_dir, self.logger) + status, superroot = rgit.git_operation("rev-parse", "--show-superproject-working-tree") + if superroot: + gitroot = superroot.strip() + else: + gitroot = self.root_dir.strip() + # Now need to move the .git dir to the submodule location + rootdotgit = os.path.join(self.root_dir, ".git") + while os.path.isfile(rootdotgit): + with open(rootdotgit) as f: + line = f.readline().rstrip() + if line.startswith("gitdir: "): + rootdotgit = os.path.abspath(os.path.join(self.root_dir,line[8:])) + assert os.path.isdir(rootdotgit) + # first create the module directory + if not os.path.isdir(os.path.join(self.root_dir, self.path)): + os.makedirs(os.path.join(self.root_dir, self.path)) + + # initialize a new git repo and set the sparse checkout flag + sprep_repo = os.path.join(self.root_dir, self.path) + sprepo_git = GitInterface(sprep_repo, self.logger) + if os.path.exists(os.path.join(sprep_repo, ".git")): + try: + self.logger.info("Submodule {} found".format(self.name)) + chk = sprepo_git.config_get_value("core", "sparseCheckout") + if chk == "true": + self.logger.info("Sparse submodule {} already checked out".format(self.name)) + return + except (NoOptionError): + self.logger.debug("Sparse submodule {} not present".format(self.name)) + except Exception as e: + utils.fatal_error("Unexpected error {} occured.".format(e)) + + sprepo_git.config_set_value("core", "sparseCheckout", "true") + + # set the repository remote + + self.logger.info("Setting remote origin in {}/{}".format(self.root_dir, self.path)) + status, remotes = sprepo_git.git_operation("remote", "-v") + if self.url not in remotes: + sprepo_git.git_operation("remote", "add", "origin", self.url) + + topgit = os.path.join(gitroot, ".git") + + if gitroot != self.root_dir and os.path.isfile(os.path.join(self.root_dir, ".git")): + with open(os.path.join(self.root_dir, ".git")) as f: + gitpath = os.path.relpath( + os.path.join(self.root_dir, f.read().split()[1]), + start=os.path.join(self.root_dir, self.path), + ) + rootdotgit = os.path.join(gitpath, "modules", self.name) + else: + rootdotgit = os.path.relpath( + os.path.join(self.root_dir, ".git", "modules", self.name), + start=os.path.join(self.root_dir, self.path), + ) + + if os.path.isdir(os.path.join(self.root_dir, self.path, ".git")): + with utils.pushd(sprep_repo): + if os.path.isdir(os.path.join(rootdotgit,".git")): + shutil.rmtree(os.path.join(rootdotgit,".git")) + shutil.move(".git", rootdotgit) + with open(".git", "w") as f: + f.write("gitdir: " + os.path.relpath(rootdotgit)) + infodir = os.path.join(rootdotgit, "info") + if not os.path.isdir(infodir): + os.makedirs(infodir) + gitsparse = os.path.abspath(os.path.join(infodir, "sparse-checkout")) + if os.path.isfile(gitsparse): + self.logger.warning( + "submodule {} is already initialized {}".format(self.name, rootdotgit) + ) + return + + with utils.pushd(sprep_repo): + if os.path.isfile(self.fxsparse): + + shutil.copy(self.fxsparse, gitsparse) + + + # Finally checkout the repo + sprepo_git.git_operation("fetch", "origin", "--tags") + status,_ = sprepo_git.git_operation("checkout", self.fxtag) + if status: + print(f"Error checking out {self.name:>20} at {self.fxtag}") + else: + print(f"Successfully checked out {self.name:>20} at {self.fxtag}") + rgit.config_set_value('submodule.' + self.name, "active", "true") + rgit.config_set_value('submodule.' 
+ self.name, "url", self.url) + rgit.config_set_value('submodule.' + self.name, "path", self.path) + + def update(self): + """ + Updates the submodule to the latest or specified version. + + This method handles the update process of the submodule, including checking out the submodule into the specified path, + handling sparse checkouts if configured, and updating the submodule's URL if necessary. It supports both SSH and HTTPS URLs, + automatically converting SSH URLs to HTTPS to avoid issues for users without SSH keys. + + The update process involves the following steps: + 1. If the submodule is configured for sparse checkout, it performs a sparse checkout. + 2. If the submodule is not already checked out, it clones the submodule using the provided URL. + 3. If a specific tag or hash is provided, it checks out that tag; otherwise, it checks out the latest version. + 4. If the root `.git` is a file (indicating a submodule or a worktree), additional steps are taken to integrate the submodule properly. + + Args: + None + Note: + - SSH URLs are automatically converted to HTTPS to accommodate users without SSH keys. + + Returns: + None + """ + git = GitInterface(self.root_dir, self.logger) + repodir = os.path.join(self.root_dir, self.path) + self.logger.info("Checkout {} into {}/{}".format(self.name, self.root_dir, self.path)) + # if url is provided update to the new url + tag = None + repo_exists = False + if os.path.exists(os.path.join(repodir, ".git")): + self.logger.info("Submodule {} already checked out".format(self.name)) + repo_exists = True + # Look for a .gitmodules file in the newly checkedout repo + if self.fxsparse: + print(f"Sparse checkout {self.name} fxsparse {self.fxsparse}") + self.sparse_checkout() + else: + if not repo_exists and self.url: + # ssh urls cause problems for those who dont have git accounts with ssh keys defined + # but cime has one since e3sm prefers ssh to https, because the .gitmodules file was + # opened with a GitModules object we don't need to worry about restoring the file here + # it will be done by the GitModules class + if self.url.startswith("git@"): + git.git_operation("clone", self.url, self.path) + smgit = GitInterface(repodir, self.logger) + if not tag: + status, tag = smgit.git_operation("describe", "--tags", "--always") + smgit.git_operation("checkout", tag) + # Now need to move the .git dir to the submodule location + rootdotgit = os.path.join(self.root_dir, ".git") + if os.path.isfile(rootdotgit): + with open(rootdotgit) as f: + line = f.readline() + if line.startswith("gitdir: "): + rootdotgit = line[8:] + + newpath = os.path.abspath(os.path.join(self.root_dir, rootdotgit, "modules", self.name)) + if os.path.exists(newpath): + shutil.rmtree(os.path.join(repodir, ".git")) + else: + shutil.move(os.path.join(repodir, ".git"), newpath) + + with open(os.path.join(repodir, ".git"), "w") as f: + f.write("gitdir: " + os.path.relpath(newpath, start=repodir)) + + if not os.path.exists(repodir): + parent = os.path.dirname(repodir) + if not os.path.isdir(parent): + os.makedirs(parent) + git.git_operation("submodule", "add", "--name", self.name, "--", self.url, self.path) + + if not repo_exists: + git.git_operation("submodule", "update", "--init", "--", self.path) + + if self.fxtag: + smgit = GitInterface(repodir, self.logger) + newremote = self._add_remote(smgit) + # Trying to distingush a tag from a hash + allowed = set(string.digits + 'abcdef') + if not set(self.fxtag) <= allowed: + # This is a tag + tag = 
f"refs/tags/{self.fxtag}:refs/tags/{self.fxtag}" + smgit.git_operation("fetch", newremote, tag) + smgit.git_operation("checkout", self.fxtag) + + if not os.path.exists(os.path.join(repodir, ".git")): + utils.fatal_error( + f"Failed to checkout {self.name} {repo_exists} {repodir} {self.path}" + ) + + + if os.path.exists(os.path.join(self.path, ".git")): + submoddir = os.path.join(self.root_dir, self.path) + with utils.pushd(submoddir): + git = GitInterface(submoddir, self.logger) + # first make sure the url is correct + newremote = self._add_remote(git) + status, tags = git.git_operation("tag", "-l") + fxtag = self.fxtag + if fxtag and fxtag not in tags: + git.git_operation("fetch", newremote, "--tags") + status, atag = git.git_operation("describe", "--tags", "--always") + if fxtag and fxtag != atag: + try: + status, _ = git.git_operation("checkout", fxtag) + if not status: + print(f"{self.name:>20} updated to {fxtag}") + except Exception as error: + print(error) + + + elif not fxtag: + print(f"No fxtag found for submodule {self.name:>20}") + else: + print(f"{self.name:>20} up to date.") + + + + return diff --git a/.lib/git-fleximod/git_fleximod/utils.py b/.lib/git-fleximod/git_fleximod/utils.py new file mode 100644 index 0000000000..c4f43d5238 --- /dev/null +++ b/.lib/git-fleximod/git_fleximod/utils.py @@ -0,0 +1,365 @@ +#!/usr/bin/env python3 +""" +Common public utilities for manic package + +""" + +import logging +import os +import subprocess +import sys +from threading import Timer +from pathlib import Path + +LOCAL_PATH_INDICATOR = "." +# --------------------------------------------------------------------- +# +# functions to massage text for output and other useful utilities +# +# --------------------------------------------------------------------- +from contextlib import contextmanager + + +@contextmanager +def pushd(new_dir): + """context for chdir. usage: with pushd(new_dir)""" + previous_dir = os.getcwd() + os.chdir(new_dir) + try: + yield + finally: + os.chdir(previous_dir) + + +def log_process_output(output): + """Log each line of process output at debug level so it can be + filtered if necessary. By default, output is a single string, and + logging.debug(output) will only put log info heading on the first + line. This makes it hard to filter with grep. + + """ + output = output.split("\n") + for line in output: + logging.debug(line) + + +def printlog(msg, **kwargs): + """Wrapper script around print to ensure that everything printed to + the screen also gets logged. + + """ + logging.info(msg) + if kwargs: + print(msg, **kwargs) + else: + print(msg) + sys.stdout.flush() + + +def find_upwards(root_dir, filename): + """Find a file in root dir or any of it's parents""" + d = Path(root_dir) + root = Path(d.root) + while d != root: + attempt = d / filename + if attempt.exists(): + return attempt + d = d.parent + return None + + +def last_n_lines(the_string, n_lines, truncation_message=None): + """Returns the last n lines of the given string + + Args: + the_string: str + n_lines: int + truncation_message: str, optional + + Returns a string containing the last n lines of the_string + + If truncation_message is provided, the returned string begins with + the given message if and only if the string is greater than n lines + to begin with. 
+ """ + + lines = the_string.splitlines(True) + if len(lines) <= n_lines: + return_val = the_string + else: + lines_subset = lines[-n_lines:] + str_truncated = "".join(lines_subset) + if truncation_message: + str_truncated = truncation_message + "\n" + str_truncated + return_val = str_truncated + + return return_val + + +def indent_string(the_string, indent_level): + """Indents the given string by a given number of spaces + + Args: + the_string: str + indent_level: int + + Returns a new string that is the same as the_string, except that + each line is indented by 'indent_level' spaces. + + In python3, this can be done with textwrap.indent. + """ + + lines = the_string.splitlines(True) + padding = " " * indent_level + lines_indented = [padding + line for line in lines] + return "".join(lines_indented) + + +# --------------------------------------------------------------------- +# +# error handling +# +# --------------------------------------------------------------------- + + +def fatal_error(message): + """ + Error output function + """ + logging.error(message) + raise RuntimeError("{0}ERROR: {1}".format(os.linesep, message)) + + +# --------------------------------------------------------------------- +# +# Data conversion / manipulation +# +# --------------------------------------------------------------------- +def str_to_bool(bool_str): + """Convert a sting representation of as boolean into a true boolean. + + Conversion should be case insensitive. + """ + value = None + str_lower = bool_str.lower() + if str_lower in ("true", "t"): + value = True + elif str_lower in ("false", "f"): + value = False + if value is None: + msg = ( + 'ERROR: invalid boolean string value "{0}". ' + 'Must be "true" or "false"'.format(bool_str) + ) + fatal_error(msg) + return value + + +REMOTE_PREFIXES = ["http://", "https://", "ssh://", "git@"] + + +def is_remote_url(url): + """check if the user provided a local file path instead of a + remote. If so, it must be expanded to an absolute + path. + + """ + remote_url = False + for prefix in REMOTE_PREFIXES: + if url.startswith(prefix): + remote_url = True + return remote_url + + +def split_remote_url(url): + """check if the user provided a local file path or a + remote. If remote, try to strip off protocol info. + + """ + remote_url = is_remote_url(url) + if not remote_url: + return url + + for prefix in REMOTE_PREFIXES: + url = url.replace(prefix, "") + + if "@" in url: + url = url.split("@")[1] + + if ":" in url: + url = url.split(":")[1] + + return url + + +def expand_local_url(url, field): + """check if the user provided a local file path instead of a + remote. If so, it must be expanded to an absolute + path. + + Note: local paths of LOCAL_PATH_INDICATOR have special meaning and + represent local copy only, don't work with the remotes. + + """ + remote_url = is_remote_url(url) + if not remote_url: + if url.strip() == LOCAL_PATH_INDICATOR: + pass + else: + url = os.path.expandvars(url) + url = os.path.expanduser(url) + if not os.path.isabs(url): + msg = ( + 'WARNING: Externals description for "{0}" contains a ' + "url that is not remote and does not expand to an " + "absolute path. 
Version control operations may " + "fail.\n\nurl={1}".format(field, url) + ) + printlog(msg) + else: + url = os.path.normpath(url) + return url + + +# --------------------------------------------------------------------- +# +# subprocess +# +# --------------------------------------------------------------------- + +# Give the user a helpful message if we detect that a command seems to +# be hanging. +_HANGING_SEC = 300 + + +def _hanging_msg(working_directory, command): + print( + """ + +Command '{command}' +from directory {working_directory} +has taken {hanging_sec} seconds. It may be hanging. + +The command will continue to run, but you may want to abort +git-fleximod with ^C and investigate. A possible cause of hangs is git +requires authentication to access a private repository. On some +systems, git requests for authentication information will not +be displayed to the user. In this case, the program will appear to +hang. Ensure you can run git manually and access all +repositories without entering your authentication information. + +""".format( + command=command, + working_directory=working_directory, + hanging_sec=_HANGING_SEC, + ) + ) + + +def execute_subprocess(commands, status_to_caller=False, output_to_caller=False): + """Wrapper around subprocess.check_output to handle common + exceptions. + + check_output runs a command with arguments and waits + for it to complete. + + check_output raises an exception on a nonzero return code. if + status_to_caller is true, execute_subprocess returns the subprocess + return code, otherwise execute_subprocess treats non-zero return + status as an error and raises an exception. + + """ + cwd = os.getcwd() + msg = "In directory: {0}\nexecute_subprocess running command:".format(cwd) + logging.info(msg) + commands_str = " ".join(str(element) for element in commands) + logging.info(commands_str) + return_to_caller = status_to_caller or output_to_caller + status = -1 + output = "" + hanging_timer = Timer( + _HANGING_SEC, + _hanging_msg, + kwargs={"working_directory": cwd, "command": commands_str}, + ) + hanging_timer.start() + try: + output = subprocess.check_output( + commands, stderr=subprocess.STDOUT, universal_newlines=True + ) + log_process_output(output) + status = 0 + except OSError as error: + msg = failed_command_msg( + "Command execution failed. Does the executable exist?", commands + ) + logging.error(error) + fatal_error(msg) + except ValueError as error: + msg = failed_command_msg( + "DEV_ERROR: Invalid arguments trying to run subprocess", commands + ) + logging.error(error) + fatal_error(msg) + except subprocess.CalledProcessError as error: + # Only report the error if we are NOT returning to the + # caller. If we are returning to the caller, then it may be a + # simple status check. If returning, it is the callers + # responsibility determine if an error occurred and handle it + # appropriately. 
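+        # For example (illustrative only), a caller that wants to handle
+        # failures itself would use:
+        #     status = execute_subprocess(["git", "fetch", "origin"],
+        #                                 status_to_caller=True)
+        # and inspect the returned exit status, whereas a plain
+        # execute_subprocess(["git", "fetch", "origin"]) raises on failure.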
+ msg_context = ( + "Process did not run successfully; " + "returned status {0}".format(error.returncode) + ) + msg = failed_command_msg(msg_context, commands, output=error.output) + if not return_to_caller: + logging.error(error) + logging.error(msg) + log_process_output(error.output) + fatal_error(msg) + status = error.returncode + finally: + hanging_timer.cancel() + + if status_to_caller and output_to_caller: + ret_value = (status, output) + elif status_to_caller: + ret_value = status + elif output_to_caller: + ret_value = output + else: + ret_value = None + + return ret_value + + +def failed_command_msg(msg_context, command, output=None): + """Template for consistent error messages from subprocess calls. + + If 'output' is given, it should provide the output from the failed + command + """ + + if output: + output_truncated = last_n_lines( + output, 20, truncation_message="[... Output truncated for brevity ...]" + ) + errmsg = ( + "Failed with output:\n" + indent_string(output_truncated, 4) + "\nERROR: " + ) + else: + errmsg = "" + + command_str = " ".join(command) + errmsg += """In directory + {cwd} +{context}: + {command} +""".format( + cwd=os.getcwd(), context=msg_context, command=command_str + ) + + if output: + errmsg += "See above for output from failed command.\n" + + return errmsg diff --git a/.lib/git-fleximod/poetry.lock b/.lib/git-fleximod/poetry.lock new file mode 100644 index 0000000000..3a74effcd1 --- /dev/null +++ b/.lib/git-fleximod/poetry.lock @@ -0,0 +1,693 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "alabaster" +version = "0.7.13" +description = "A configurable sidebar-enabled Sphinx theme" +optional = false +python-versions = ">=3.6" +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] + +[[package]] +name = "babel" +version = "2.15.0" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.8" +files = [ + {file = "Babel-2.15.0-py3-none-any.whl", hash = "sha256:08706bdad8d0a3413266ab61bd6c34d0c28d6e1e7badf40a2cebe67644e2e1fb"}, + {file = "babel-2.15.0.tar.gz", hash = "sha256:8daf0e265d05768bc6c7a314cf1321e9a123afc328cc635c18622a2f30a04413"}, +] + +[package.dependencies] +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "certifi" +version = "2024.6.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, + {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "docutils" +version = "0.19" +description = "Docutils -- Python Documentation Utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, + {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.1" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, + {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fsspec" +version = "2023.12.2" +description = "File-system specification" +optional = false +python-versions = ">=3.8" +files = [ + {file = "fsspec-2023.12.2-py3-none-any.whl", hash = "sha256:d800d87f72189a745fa3d6b033b9dc4a34ad069f60ca60b943a63599f5501960"}, + {file = "fsspec-2023.12.2.tar.gz", hash = "sha256:8548d39e8810b59c38014934f6b31e57f40c1b20f911f4cc2b85389c7e9bf0cb"}, +] + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +devel = ["pytest", "pytest-cov"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] +full = ["adlfs", "aiohttp (!=4.0.0a0,!=4.0.0a1)", "dask", "distributed", "dropbox", "dropboxdrivefs", "fusepy", "gcsfs", "libarchive-c", "ocifs", "panel", "paramiko", "pyarrow (>=1)", "pygit2", "requests", "s3fs", "smbprotocol", "tqdm"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["aiohttp (!=4.0.0a0,!=4.0.0a1)", "requests"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] +tqdm = ["tqdm"] + +[[package]] +name = "gitdb" +version = "4.0.11" +description = "Git Object Database" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.11-py3-none-any.whl", hash = 
"sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, + {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.43" +description = "GitPython is a Python library used to interact with Git repositories" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"}, + {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[package.extras] +doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] +test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + +[[package]] +name = "importlib-metadata" +version = "8.0.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "importlib_metadata-8.0.0-py3-none-any.whl", hash = "sha256:15584cf2b1bf449d98ff8a6ff1abef57bf20f3ac6454f431736cd3e660921b2f"}, + {file = "importlib_metadata-8.0.0.tar.gz", hash = "sha256:188bd24e4c346d3f0a933f275c2fec67050326a856b9a359881d7c2a697e8812"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jinja2" +version = "3.1.4" +description = "A very fast and expressive template engine." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = 
"sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = 
"MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "packaging" +version = "24.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, + {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = 
"sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pyfakefs" +version = "5.5.0" +description = "pyfakefs implements a fake file system that mocks the Python file system modules." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyfakefs-5.5.0-py3-none-any.whl", hash = "sha256:8dbf203ab7bef1529f11f7d41b9478b898e95bf9f3b71262163aac07a518cd76"}, + {file = "pyfakefs-5.5.0.tar.gz", hash = "sha256:7448aaa07142f892d0a4eb52a5ed3206a9f02c6599e686cd97d624c18979c154"}, +] + +[[package]] +name = "pygments" +version = "2.18.0" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +files = [ + {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, + {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pytest" +version = "8.2.2" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest-8.2.2-py3-none-any.whl", hash = "sha256:c434598117762e2bd304e526244f67bf66bbd7b5d6cf22138be51ff661980343"}, + {file = "pytest-8.2.2.tar.gz", hash = "sha256:de4bb8104e201939ccdc688b27a89a7be2079b22e2bd2b07f806b6ba71117977"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=1.5,<2.0" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "smmap" +version = "5.0.1" +description = "A pure Python implementation of a sliding window memory map manager" +optional = false +python-versions = ">=3.7" +files = [ + {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, + {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = false +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "sphinx" +version = "5.3.0" +description = "Python documentation generator" +optional = false +python-versions = ">=3.6" +files = [ + {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, + {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.20" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.12" +requests = ">=2.5.0" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.4" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.1" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." +optional = false +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "urllib3" +version = "2.2.2" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, + {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "wheel" +version = "0.42.0" +description = "A built-package format for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "wheel-0.42.0-py3-none-any.whl", hash = "sha256:177f9c9b0d45c47873b619f5b650346d632cdc35fb5e4d25058e09c9e581433d"}, + {file = "wheel-0.42.0.tar.gz", hash = "sha256:c45be39f7882c9d34243236f2d63cbd58039e360f85d0913425fbd7ceea617a8"}, +] + +[package.extras] +test = ["pytest (>=6.0.0)", "setuptools (>=65)"] + +[[package]] +name = "zipp" +version = "3.19.2" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.8" +files = [ + {file = "zipp-3.19.2-py3-none-any.whl", hash = "sha256:f091755f667055f2d02b32c53771a7a6c8b47e1fdbc4b72a8b9072b3eef8015c"}, + {file = "zipp-3.19.2.tar.gz", hash = "sha256:bf1dcf6450f873a13e952a29504887c89e6de7506209e5b1bcc3460135d4de19"}, +] + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.8" +content-hash = "25ee2ae1d74abedde3a6637a60d4a3095ea5cf9731960875741bbc2ba84a475d" diff --git a/.lib/git-fleximod/pyproject.toml b/.lib/git-fleximod/pyproject.toml new file mode 100644 index 0000000000..619cdabe3b --- /dev/null +++ b/.lib/git-fleximod/pyproject.toml @@ -0,0 +1,41 @@ +[tool.poetry] +name = "git-fleximod" +version = "0.9.2" +description = "Extended support for git-submodule and git-sparse-checkout" +authors = ["Jim Edwards "] +maintainers = ["Jim Edwards "] +license = "MIT" +readme = "README.md" +homepage = "https://github.com/jedwards4b/git-fleximod" +keywords = ["git", "submodule", "sparse-checkout"] +packages = [ +{ include = "git_fleximod"}, +{ include = "doc"}, +] + +[tool.poetry.scripts] +git-fleximod = "git_fleximod.git_fleximod:main" +me2flexi = "git_fleximod.metoflexi:_main" +fsspec = "fsspec.fuse:main" + +[tool.poetry.dependencies] +python = "^3.8" +GitPython = "^3.1.0" +sphinx = "^5.0.0" +fsspec = "^2023.12.2" +wheel = "^0.42.0" +pytest = "^8.0.0" +pyfakefs = "^5.3.5" + +[tool.poetry.urls] +"Bug Tracker" = "https://github.com/jedwards4b/git-fleximod/issues" + +[tool.pytest.ini_options] +markers = [ + "skip_after_first: only run on first iteration" +] + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" + diff --git a/.lib/git-fleximod/tbump.toml b/.lib/git-fleximod/tbump.toml new file mode 100644 index 0000000000..920ff0591c --- /dev/null +++ b/.lib/git-fleximod/tbump.toml @@ -0,0 +1,43 @@ +# Uncomment this if your project is hosted on GitHub: +github_url = "https://github.com/jedwards4b/git-fleximod/" + +[version] +current = "0.9.2" + +# Example of a semver regexp. 
+# Make sure this matches current_version before
+# using tbump
+regex = '''
+  (?P<major>\d+)
+  \.
+  (?P<minor>\d+)
+  \.
+  (?P<patch>\d+)
+  '''
+
+[git]
+message_template = "Bump to {new_version}"
+tag_template = "v{new_version}"
+
+# For each file to patch, add a [[file]] config
+# section containing the path of the file, relative to the
+# tbump.toml location.
+[[file]]
+src = "git_fleximod/cli.py"
+
+[[file]]
+src = "pyproject.toml"
+
+# You can specify a list of commands to
+# run after the files have been patched
+# and before the git commit is made
+
+# [[before_commit]]
+# name = "check changelog"
+# cmd = "grep -q {new_version} Changelog.rst"
+
+# Or run some commands after the git tag and the branch
+# have been pushed:
+#  [[after_push]]
+#  name = "publish"
+#  cmd = "./publish.sh"
diff --git a/.lib/git-fleximod/tests/__init__.py b/.lib/git-fleximod/tests/__init__.py
new file mode 100644
index 0000000000..4d4c66c78e
--- /dev/null
+++ b/.lib/git-fleximod/tests/__init__.py
@@ -0,0 +1,3 @@
+import sys, os
+
+sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, "src"))
diff --git a/.lib/git-fleximod/tests/conftest.py b/.lib/git-fleximod/tests/conftest.py
new file mode 100644
index 0000000000..81edbe713e
--- /dev/null
+++ b/.lib/git-fleximod/tests/conftest.py
@@ -0,0 +1,150 @@
+import pytest
+from git_fleximod.gitinterface import GitInterface
+import os
+import subprocess
+import logging
+from pathlib import Path
+
+@pytest.fixture(scope='session')
+def logger():
+    logging.basicConfig(
+        level=logging.INFO, format="%(name)s - %(levelname)s - %(message)s", handlers=[logging.StreamHandler()]
+    )
+    logger = logging.getLogger(__name__)
+    return logger
+
+all_repos=[
+    {"subrepo_path": "modules/test",
+     "submodule_name": "test_submodule",
+     "status1" : "test_submodule MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0",
+     "status2" : "test_submodule at tag MPIserial_2.4.0",
+     "status3" : "test_submodule at tag MPIserial_2.4.0",
+     "status4" : "test_submodule at tag MPIserial_2.4.0",
+     "gitmodules_content" : """
+    [submodule "test_submodule"]
+    path = modules/test
+    url = https://github.com/ESMCI/mpi-serial.git
+    fxtag = MPIserial_2.4.0
+    fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git
+    fxrequired = ToplevelRequired
+"""},
+    {"subrepo_path": "modules/test_optional",
+     "submodule_name": "test_optional",
+     "status1" : "test_optional MPIserial_2.5.0-3-gd82ce7c is out of sync with .gitmodules MPIserial_2.4.0",
+     "status2" : "test_optional at tag MPIserial_2.4.0",
+     "status3" : "test_optional not checked out, out of sync at tag None, expected tag is MPIserial_2.4.0 (optional)",
+     "status4" : "test_optional at tag MPIserial_2.4.0",
+     "gitmodules_content": """
+    [submodule "test_optional"]
+    path = modules/test_optional
+    url = https://github.com/ESMCI/mpi-serial.git
+    fxtag = MPIserial_2.4.0
+    fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git
+    fxrequired = ToplevelOptional
+"""},
+    {"subrepo_path": "modules/test_alwaysoptional",
+     "submodule_name": "test_alwaysoptional",
+     "status1" : "test_alwaysoptional MPIserial_2.3.0 is out of sync with .gitmodules e5cf35c",
+     "status2" : "test_alwaysoptional at hash e5cf35c",
+     "status3" : "out of sync at tag None, expected tag is e5cf35c",
+     "status4" : "test_alwaysoptional at hash e5cf35c",
+     "gitmodules_content": """
+    [submodule "test_alwaysoptional"]
+    path = modules/test_alwaysoptional
+    url = https://github.com/ESMCI/mpi-serial.git
+    fxtag = e5cf35c
+    fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git
+    
fxrequired = AlwaysOptional +"""}, + {"subrepo_path": "modules/test_sparse", + "submodule_name": "test_sparse", + "status1" : "test_sparse at tag MPIserial_2.5.0", + "status2" : "test_sparse at tag MPIserial_2.5.0", + "status3" : "test_sparse at tag MPIserial_2.5.0", + "status4" : "test_sparse at tag MPIserial_2.5.0", + "gitmodules_content": """ + [submodule "test_sparse"] + path = modules/test_sparse + url = https://github.com/ESMCI/mpi-serial.git + fxtag = MPIserial_2.5.0 + fxDONOTUSEurl = https://github.com/ESMCI/mpi-serial.git + fxrequired = AlwaysRequired + fxsparse = ../.sparse_file_list +"""}, +] +@pytest.fixture(params=all_repos) + +def shared_repos(request): + return request.param + +@pytest.fixture +def get_all_repos(): + return all_repos + +def write_sparse_checkout_file(fp): + sparse_content = """m4 +""" + fp.write_text(sparse_content) + +@pytest.fixture +def test_repo(shared_repos, tmp_path, logger): + subrepo_path = shared_repos["subrepo_path"] + submodule_name = shared_repos["submodule_name"] + test_dir = tmp_path / "testrepo" + test_dir.mkdir() + str_path = str(test_dir) + gitp = GitInterface(str_path, logger) + assert test_dir.joinpath(".git").is_dir() + (test_dir / "modules").mkdir() + if "sparse" in submodule_name: + (test_dir / subrepo_path).mkdir() + # Add the sparse checkout file + write_sparse_checkout_file(test_dir / "modules" / ".sparse_file_list") + gitp.git_operation("add","modules/.sparse_file_list") + else: + gitp = GitInterface(str(test_dir), logger) + gitp.git_operation("submodule", "add", "--depth","1","--name", submodule_name, "https://github.com/ESMCI/mpi-serial.git", subrepo_path) + assert test_dir.joinpath(".gitmodules").is_file() + gitp.git_operation("add",subrepo_path) + gitp.git_operation("commit","-a","-m","\"add submod\"") + test_dir2 = tmp_path / "testrepo2" + gitp.git_operation("clone",test_dir,test_dir2) + return test_dir2 + + +@pytest.fixture +def complex_repo(tmp_path, logger): + test_dir = tmp_path / "testcomplex" + test_dir.mkdir() + str_path = str(test_dir) + gitp = GitInterface(str_path, logger) + gitp.git_operation("remote", "add", "origin", "https://github.com/jedwards4b/fleximod-test2") + gitp.git_operation("fetch", "origin") + gitp.git_operation("checkout", "v0.0.1") + return test_dir + +@pytest.fixture +def complex_update(tmp_path, logger): + test_dir = tmp_path / "testcomplex" + test_dir.mkdir() + str_path = str(test_dir) + gitp = GitInterface(str_path, logger) + gitp.git_operation("remote", "add", "origin", "https://github.com/jedwards4b/fleximod-test2") + gitp.git_operation("fetch", "origin") + gitp.git_operation("checkout", "v0.0.2") + + return test_dir + +@pytest.fixture +def git_fleximod(): + def _run_fleximod(path, args, input=None): + cmd = ["git", "fleximod"] + args.split() + result = subprocess.run(cmd, cwd=path, input=input, + stdout=subprocess.PIPE, stderr=subprocess.PIPE, + text=True) + if result.returncode: + print(result.stdout) + print(result.stderr) + return result + return _run_fleximod + diff --git a/.lib/git-fleximod/tests/test_a_import.py b/.lib/git-fleximod/tests/test_a_import.py new file mode 100644 index 0000000000..d5ca878de5 --- /dev/null +++ b/.lib/git-fleximod/tests/test_a_import.py @@ -0,0 +1,8 @@ +# pylint: disable=unused-import +from git_fleximod import cli +from git_fleximod import utils +from git_fleximod.gitinterface import GitInterface +from git_fleximod.gitmodules import GitModules + +def test_import(): + print("here") diff --git a/.lib/git-fleximod/tests/test_b_update.py 
b/.lib/git-fleximod/tests/test_b_update.py
new file mode 100644
index 0000000000..159f1cfae0
--- /dev/null
+++ b/.lib/git-fleximod/tests/test_b_update.py
@@ -0,0 +1,26 @@
+import pytest
+from pathlib import Path
+
+def test_basic_checkout(git_fleximod, test_repo, shared_repos):
+    # Prepare a simple .gitmodules
+    gm = shared_repos['gitmodules_content']
+    file_path = (test_repo / ".gitmodules")
+    repo_name = shared_repos["submodule_name"]
+    repo_path = shared_repos["subrepo_path"]
+
+    file_path.write_text(gm)
+
+    # Run the command
+    result = git_fleximod(test_repo, f"update {repo_name}")
+
+    # Assertions
+    assert result.returncode == 0
+    assert Path(test_repo / repo_path).exists()   # Did the submodule directory get created?
+    if "sparse" in repo_name:
+        assert Path(test_repo / f"{repo_path}/m4").exists()   # Did the submodule sparse directory get created?
+        assert not Path(test_repo / f"{repo_path}/README").exists()   # Did only the submodule sparse directory get created?
+
+    status = git_fleximod(test_repo, f"status {repo_name}")
+
+    assert shared_repos["status2"] in status.stdout
+
diff --git a/.lib/git-fleximod/tests/test_c_required.py b/.lib/git-fleximod/tests/test_c_required.py
new file mode 100644
index 0000000000..89ab8d294d
--- /dev/null
+++ b/.lib/git-fleximod/tests/test_c_required.py
@@ -0,0 +1,30 @@
+import pytest
+from pathlib import Path
+
+def test_required(git_fleximod, test_repo, shared_repos):
+    file_path = (test_repo / ".gitmodules")
+    gm = shared_repos["gitmodules_content"]
+    repo_name = shared_repos["submodule_name"]
+    if file_path.exists():
+        with file_path.open("r") as f:
+            gitmodules_content = f.read()
+        # add the entry if it does not exist
+        if repo_name not in gitmodules_content:
+            file_path.write_text(gitmodules_content+gm)
+        # or if it is incomplete
+        elif gm not in gitmodules_content:
+            file_path.write_text(gm)
+    else:
+        file_path.write_text(gm)
+    result = git_fleximod(test_repo, "update")
+    assert result.returncode == 0
+    status = git_fleximod(test_repo, f"status {repo_name}")
+    assert shared_repos["status3"] in status.stdout
+    result = git_fleximod(test_repo, "update --optional")
+    assert result.returncode == 0
+    status = git_fleximod(test_repo, f"status {repo_name}")
+    assert shared_repos["status4"] in status.stdout
+    result = git_fleximod(test_repo, f"update {repo_name}")
+    assert result.returncode == 0
+    status = git_fleximod(test_repo, f"status {repo_name}")
+    assert shared_repos["status4"] in status.stdout
diff --git a/.lib/git-fleximod/tests/test_d_complex.py b/.lib/git-fleximod/tests/test_d_complex.py
new file mode 100644
index 0000000000..edde7d816d
--- /dev/null
+++ b/.lib/git-fleximod/tests/test_d_complex.py
@@ -0,0 +1,66 @@
+import pytest
+from pathlib import Path
+from git_fleximod.gitinterface import GitInterface
+
+def test_complex_checkout(git_fleximod, complex_repo, logger):
+    status = git_fleximod(complex_repo, "status")
+    assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout)
+    assert("ToplevelRequired not checked out, aligned at tag MPIserial_2.5.0" in status.stdout)
+    assert("AlwaysRequired not checked out, aligned at tag MPIserial_2.4.0" in status.stdout)
+    assert("Complex not checked out, aligned at tag testtag02" in status.stdout)
+    assert("AlwaysOptional not checked out, out of sync at tag None, expected tag is MPIserial_2.3.0" in status.stdout)
+
+    # This should checkout and update test_submodule and complex_sub
+    result = git_fleximod(complex_repo, "update")
+    assert result.returncode == 0
+
+    status = 
git_fleximod(complex_repo, "status") + assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout) + assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) + assert("Complex at tag testtag02" in status.stdout) + + # now check the complex_sub + root = (complex_repo / "modules" / "complex") + assert(not (root / "libraries" / "gptl" / ".git").exists()) + assert(not (root / "libraries" / "mpi-serial" / ".git").exists()) + assert((root / "modules" / "mpi-serial" / ".git").exists()) + assert(not (root / "modules" / "mpi-serial2" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / "m4").exists()) + assert(not (root / "modules" / "mpi-sparse" / "README").exists()) + + # update a single optional submodule + + result = git_fleximod(complex_repo, "update ToplevelOptional") + assert result.returncode == 0 + + status = git_fleximod(complex_repo, "status") + assert("ToplevelOptional at tag v5.3.2" in status.stdout) + assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) + assert("Complex at tag testtag02" in status.stdout) + assert("AlwaysOptional not checked out, out of sync at tag None, expected tag is MPIserial_2.3.0" in status.stdout) + + # Finally update optional + result = git_fleximod(complex_repo, "update --optional") + assert result.returncode == 0 + + status = git_fleximod(complex_repo, "status") + assert("ToplevelOptional at tag v5.3.2" in status.stdout) + assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) + assert("Complex at tag testtag02" in status.stdout) + assert("AlwaysOptional at tag MPIserial_2.3.0" in status.stdout) + + # now check the complex_sub + root = (complex_repo / "modules" / "complex" ) + assert(not (root / "libraries" / "gptl" / ".git").exists()) + assert(not (root / "libraries" / "mpi-serial" / ".git").exists()) + assert((root / "modules" / "mpi-serial" / ".git").exists()) + assert((root / "modules" / "mpi-serial2" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / "m4").exists()) + assert(not (root / "modules" / "mpi-sparse" / "README").exists()) + + diff --git a/.lib/git-fleximod/tests/test_e_complex_update.py b/.lib/git-fleximod/tests/test_e_complex_update.py new file mode 100644 index 0000000000..0c3ab4c6a6 --- /dev/null +++ b/.lib/git-fleximod/tests/test_e_complex_update.py @@ -0,0 +1,69 @@ +import pytest +from pathlib import Path +from git_fleximod.gitinterface import GitInterface + +def test_complex_update(git_fleximod, complex_update, logger): + status = git_fleximod(complex_update, "status") + assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout) + assert("ToplevelRequired not checked out, aligned at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired not checked out, aligned at tag MPIserial_2.4.0" in status.stdout) + assert("Complex not checked out, out of sync at tag testtag02, expected tag is testtag3" in status.stdout) + assert("AlwaysOptional not checked out, out of sync at tag None, expected tag is MPIserial_2.3.0" in status.stdout) + + # This should checkout and update test_submodule and complex_sub + result = git_fleximod(complex_update, "update") + assert result.returncode == 0 + + status = 
git_fleximod(complex_update, "status") + assert("ToplevelOptional not checked out, aligned at tag v5.3.2" in status.stdout) + assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) + assert("Complex at tag testtag3" in status.stdout) + + # now check the complex_sub + root = (complex_update / "modules" / "complex") + assert(not (root / "libraries" / "gptl" / ".git").exists()) + assert(not (root / "libraries" / "mpi-serial" / ".git").exists()) + assert((root / "modules" / "mpi-serialAR" / ".git").exists()) + assert((root / "modules" / "mpi-serialSAR" / ".git").exists()) + assert(not (root / "modules" / "mpi-serial2" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / "m4").exists()) + assert(not (root / "modules" / "mpi-sparse" / "README").exists()) + + # update a single optional submodule + + result = git_fleximod(complex_update, "update ToplevelOptional") + assert result.returncode == 0 + + status = git_fleximod(complex_update, "status") + assert("ToplevelOptional at tag v5.3.2" in status.stdout) + assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) + assert("Complex at tag testtag3" in status.stdout) + assert("AlwaysOptional not checked out, out of sync at tag None, expected tag is MPIserial_2.3.0" in status.stdout) + + # Finally update optional + result = git_fleximod(complex_update, "update --optional") + assert result.returncode == 0 + + status = git_fleximod(complex_update, "status") + assert("ToplevelOptional at tag v5.3.2" in status.stdout) + assert("ToplevelRequired at tag MPIserial_2.5.0" in status.stdout) + assert("AlwaysRequired at tag MPIserial_2.4.0" in status.stdout) + assert("Complex at tag testtag3" in status.stdout) + assert("AlwaysOptional at tag MPIserial_2.3.0" in status.stdout) + + # now check the complex_sub + root = (complex_update / "modules" / "complex" ) + assert(not (root / "libraries" / "gptl" / ".git").exists()) + assert(not (root / "libraries" / "mpi-serial" / ".git").exists()) + assert(not (root / "modules" / "mpi-serial" / ".git").exists()) + assert((root / "modules" / "mpi-serialAR" / ".git").exists()) + assert((root / "modules" / "mpi-serialSAR" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / ".git").exists()) + assert((root / "modules" / "mpi-serial2" / ".git").exists()) + assert((root / "modules" / "mpi-sparse" / "m4").exists()) + assert(not (root / "modules" / "mpi-sparse" / "README").exists()) + +