diff --git a/.github/workflows/bump-version-dev.yml b/.github/workflows/bump-version-dev.yml
index b42cab9bc..c84a8fae9 100644
--- a/.github/workflows/bump-version-dev.yml
+++ b/.github/workflows/bump-version-dev.yml
@@ -12,48 +12,31 @@ jobs:
     name: Bump version
     runs-on: ubuntu-latest
 
+    permissions:
+      contents: write
+
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
 
       - name: Set up Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
        with:
-          python-version: 3.9
+          python-version: '3.10'
 
       - name: Set env variables
        run: |
-          # The next line is very important, otherwise the line after triggers
-          # git to track the permission change, which breaks bump2version API (needs clean git folder)
-          git config core.filemode false
-          chmod +x .github/workflows/utils.sh
-          echo "VERSION_FILE=porespy/__version__.py" >> $GITHUB_ENV
-          echo "SETUP_CFG_FILE=setup.cfg" >> $GITHUB_ENV
           echo "${{ github.event.head_commit.message }}"
 
       - name: Install dependencies
         run: |
-          pip install bump2version
+          pip install -e .[build]
 
-      - name: Bump version (build)
+      - name: Bump version (dev)
         run: |
-          source .github/workflows/utils.sh
-          bump_version build $VERSION_FILE
-          # Note that we don't want to create a new tag for "builds"
-
-      # - name: Commit files
-      #   run: |
-      #     REPOSITORY=${INPUT_REPOSITORY:-$GITHUB_REPOSITORY}
-      #     remote_repo="https://${GITHUB_ACTOR}:${{ secrets.PUSH_ACTION_TOKEN }}@github.com/${REPOSITORY}.git"
-
-      #     git config --local user.email "action@github.com"
-      #     git config --local user.name "GitHub Action"
-
-      #     # Commit version bump to dev ([no ci] to avoid infinite loop)
-      #     git commit -m "Bump version number (build) [no ci]" -a
-      #     git push "${remote_repo}" dev
+          hatch version dev
 
       - name: Commit files
-        uses: stefanzweifel/git-auto-commit-action@v4
+        uses: stefanzweifel/git-auto-commit-action@v5
         with:
-          commit_message: Bump version number (build part)
-          commit_author: Author
+          commit_message: Bump version number (dev segment)
+          commit_author: GitHub Actions <actions@github.com>
diff --git a/.github/workflows/bump-version.yml b/.github/workflows/bump-version.yml
index 9204db4d3..c3670006f 100644
--- a/.github/workflows/bump-version.yml
+++ b/.github/workflows/bump-version.yml
@@ -11,53 +11,44 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
         with:
           persist-credentials: false  # otherwise, the token used is the GITHUB_TOKEN, instead of your personal token
           fetch-depth: 0  # otherwise, you will fail to push refs to dest repo
 
       - name: Set up Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
         with:
-          python-version: '3.8'
+          python-version: '3.10'
 
       - name: Set env variables
         run: |
-          # The next line is very important, otherwise the line after triggers
-          # git to track the permission change, which breaks bump2version API (needs clean git folder)
-          git config core.filemode false
-          chmod +x .github/workflows/utils.sh
-          echo "VERSION_FILE=porespy/__version__.py" >> $GITHUB_ENV
-          echo "SETUP_CFG_FILE=setup.cfg" >> $GITHUB_ENV
           echo "${{ github.event.head_commit.message }}"
 
       - name: Install dependencies
         run: |
-          pip install bump2version
+          pip install -e .[build]
 
       - name: Bump version (patch)
         if: contains(github.event.head_commit.message, '#patch')
         run: |
-          source .github/workflows/utils.sh
-          bump_version patch $VERSION_FILE
-          echo "TAG_NEW=v$(get_version $VERSION_FILE)" >> $GITHUB_ENV
+          hatch version patch
+          echo "TAG_NEW=v$(hatch version)" >> $GITHUB_ENV
 
       - name: Bump version (minor)
         if: contains(github.event.head_commit.message, '#minor')
         run: |
-          source .github/workflows/utils.sh
-          bump_version minor $VERSION_FILE
-          echo "TAG_NEW=v$(get_version $VERSION_FILE)" >> $GITHUB_ENV
+          hatch version minor
+          echo "TAG_NEW=v$(hatch version)" >> $GITHUB_ENV
 
       - name: Bump version (major)
         if: contains(github.event.head_commit.message, '#major')
         run: |
-          source .github/workflows/utils.sh
-          bump_version major $VERSION_FILE
-          echo "TAG_NEW=v$(get_version $VERSION_FILE)" >> $GITHUB_ENV
+          hatch version major
+          echo "TAG_NEW=v$(hatch version)" >> $GITHUB_ENV
 
       - name: Commit files
-        if:
+        if: |
           contains(github.event.head_commit.message, '#patch') ||
           contains(github.event.head_commit.message, '#minor') ||
           contains(github.event.head_commit.message, '#major')
@@ -68,7 +59,7 @@ jobs:
           git config --local user.email "action@github.com"
           git config --local user.name "GitHub Action"
 
-          # commit version bump to release
+          # Commit version bump to release
          git commit -m "Bump version number" -a
           git push "${remote_repo}" release
 
@@ -77,7 +68,7 @@ jobs:
         with:
           source_branch: "release"            # If blank, default: triggered branch
           destination_branch: "dev"           # If blank, default: master
-          pr_title: "Don't forget to merge release back into dev!"
+          pr_title: "Merge release -> dev to propagate version number bump"
           pr_body: "Changes made to the release branch (e.g. hotfixes), plus the version bump."
           pr_assignee: "jgostick,ma-sadeghi"  # Comma-separated list (no spaces)
           pr_label: "high priority"           # Comma-separated list (no spaces)
           pr_allow_empty: true                # Creates pull request even if there are no changes
           github_token: ${{ secrets.GITHUB_TOKEN }}
 
-      - name: Trim the 4th digit from the tag
-        run:
-          echo "TAG_NEW=${TAG_NEW%.dev?}" >> $GITHUB_ENV
-
       - name: Create new tag
         run: |
           REPOSITORY=${INPUT_REPOSITORY:-$GITHUB_REPOSITORY}
diff --git a/.github/workflows/cleanup-tags.yml b/.github/workflows/cleanup-tags.yml
index 618523a7d..171651473 100644
--- a/.github/workflows/cleanup-tags.yml
+++ b/.github/workflows/cleanup-tags.yml
@@ -8,7 +8,7 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
 
       - name: Clean up tags
         run: |
diff --git a/.github/workflows/examples.yml b/.github/workflows/examples.yml
index 232e98bdd..3c9a81366 100644
--- a/.github/workflows/examples.yml
+++ b/.github/workflows/examples.yml
@@ -14,40 +14,39 @@ jobs:
       fail-fast: false
       max-parallel: 5
       matrix:
-        python-version: ['3.8']
+        python-version: ['3.10']
         operating-system: [ubuntu-latest]
         # Next line should be [1, 2, ..., max-parallel)
         test_group: [1, 2, 3, 4, 5]
 
     steps:
-      - uses: actions/checkout@v2
-      - uses: FedericoCarboni/setup-ffmpeg@v2
+      - uses: actions/checkout@v4
+      - uses: FedericoCarboni/setup-ffmpeg@v3
 
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
 
       - name: Cache pip
-        uses: actions/cache@v2
+        uses: actions/cache@v4
         with:
           # This path is specific to Ubuntu
           path: ~/.cache/pip
           # Look to see if there is a cache hit for the corresponding requirements file
-          key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
+          key: ${{ runner.os }}-pip-${{ hashFiles('pyproject.toml') }}
           restore-keys: |
             ${{ runner.os }}-pip-
             ${{ runner.os }}-
 
       - name: Install dependencies (pip)
         run: |
-          pip install -r requirements.txt
-          pip install -r requirements/tests.txt
-          pip install -r requirements/examples.txt
+          pip install -e .[test,extras]
 
       - name: Running tests
         # Make sure to pass max-parallel to --splits
         run: |
-          pytest examples/ \
+          pytest \
+            -p no:python \
             --nbval-lax \
             --splits ${{ strategy.max-parallel}} \
             --group ${{ matrix.test_group }} \
diff --git a/.github/workflows/gh-pages.yml b/.github/workflows/gh-pages.yml
index 14230c7ac..c918efad8 100644
--- a/.github/workflows/gh-pages.yml
+++ b/.github/workflows/gh-pages.yml
@@ -15,12 +15,12 @@ jobs:
         shell: bash -l {0}
 
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
 
-      - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v2
+      - name: Set up Python
+        uses: actions/setup-python@v5
         with:
-          python-version: 3.8
+          python-version: '3.10'
 
       - name: Cache pip
-        uses: actions/cache@v2
+        uses: actions/cache@v4
@@ -28,15 +28,14 @@ jobs:
         with:
           # This path is specific to Ubuntu
           path: ~/.cache/pip
           # Look to see if there is a cache hit for the corresponding requirements file
-          key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
+          key: ${{ runner.os }}-pip-${{ hashFiles('pyproject.toml') }}
           restore-keys: |
             ${{ runner.os }}-pip-
             ${{ runner.os }}-
 
-      - name: Install dependencies (conda)
+      - name: Install dependencies (pip)
         run: |
-          pip install -r requirements.txt
-          pip install -r requirements/docs.txt
+          pip install -e .[docs,interactive]
 
       # Build the documentation
       - name: Build the documentation
diff --git a/.github/workflows/publish-to-pypi.yml b/.github/workflows/publish-to-pypi.yml
index 58d3abcb5..f4c26166b 100644
--- a/.github/workflows/publish-to-pypi.yml
+++ b/.github/workflows/publish-to-pypi.yml
@@ -1,9 +1,10 @@
 name: Deploy to PyPI
 
 on:
+  workflow_dispatch:
   push:
     tags:
-      - '*'
+      - 'v*'
 
 jobs:
   deploy:
@@ -11,31 +12,21 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
         with:
-          ref: release  # the production branch name (for proper version #)
+          ref: release
 
       - name: Set up Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
         with:
-          python-version: '3.8'
+          python-version: '3.10'
 
       - name: Set env variables
         run: |
           chmod +x .github/workflows/utils.sh
           source .github/workflows/utils.sh
-          VERSION_FILE=porespy/__version__.py
           echo "TAG=$(get_most_recent_tag)" >> $GITHUB_ENV
-          echo "VERSION=$(get_version $VERSION_FILE)" >> $GITHUB_ENV
-
-      - name: Set env variables (for tag mismatch)
-        run: |
-          echo "Tag: $TAG, Version: $VERSION"
-          if [ "${TAG//v}" = "${VERSION%.dev?}" ]; then
-            echo "TAG_MISMATCH=false" >> $GITHUB_ENV
-          else
-            echo "TAG_MISMATCH=true" >> $GITHUB_ENV
-          fi
+          echo "VERSION=$(hatch version)" >> $GITHUB_ENV
 
       - name: Install dependencies
         run: |
@@ -48,30 +39,9 @@ jobs:
         run: python setup.py sdist bdist_wheel
 
       - name: Publish distribution 📦 to PyPI
-        if: startsWith(github.event.ref, 'refs/tags') && contains(env.TAG_MISMATCH, 'false')
-        uses: pypa/gh-action-pypi-publish@master
+        if: startsWith(github.event.ref, 'refs/tags')
+        uses: pypa/gh-action-pypi-publish@release/v1
         with:
           user: __token__
           password: ${{ secrets.PYPI_TOKEN }}
-          skip_existing: true
-
-# - name: Publish distribution 📦 to TestPyPI
-#   if: startsWith(github.event.ref, 'refs/tags') && contains(env.TAG_MISMATCH, 'false')
-#   uses: pypa/gh-action-pypi-publish@master
-#   with:
-#     user: __token__
-#     password: ${{ secrets.TESTPYPI_TOKEN }}
-#     repository_url: https://test.pypi.org/legacy/
-
-  # Not a good idea: if a non-conforming tag is push, e.g. random_tag, it
-  # first gets deleted by cleanup-tags.yml, and then publish-to-pypi.yml gets
-  # tricked and deletes the most recent tag! Ouch!
-
-  # - name: Delete tag if doesn't match with version
-  #   if: contains(env.TAG_MISMATCH, 'true')
-  #   run: |
-  #     git config --local user.email "action@github.com"
-  #     git config --local user.name "GitHub Action"
-  #     REPOSITORY=${INPUT_REPOSITORY:-$GITHUB_REPOSITORY}
-  #     remote_repo="https://${GITHUB_ACTOR}:${{ secrets.GITHUB_TOKEN }}@github.com/${REPOSITORY}.git"
-  #     git push "${remote_repo}" :refs/tags/$TAG
+          skip-existing: true
diff --git a/.github/workflows/release-notes.yml b/.github/workflows/release-notes.yml
index 58cbe978b..1bac0edc6 100644
--- a/.github/workflows/release-notes.yml
+++ b/.github/workflows/release-notes.yml
@@ -12,26 +12,26 @@ jobs:
 
     steps:
      - name: Checkout code
-        uses: actions/checkout@v2
+        uses: actions/checkout@v4
         with:
-          fetch-depth: 0  # to retrieve entire history of refs/tags
+          fetch-depth: 0  # Retrieve entire history of refs/tags
 
-      - name: Generate release notes
+      - id: get-recent-tag
         run: |
           git fetch --all --tags --force
           chmod +x .github/workflows/logger.sh
           chmod +x .github/workflows/utils.sh
           source .github/workflows/utils.sh
           bash .github/workflows/logger.sh
-          echo "TAG=$(get_most_recent_tag)" >> $GITHUB_ENV
+          echo "TAG=$(get_most_recent_tag)" >> $GITHUB_OUTPUT
 
       - name: Create GitHub release
-        uses: Roang-zero1/github-create-release-action@master
+        uses: Roang-zero1/github-create-release-action@v3
         with:
           version_regex: ^v[[:digit:]]+\.[[:digit:]]+\.[[:digit:]]+
           create_draft: true
-          created_tag: ${{ env.TAG }}
+          created_tag: ${{ steps.get-recent-tag.outputs.TAG }}
           update_existing: false
-          release_title: ${{ env.TAG }}
+          release_title: ${{ steps.get-recent-tag.outputs.TAG }}
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/ruff.yml b/.github/workflows/ruff.yml
new file mode 100644
index 000000000..c49507d74
--- /dev/null
+++ b/.github/workflows/ruff.yml
@@ -0,0 +1,10 @@
+name: Ruff
+
+on: pull_request
+
+jobs:
+  ruff:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: chartboost/ruff-action@v1
diff --git a/.github/workflows/test-duration-logger.yml b/.github/workflows/test-duration-logger.yml
index 3b113311f..6a46fb389 100644
--- a/.github/workflows/test-duration-logger.yml
+++ b/.github/workflows/test-duration-logger.yml
@@ -13,32 +13,30 @@ jobs:
     strategy:
       max-parallel: 1
       matrix:
-        python-version: ['3.8']
+        python-version: ['3.10']
         os: [ubuntu-latest]
 
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
 
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
 
       - name: Cache pip
-        uses: actions/cache@v2
+        uses: actions/cache@v4
         with:
           # This path is specific to Ubuntu
           path: ~/.cache/pip
           # Look to see if there is a cache hit for the corresponding requirements file
-          key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
+          key: ${{ runner.os }}-pip-${{ hashFiles('pyproject.toml') }}
           restore-keys: |
             ${{ runner.os }}-pip-
             ${{ runner.os }}-
 
       - name: Install dependencies (pip)
         run: |
-          pip install wheel
-          pip install -r requirements.txt
-          pip install -r requirements/tests.txt
+          pip install -e .[test,extras]
 
       - name: Running unit tests and examples
         run: |
@@ -52,9 +50,9 @@ jobs:
           --durations-path test/fixtures/.test_durations_unit
 
       - name: Committing test duration files
-        uses: EndBug/add-and-commit@v7
+        uses: EndBug/add-and-commit@v9
         with:
           add: 'test/fixtures'
           author_name: github-actions
-          author_email: 41898282+github-actions[bot]@users.noreply.github.com
+          author_email: actions@github.com
           message: 'Updated test duration files.'
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 32121f76d..dc28acffb 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -16,10 +16,9 @@ jobs:
 
     strategy:
       fail-fast: false
-      max-parallel: 9
+      max-parallel: 12
       matrix:
-        # Add '3.10' to the list once #611 is addressed
-        python-version: ['3.8', '3.9', '3.10', '3.11']
+        python-version: ['3.10', '3.11']
         os: [ubuntu-latest, macos-latest, windows-latest]
         include:
           - os: ubuntu-latest
@@ -31,42 +30,37 @@ jobs:
 
     steps:
      - name: Cache pip
-        uses: actions/cache@v2
+        uses: actions/cache@v4
         with:
           path: ${{ matrix.path }}
-          key: ${{ runner.os }}-pip-${{ hashFiles('requirements.txt') }}
+          key: ${{ runner.os }}-pip-${{ hashFiles('pyproject.toml') }}
           restore-keys: |
             ${{ runner.os }}-pip-
 
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
 
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
 
       - name: Install dependencies (pip)
         run: |
-          pip install \
-            -r requirements.txt \
-            -r requirements/tests.txt
+          pip install -e .[test,extras]
 
       # TODO: uncomment this step when integration tests are fixed
       # - name: Disable numba JIT for codecov to include jitted methods
-      #   if: (matrix.python-version == 3.8) && (matrix.os == 'ubuntu-latest')
+      #   if: (matrix.python-version == '3.10') && (matrix.os == 'ubuntu-latest')
      #   run: |
       #     echo "NUMBA_DISABLE_JIT=1" >> $GITHUB_ENV
 
       - name: Running tests
         run:
-          pytest .
-          --cov=.
-          --cov-report=xml
-          --pycodestyle
+          pytest --cov=. --cov-report=xml
 
       - name: Upload coverage to Codecov
-        if: (matrix.python-version == 3.8) && (matrix.os == 'ubuntu-latest')
-        uses: codecov/codecov-action@v1
+        if: (matrix.python-version == '3.10') && (matrix.os == 'ubuntu-latest')
+        uses: codecov/codecov-action@v4
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
           file: ./coverage.xml
diff --git a/.github/workflows/verify-pip-installation.yml b/.github/workflows/verify-pip-installation.yml
index c00d57d45..e88a784ef 100644
--- a/.github/workflows/verify-pip-installation.yml
+++ b/.github/workflows/verify-pip-installation.yml
@@ -1,6 +1,10 @@
 name: Verify pip-installability
 
-on: [workflow_dispatch]
+on:
+  schedule:
+    # Run (on default branch only) at 05:00 (hr:mm) UTC -> 12am EST
+    - cron: "0 5 * * *"
+  workflow_dispatch:
 
 jobs:
   deploy:
@@ -8,11 +12,11 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
 
       - name: Set up Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v5
         with:
-          python-version: '3.8'
+          python-version: '3.10'
 
       - name: Set branch name as env variable
         run: |
diff --git a/.gitignore b/.gitignore
index b549568ca..9cca3b8a3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -155,9 +155,11 @@ cython_debug/
 *.nblink
 
 # OpenPNM
-*.mphtxt
+.vscode/
 docs/_build/
 docs/**/generated
 docs/examples
 examples/networks/*.vt*
-.vscode/
+examples/**/*.vtp
+examples/**/*.tif
+*.mphtxt
diff --git a/example.py b/example.py
index bdb9ef79c..059c2b80e 100644
--- a/example.py
+++ b/example.py
@@ -1,7 +1,5 @@
-import porespy as ps
-import numpy as np
 import matplotlib.pyplot as plt
-
+import porespy as ps
 
 # Generate an image of spheres using the imgen class
 im = ps.generators.blobs(shape=[500, 500], porosity=0.7, blobiness=1)
@@ -17,4 +15,4 @@
 ax[0][0].imshow(im)
 ax[0][1].imshow(chords)
 ax[1][0].imshow(colored_chords, cmap=plt.cm.jet)
-ax[1][1].bar(h.L, h.pdf, width=h.bin_widths, edgecolor='k')
+ax[1][1].bar(h.L, h.pdf, width=h.bin_widths, edgecolor="k")
diff --git a/pyproject.toml b/pyproject.toml
index 4feb9601a..7e0de9179 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -44,6 +44,44 @@ dependencies = [
 readme = "README.md"
 requires-python = ">= 3.8"
 
+[project.optional-dependencies]
+build = ["hatch"]
+test = [
+    "codecov",
+    "coverage",
+    "nbval",
+    "pytest",
+    "pytest-cache",
+    "pytest-cov",
+    "pytest-custom-exit-code",
+    "pytest-split",
+]
+extras = [
+    "imageio",
+    "numpy-stl",
+    "pyevtk",
+    "scikit-fmm",
+    "scikit-learn",
+    "tensorflow",
+    "trimesh",
+]
+docs = [
+    "mock",
+    "myst-nb",
+    "pandoc",
+    "pydata-sphinx-theme==0.9",
+    "sphinx",
+    "sphinx-copybutton",
+    "sphinx-design",
+]
+interactive = [
+    "ipython",
+    "ipykernel",
+    "ipywidgets",
+    "jupyter",
+    "jupyterlab_widgets",
+]
+
 [project.urls]
 Homepage = "https://porespy.org"
 Repository = "https://github.com/PMEAL/porespy"
@@ -64,6 +102,8 @@ dev-dependencies = [
     "trimesh",
     "ipykernel",
     "pypardiso",
+    "nbval",
+    "ruff>=0.3.2",
 ]
 
 [tool.hatch.metadata]
@@ -77,8 +117,24 @@ packages = ["src/porespy"]
 
 [tool.pytest.ini_options]
 minversion = "6.0"
-addopts = "-ra -v"
-testpaths = ["test"]
+addopts = [
+    "-ra", "-v",
+    "--ignore=docs/conf.py",
+    "--ignore=examples/generators/reference/fractal_noise.ipynb",
+    "--ignore=examples/networks/reference/diffusive_size_factor_AI.ipynb",
+    "--ignore=examples/networks/tutorials/predicting_diffusive_size_factors_rock_sample.ipynb",
+    "--ignore=examples/networks/tutorials/using_diffusive_size_factor_AI_with_snow.ipynb",
+]
+python_files = "*.py"
+python_classes = "*Test"
+python_functions = "test_*"
+testpaths = ["test", "examples"]
+norecursedirs = [".git", ".github", ".ipynb_checkpoints", "build", "dist"]
+# filterwarnings = ["error", "ignore::UserWarning", "ignore::DeprecationWarning"]
+# -p no:warnings
 
 [tool.ruff]
+exclude = [".git", ".github", ".venv", "build", "docs", "examples", "test"]
 line-length = 92
+
+[tool.ruff.lint.per-file-ignores]
+"__init__.py" = ["E402", "F401", "F403"]
diff --git a/pytest.ini b/pytest.ini
deleted file mode 100644
index 9e92a495e..000000000
--- a/pytest.ini
+++ /dev/null
@@ -1,21 +0,0 @@
-[pytest]
-minversion = 6.0
-python_files = *.py
-python_classes = *Test
-python_functions = test_*
-testpaths =
-    test
-    examples
-addopts =
-    --doctest-modules
-    --ignore=setup.py
-    --ignore=docs/conf.py
-    -p no:warnings
-norecursedirs =
-    .git
-    .github
-    .ipynb_checkpoints
-    build
-    dist
-    locals
-;filterwarnings = ignore::DeprecationWarning
diff --git a/requirements.txt b/requirements.txt
deleted file mode 100644
index cd8c47d3d..000000000
--- a/requirements.txt
+++ /dev/null
@@ -1,3 +0,0 @@
---index-url https://pypi.python.org/simple/
-
--e .
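Note on the versioning migration: the workflows above replace bump2version and the utils.sh helpers with hatch, installed via the new `build` extra. A rough sketch of the equivalent local flow, assuming the repo's `[tool.hatch.version]` table (not shown in this diff) points hatch at the package's version file:

    pip install -e .[build]   # pulls in hatch, per [project.optional-dependencies]
    hatch version             # print the current version, e.g. 2.3.0.dev19
    hatch version dev         # what bump-version-dev.yml runs (no tag created)
    hatch version patch       # what bump-version.yml runs for a '#patch' commit...
    echo "TAG_NEW=v$(hatch version)" >> $GITHUB_ENV   # ...then export the tag name

Because `hatch version` both bumps and prints the version, the old get_version/trim-the-4th-digit shell helpers become unnecessary, which is why they are deleted above.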
diff --git a/requirements/conda.txt b/requirements/conda.txt
deleted file mode 100644
index 14b69bcf2..000000000
--- a/requirements/conda.txt
+++ /dev/null
@@ -1,20 +0,0 @@
-dask
-deprecated
-edt
-imageio
-matplotlib
-numba
-numpy
-numpy-stl
-pandas
-psutil
-pyevtk
-rich
-scikit-fmm
-scikit-image
-scikit-learn
-scipy
-tqdm
-trimesh
-PyWavelets
-nanomesh
diff --git a/requirements/docs.txt b/requirements/docs.txt
deleted file mode 100644
index 3bfbf7d38..000000000
--- a/requirements/docs.txt
+++ /dev/null
@@ -1,17 +0,0 @@
-ipykernel
-ipywidgets
-ipython
-jupyterlab_widgets
-mock
-myst-nb
-pandoc
-pydata-sphinx-theme==0.9
-sphinx
-sphinx-copybutton
-sphinx-design
-pyfastnoisesimd
-scikit-fmm
-trimesh
-pyevtk
-imageio
-numpy-stl
diff --git a/requirements/examples.txt b/requirements/examples.txt
deleted file mode 100644
index e0e59ec19..000000000
--- a/requirements/examples.txt
+++ /dev/null
@@ -1,8 +0,0 @@
-pyfastnoisesimd
-scikit-fmm
-scikit-learn
-trimesh
-pyevtk
-imageio
-numpy-stl
-tensorflow
diff --git a/requirements/tests.txt b/requirements/tests.txt
deleted file mode 100644
index ea13899b1..000000000
--- a/requirements/tests.txt
+++ /dev/null
@@ -1,15 +0,0 @@
-codecov
-coverage
-jupyter
-nbval
-pytest
-pytest-cache
-pytest-cov
-pytest-custom-exit-code
-pytest-pycodestyle
-pytest-split
-scikit-fmm
-trimesh
-pyevtk
-imageio
-numpy-stl
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 884626d3a..000000000
--- a/setup.cfg
+++ /dev/null
@@ -1,28 +0,0 @@
-[bumpversion]
-current_version = 2.3.0.dev19
-parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)\.(?P<release>\D+)(?P<build>\d+)?
-serialize = {major}.{minor}.{patch}.{release}{build}
-
-[bumpversion:part:release]
-values = dev
-
-[flake8]
-ignore = E122,E127,E203,E222,E226,E225,E241,E402,W503,W504,F401
-max-line-length = 90
-
-[pycodestyle]
-ignore = E122,E127,E203,E222,E226,E225,E241,E402,E703,W503,W504,F401
-max-line-length = 90
-
-[pep8]
-ignore = E122,E127,E203,E222,E226,E225,E241,E402,W503,W504,F401
-max-line-length = 90
-
-[pep8_pre_commit_hook]
-max-violations-per-file = 0
-
-[options]
-python_requires = >= 3.8
-
-[metadata]
-license_file = LICENSE
diff --git a/src/porespy/__init__.py b/src/porespy/__init__.py
index 06364b42b..5a0865d53 100644
--- a/src/porespy/__init__.py
+++ b/src/porespy/__init__.py
@@ -24,19 +24,22 @@
 from . import simulations
 from . import visualization
 from . import io
-# The dns module will be deprecated in V3, in favor of simulations
+
+# TODO: Deprecate dns module once v3 is out
 from . import dns
 from .visualization import imshow
 
 import numpy as _np
-_np.seterr(divide='ignore', invalid='ignore')
+
+_np.seterr(divide="ignore", invalid="ignore")
 
 __version__ = tools._get_version()
 
 
 def _setup_logger_rich():
     import logging
+
     from rich.logging import RichHandler
 
     FORMAT = "%(message)s"
diff --git a/src/porespy/beta/_drainage2.py b/src/porespy/beta/_drainage2.py
index 205e30a1e..0c37de31b 100644
--- a/src/porespy/beta/_drainage2.py
+++ b/src/porespy/beta/_drainage2.py
@@ -183,7 +183,6 @@ def _insert_disks_npoints_nradii_1value_parallel(
     import numpy as np
     import porespy as ps
     import matplotlib.pyplot as plt
-    from copy import copy
     from edt import edt
 
 # %%
diff --git a/src/porespy/beta/_gdd.py b/src/porespy/beta/_gdd.py
index 8e6384626..d403feb28 100644
--- a/src/porespy/beta/_gdd.py
+++ b/src/porespy/beta/_gdd.py
@@ -1,5 +1,5 @@
 import time
-from porespy import simulations, tools, settings
+from porespy import simulations, settings
 from porespy.tools import Results
 import numpy as np
 import openpnm as op
@@ -9,7 +9,7 @@ import edt
 
 __all__ = ['tortuosity_gdd', 'chunks_to_dataframe']
 
-settings.loglevel=50
+settings.loglevel = 50
 
 
 @dask.delayed
@@ -199,7 +199,7 @@ def tortuosity_gdd(im, scale_factor=3, use_dask=True):
     all_gD = [result for result in all_results[::2]]
     all_tau_unfiltered = [result for result in all_results[1::2]]
 
-    all_tau = [result.tortuosity if type(result)!=int
+    all_tau = [result.tortuosity if not isinstance(result, int)
               else result for result in all_tau_unfiltered]
 
     t4 = time.perf_counter()- t0
@@ -329,9 +329,9 @@ def chunks_to_dataframe(im, scale_factor=3, use_dask=True):
     all_gD = [result for result in all_results[::2]]
     all_tau_unfiltered = [result for result in all_results[1::2]]
 
-    all_porosity = [result.effective_porosity if type(result)!=int
+    all_porosity = [result.effective_porosity if not isinstance(result, int)
                     else result for result in all_tau_unfiltered]
-    all_tau = [result.tortuosity if type(result)!=int
+    all_tau = [result.tortuosity if not isinstance(result, int)
               else result for result in all_tau_unfiltered]
 
     # creates openpnm network to calculate image tortuosity
diff --git a/src/porespy/filters/__init__.py b/src/porespy/filters/__init__.py
index 25eaf9411..76b230b83 100644
--- a/src/porespy/filters/__init__.py
+++ b/src/porespy/filters/__init__.py
@@ -56,9 +56,9 @@
 
 """
 
+from . import imagej
+from ._fftmorphology import *
 from ._funcs import *
-from ._snows import *
-from ._size_seq_satn import *
 from ._nlmeans import *
-from ._fftmorphology import *
-from . import imagej
+from ._size_seq_satn import *
+from ._snows import *
diff --git a/src/porespy/filters/_snows.py b/src/porespy/filters/_snows.py
index 192c60820..0e114bd94 100644
--- a/src/porespy/filters/_snows.py
+++ b/src/porespy/filters/_snows.py
@@ -7,7 +7,7 @@ import scipy.ndimage as spim
 import scipy.spatial as sptl
 from skimage.segmentation import watershed
-from skimage.morphology import ball, disk, square, cube
+from skimage.morphology import square, cube
 from porespy.tools import _check_for_singleton_axes
 from porespy.tools import extend_slice, ps_rect, ps_round
 from porespy.tools import Results
diff --git a/src/porespy/filters/imagej/__init__.py b/src/porespy/filters/imagej/__init__.py
index 313e782ad..de377911d 100644
--- a/src/porespy/filters/imagej/__init__.py
+++ b/src/porespy/filters/imagej/__init__.py
@@ -16,5 +16,7 @@
 
 """
 
-from ._funcs import imagej_wrapper
-from ._funcs import imagej_plugin
+from ._funcs import (
+    imagej_plugin,
+    imagej_wrapper,
+)
diff --git a/src/porespy/generators/_micromodels.py b/src/porespy/generators/_micromodels.py
index 4b9bf4843..4dbdd1314 100644
--- a/src/porespy/generators/_micromodels.py
+++ b/src/porespy/generators/_micromodels.py
@@ -1,12 +1,13 @@
-import numpy as np
+from typing import List
+
 import matplotlib.pyplot as plt
-from nanomesh import Mesher2D
-from porespy.generators import lattice_spheres, borders, spheres_from_coords
-from porespy.tools import _insert_disks_at_points_parallel, extend_slice
+import numpy as np
 import scipy.ndimage as spim
 import scipy.stats as spst
-from typing import List
+from nanomesh import Mesher2D
 
+from porespy.generators import borders, lattice_spheres, spheres_from_coords
+from porespy.tools import _insert_disks_at_points_parallel, extend_slice
 
 __all__ = [
     'rectangular_pillars_array',
diff --git a/src/porespy/io/_unzipper.py b/src/porespy/io/_unzipper.py
index 7758ff83c..67ab7bae7 100644
--- a/src/porespy/io/_unzipper.py
+++ b/src/porespy/io/_unzipper.py
@@ -1,10 +1,12 @@
-import imageio
-import numpy as np
 import os
-from zipfile import ZipFile
-from porespy.tools import get_tqdm
+import shutil
 from pathlib import Path
+from zipfile import ZipFile
+
+import imageio
+import numpy as np
+
+from porespy.tools import get_tqdm
 
 tqdm = get_tqdm()
@@ -91,4 +93,7 @@ def zip_to_stack(f):
     for i, f in enumerate(tqdm(os.listdir(dir_for_files))):
         im[..., i] = imageio.v2.imread(os.path.join(dir_for_files , f))
 
+    # Remove the unzipped folder
+    shutil.rmtree(dir_for_files)
+
     return im
diff --git a/src/porespy/networks/_funcs.py b/src/porespy/networks/_funcs.py
index 484bb1483..6c56462e3 100644
--- a/src/porespy/networks/_funcs.py
+++ b/src/porespy/networks/_funcs.py
@@ -3,9 +3,9 @@
 import openpnm as op
 import scipy.ndimage as spim
 from skimage.segmentation import find_boundaries
-from skimage.morphology import ball, cube, disk, square
+from skimage.morphology import ball, cube
 from porespy.tools import make_contiguous
-from porespy.tools import overlay, extend_slice
+from porespy.tools import overlay
 from porespy.tools import insert_cylinder
 from porespy.generators import borders
 from porespy import settings
diff --git a/src/porespy/networks/_size_factors.py b/src/porespy/networks/_size_factors.py
index 3bc516940..8f7a6330c 100644
--- a/src/porespy/networks/_size_factors.py
+++ b/src/porespy/networks/_size_factors.py
@@ -471,7 +471,7 @@ def _denorm_predict(prediction, g_train):
     '''
     from sklearn import preprocessing
     scaler = preprocessing.MinMaxScaler(feature_range=(0, 1))
-    train_N = scaler.fit_transform(g_train.reshape(-1, 1))
+    _ = scaler.fit_transform(g_train.reshape(-1, 1))
     denorm = scaler.inverse_transform(X=prediction.reshape(-1, 1))
     denorm = np.squeeze(denorm)
     return denorm
diff --git a/src/porespy/simulations/_drainage.py b/src/porespy/simulations/_drainage.py
index 58cab5246..380c1da4a 100644
--- a/src/porespy/simulations/_drainage.py
+++ b/src/porespy/simulations/_drainage.py
@@ -1,9 +1,7 @@
 import numpy as np
 from edt import edt
-import numba
 from porespy.filters import trim_disconnected_blobs, find_trapped_regions
-from porespy.filters import find_disconnected_voxels
-from porespy.filters import pc_to_satn, satn_to_seq, seq_to_satn
+from porespy.filters import pc_to_satn, satn_to_seq
 from porespy import settings
 from porespy.tools import _insert_disks_at_points
 from porespy.tools import get_tqdm
diff --git a/src/porespy/tools/_funcs.py b/src/porespy/tools/_funcs.py
index 5ebf3c4a1..ba451be2e 100644
--- a/src/porespy/tools/_funcs.py
+++ b/src/porespy/tools/_funcs.py
@@ -1390,7 +1390,7 @@ def extract_regions(regions, labels: list, trim=True):
     to view online example.
 
     """
-    if type(labels) is int:
+    if isinstance(labels, int):
         labels = [labels]
     s = spim.find_objects(regions)
     im_new = np.zeros_like(regions)
diff --git a/src/porespy/visualization/_views.py b/src/porespy/visualization/_views.py
index 3c61324d1..d79895408 100644
--- a/src/porespy/visualization/_views.py
+++ b/src/porespy/visualization/_views.py
@@ -1,6 +1,5 @@
 import numpy as np
 import scipy.ndimage as spim
-import matplotlib.pyplot as plt
 
 
 # from mpl_toolkits.mplot3d.art3d import Poly3DCollection
diff --git a/test/unit/blobs_layers.zip b/test/fixtures/blobs_layers.zip
similarity index 100%
rename from test/unit/blobs_layers.zip
rename to test/fixtures/blobs_layers.zip
diff --git a/test/unit/blobs_layers/0.tif b/test/fixtures/blobs_layers/0.tif
similarity index 100%
rename from test/unit/blobs_layers/0.tif
rename to test/fixtures/blobs_layers/0.tif
diff --git a/test/unit/blobs_layers/1.tif b/test/fixtures/blobs_layers/1.tif
similarity index 100%
rename from test/unit/blobs_layers/1.tif
rename to test/fixtures/blobs_layers/1.tif
diff --git a/test/unit/blobs_layers/2.tif b/test/fixtures/blobs_layers/2.tif
similarity index 100%
rename from test/unit/blobs_layers/2.tif
rename to test/fixtures/blobs_layers/2.tif
diff --git a/test/unit/blobs_layers/3.tif b/test/fixtures/blobs_layers/3.tif
similarity index 100%
rename from test/unit/blobs_layers/3.tif
rename to test/fixtures/blobs_layers/3.tif
diff --git a/test/unit/blobs_layers/4.tif b/test/fixtures/blobs_layers/4.tif
similarity index 100%
rename from test/unit/blobs_layers/4.tif
rename to test/fixtures/blobs_layers/4.tif
diff --git a/test/unit/blobs_layers/5.tif b/test/fixtures/blobs_layers/5.tif
similarity index 100%
rename from test/unit/blobs_layers/5.tif
rename to test/fixtures/blobs_layers/5.tif
diff --git a/test/unit/blobs_layers/6.tif b/test/fixtures/blobs_layers/6.tif
similarity index 100%
rename from test/unit/blobs_layers/6.tif
rename to test/fixtures/blobs_layers/6.tif
diff --git a/test/unit/blobs_layers/7.tif b/test/fixtures/blobs_layers/7.tif
similarity index 100%
rename from test/unit/blobs_layers/7.tif
rename to test/fixtures/blobs_layers/7.tif
diff --git a/test/unit/blobs_layers/8.tif b/test/fixtures/blobs_layers/8.tif
similarity index 100%
rename from test/unit/blobs_layers/8.tif
rename to test/fixtures/blobs_layers/8.tif
diff --git a/test/unit/blobs_layers/9.tif b/test/fixtures/blobs_layers/9.tif
similarity index 100%
rename from test/unit/blobs_layers/9.tif
rename to test/fixtures/blobs_layers/9.tif
diff --git a/test/unit/test_generators.py b/test/unit/test_generators.py
index ebbad8dd4..233d4218d 100644
--- a/test/unit/test_generators.py
+++ b/test/unit/test_generators.py
@@ -458,29 +458,27 @@ def test_faces(self):
         with pytest.raises(Exception):
             ps.generators.faces(shape=[10, 10, 10])
 
+    @pytest.mark.skip(reason="pyfastnoisesimd doesn't support Python 3.9+")
     def test_fractal_noise_2d(self):
-        try:
-            s = [100, 100]
-            # Ensure identical images are returned if seed is same
-            im1 = ps.generators.fractal_noise(shape=s, seed=0, cores=1)
-            im2 = ps.generators.fractal_noise(shape=s, seed=0, cores=1)
-            assert np.linalg.norm(im1) == np.linalg.norm(im2)
-            # Ensure different images are returned even if seed is same
-            im1 = ps.generators.fractal_noise(shape=s, mode='perlin',
-                                              seed=0, octaves=2, cores=1)
-            im2 = ps.generators.fractal_noise(shape=s, mode='perlin',
-                                              seed=0, octaves=4, cores=1)
-            assert np.linalg.norm(im1) != np.linalg.norm(im2)
-            # Check uniformization
-            im1 = ps.generators.fractal_noise(shape=s, mode='cubic',
-                                              uniform=True, cores=1)
-            assert im1.min() >= 0
-            assert im1.max() <= 1
-            im2 = ps.generators.fractal_noise(shape=s, mode='cubic',
-                                              uniform=False, cores=1)
-            assert im2.min() < 0
-        except ModuleNotFoundError:
-            pass
+        s = [100, 100]
+        # Ensure identical images are returned if seed is same
+        im1 = ps.generators.fractal_noise(shape=s, seed=0, cores=1)
+        im2 = ps.generators.fractal_noise(shape=s, seed=0, cores=1)
+        assert np.linalg.norm(im1) == np.linalg.norm(im2)
+        # Ensure different images are returned even if seed is same
+        im1 = ps.generators.fractal_noise(shape=s, mode='perlin',
+                                          seed=0, octaves=2, cores=1)
+        im2 = ps.generators.fractal_noise(shape=s, mode='perlin',
+                                          seed=0, octaves=4, cores=1)
+        assert np.linalg.norm(im1) != np.linalg.norm(im2)
+        # Check uniformization
+        im1 = ps.generators.fractal_noise(shape=s, mode='cubic',
+                                          uniform=True, cores=1)
+        assert im1.min() >= 0
+        assert im1.max() <= 1
+        im2 = ps.generators.fractal_noise(shape=s, mode='cubic',
+                                          uniform=False, cores=1)
+        assert im2.min() < 0
 
     def test_cantor_dust(self):
         np.random.seed(0)
diff --git a/test/unit/test_io.py b/test/unit/test_io.py
index 5568ed003..81343bbeb 100644
--- a/test/unit/test_io.py
+++ b/test/unit/test_io.py
@@ -60,21 +60,21 @@ def test_spheres_to_comsol_radii_centers(self):
                             [40, 25, 55], [60, 0, 89]])
         ps.io.spheres_to_comsol(filename='sphere_pack', centers=centers,
                                 radii=radii)
-        # os.remove("sphere_pack.mphtxt")
+        os.remove("sphere_pack.mphtxt")
 
     def test_spheres_to_comsol_im(self):
         im = ps.generators.overlapping_spheres(shape=[100, 100, 100],
                                                r=10, porosity=0.6)
         ps.io.spheres_to_comsol(filename='sphere_pack', im=im)
-        # os.remove("sphere_pack.mphtxt")
+        os.remove("sphere_pack.mphtxt")
 
     def test_zip_to_stack_and_folder_to_stack(self):
         p = Path(os.path.realpath(__file__),
-                 '../../../test/unit/blobs_layers.zip').resolve()
+                 '../../../test/fixtures/blobs_layers.zip').resolve()
         im = ps.io.zip_to_stack(p)
         assert im.shape == (100, 100, 10)
         p = Path(os.path.realpath(__file__),
-                 '../../../test/unit/blobs_layers').resolve()
+                 '../../../test/fixtures/blobs_layers').resolve()
         im = ps.io.folder_to_stack(p)
         assert im.shape == (100, 100, 10)
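Note on the test configuration: with pytest.ini deleted, the [tool.pytest.ini_options] table in pyproject.toml drives collection for both test/ and examples/, and examples.yml shards the notebook suite with pytest-split while nbval executes the notebooks. A rough local equivalent of one CI shard, using only flags that appear in the workflows above:

    pip install -e .[test,extras]
    # Notebook shard: -p no:python skips regular test modules, --nbval-lax runs
    # the notebooks via nbval, and pytest-split runs group 1 of 5
    pytest -p no:python --nbval-lax --splits 5 --group 1
    # Unit tests with coverage, as tests.yml runs them
    pytest --cov=. --cov-report=xml

pytest-split balances the five groups using recorded timings; test-duration-logger.yml keeps those timings fresh by committing test/fixtures/.test_durations_unit back to the repo.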