From 224d1ef1ab36b020126424ed0ab0723d6a62ead6 Mon Sep 17 00:00:00 2001
From: Wei Ji <23487320+weiji14@users.noreply.github.com>
Date: Mon, 16 Oct 2023 05:49:24 +1300
Subject: [PATCH] Replace flakeheaven and isort with ruff (#2747)

Co-authored-by: Dongdong Tian
---
 .github/workflows/format-command.yml | 2 +-
 .github/workflows/style_checks.yaml | 4 +--
 .gitignore | 2 +-
 Makefile | 9 +++--
 doc/conf.py | 4 +--
 doc/contributing.md | 6 ++--
 environment.yml | 3 +-
 .../cyl/cyl_universal_transverse_mercator.py | 2 +-
 pygmt/datasets/earth_age.py | 2 +-
 pygmt/datasets/earth_free_air_anomaly.py | 2 +-
 pygmt/datasets/earth_geoid.py | 2 +-
 pygmt/datasets/earth_magnetic_anomaly.py | 4 +--
 pygmt/datasets/earth_mask.py | 2 +-
 pygmt/datasets/earth_relief.py | 2 +-
 .../earth_vertical_gravity_gradient.py | 2 +-
 pygmt/datasets/samples.py | 9 +++--
 pygmt/src/nearneighbor.py | 14 ++++----
 pyproject.toml | 33 ++++++++++---------
 18 files changed, 52 insertions(+), 52 deletions(-)

diff --git a/.github/workflows/format-command.yml b/.github/workflows/format-command.yml
index 60917035b0d..5300dd87ff5 100644
--- a/.github/workflows/format-command.yml
+++ b/.github/workflows/format-command.yml
@@ -32,7 +32,7 @@ jobs:
       # Install formatting tools
       - name: Install formatting tools
         run: |
-          python -m pip install black blackdoc docformatter flakeheaven isort
+          python -m pip install black blackdoc docformatter ruff
           python -m pip list
           sudo apt-get install dos2unix
diff --git a/.github/workflows/style_checks.yaml b/.github/workflows/style_checks.yaml
index 2658a52b790..9fb807524be 100644
--- a/.github/workflows/style_checks.yaml
+++ b/.github/workflows/style_checks.yaml
@@ -34,11 +34,11 @@ jobs:

       - name: Install packages
         run: |
-          python -m pip install black blackdoc docformatter flakeheaven pylint isort
+          python -m pip install black blackdoc docformatter pylint ruff
           python -m pip list
           sudo apt-get install dos2unix

-      - name: Formatting check (black, blackdoc, docformatter, flakeheaven and isort)
+      - name: Formatting check (black, blackdoc, docformatter, ruff)
        run: make check

       - name: Linting (pylint)
diff --git a/.gitignore b/.gitignore
index 12ade17828c..c89f876a3ca 100644
--- a/.gitignore
+++ b/.gitignore
@@ -17,8 +17,8 @@ MANIFEST
 .coverage
 coverage.xml
 htmlcov/
-.flakeheaven_cache/
 .pytest_cache/
+.ruff_cache/
 results/
 result_images/
 tmp-test-dir-with-unique-name/
diff --git a/Makefile b/Makefile
index cb66f8deb80..3fcab6575ad 100644
--- a/Makefile
+++ b/Makefile
@@ -15,8 +15,8 @@ help:
        @echo " fulltest run the test suite (including all doctests)"
        @echo " doctest run the doctests only"
        @echo " test_no_images run the test suite (including all doctests) but skip image comparisons"
-       @echo " format run black, blackdoc, docformatter and isort to automatically format the code"
-       @echo " check run code style and quality checks (black, blackdoc, docformatter, flakeheaven and isort)"
+       @echo " format run black, blackdoc, docformatter and ruff to automatically format the code"
+       @echo " check run code style and quality checks (black, blackdoc, docformatter, ruff)"
        @echo " codespell run codespell to check common misspellings"
        @echo " lint run pylint for a deeper (and slower) quality check"
        @echo " clean clean up build and generated files"
@@ -60,17 +60,16 @@ test_no_images: PYTEST_ARGS=-o addopts="--verbose --durations=0 --durations-min=
 test_no_images: _runtest

 format:
-       isort .
        docformatter --in-place $(FORMAT_FILES)
        black $(FORMAT_FILES)
        blackdoc $(FORMAT_FILES)
+       ruff check --fix $(FORMAT_FILES)

 check:
-       isort . --check
        docformatter --check $(FORMAT_FILES)
        black --check $(FORMAT_FILES)
        blackdoc --check $(FORMAT_FILES)
-       FLAKEHEAVEN_CACHE_TIMEOUT=0 flakeheaven lint $(FORMAT_FILES)
+       ruff check $(FORMAT_FILES)

 codespell:
        @codespell
diff --git a/doc/conf.py b/doc/conf.py
index 5356ee1adcc..849d5cd0a38 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -7,7 +7,7 @@
 import datetime
 from importlib.metadata import metadata

-# isort: off
+# ruff: isort: off
 from sphinx_gallery.sorting import ( # pylint: disable=no-name-in-module
     ExplicitOrder,
     ExampleTitleSortKey,
@@ -16,7 +16,7 @@
 from pygmt import __commit__, __version__
 from pygmt.sphinx_gallery import PyGMTScraper

-# isort: on
+# ruff: isort: on

 extensions = [
     "myst_parser",
diff --git a/doc/contributing.md b/doc/contributing.md
index fffc0a7bc40..c44e80d5ae5 100644
--- a/doc/contributing.md
+++ b/doc/contributing.md
@@ -476,7 +476,7 @@ We use some tools to format the code so we don't have to think about it:
 - [Black](https://github.com/psf/black)
 - [blackdoc](https://github.com/keewis/blackdoc)
 - [docformatter](https://github.com/myint/docformatter)
-- [isort](https://pycqa.github.io/isort/)
+- [ruff](https://docs.astral.sh/ruff)

 Black and blackdoc loosely follows the [PEP8](http://pep8.org) guide but with a few
 differences. Regardless, you won't have to worry about formatting the code yourself.
@@ -499,14 +499,14 @@ words bridged only by consonants, such as `distcalc`, and `crossprofile`. This
 convention is not applied by the code checking tools, but the PyGMT maintainers
 will comment on any pull requests as needed.

-We also use [flakeheaven](https://flakeheaven.readthedocs.io) and
+We also use [ruff](https://docs.astral.sh/ruff) and
 [pylint](https://pylint.pycqa.org/) to check the quality of the code and quickly catch
 common errors.
 The [`Makefile`](https://github.com/GenericMappingTools/pygmt/blob/main/Makefile)
 contains rules for running both checks:

 ```bash
-make check # Runs black, blackdoc, docformatter, flakeheaven and isort (in check mode)
+make check # Runs black, blackdoc, docformatter, ruff (in check mode)
 make lint # Runs pylint, which is a bit slower
 ```
diff --git a/environment.yml b/environment.yml
index 0f65963817c..73b45304436 100644
--- a/environment.yml
+++ b/environment.yml
@@ -26,9 +26,8 @@ dependencies:
   - blackdoc
   - codespell
   - docformatter>=1.7.2
-  - flakeheaven>=3
-  - isort>=5
   - pylint
+  - ruff
   # Dev dependencies (unit testing)
   - matplotlib
   - pytest-cov
diff --git a/examples/projections/cyl/cyl_universal_transverse_mercator.py b/examples/projections/cyl/cyl_universal_transverse_mercator.py
index 27f2299a48d..1cd0556c303 100644
--- a/examples/projections/cyl/cyl_universal_transverse_mercator.py
+++ b/examples/projections/cyl/cyl_universal_transverse_mercator.py
@@ -14,7 +14,7 @@

 .. _GMT_utm_zones:

-.. figure:: https://docs.generic-mapping-tools.org/latest/_images/GMT_utm_zones.png # noqa: W505
+.. figure:: https://docs.generic-mapping-tools.org/latest/_images/GMT_utm_zones.png
    :width: 700 px
    :align: center
diff --git a/pygmt/datasets/earth_age.py b/pygmt/datasets/earth_age.py
index 8182be77b28..8fe3c5f4d66 100644
--- a/pygmt/datasets/earth_age.py
+++ b/pygmt/datasets/earth_age.py
@@ -15,7 +15,7 @@ def load_earth_age(resolution="01d", region=None, registration=None):
     r"""
     Load the Earth seafloor crustal age dataset in various resolutions.

-    .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_age.png # noqa: W505
+    .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_age.png
       :width: 80 %
       :align: center
diff --git a/pygmt/datasets/earth_free_air_anomaly.py b/pygmt/datasets/earth_free_air_anomaly.py
index 8e411e14359..f1cc1c4f9c8 100644
--- a/pygmt/datasets/earth_free_air_anomaly.py
+++ b/pygmt/datasets/earth_free_air_anomaly.py
@@ -16,7 +16,7 @@ def load_earth_free_air_anomaly(resolution="01d", region=None, registration=None
     Load the IGPP Global Earth Free-Air Anomaly datatset in various
     resolutions.

-    .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_faa.jpg # noqa: W505
+    .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_faa.jpg
       :width: 80 %
       :align: center
diff --git a/pygmt/datasets/earth_geoid.py b/pygmt/datasets/earth_geoid.py
index 104df50e344..140bb39ffe2 100644
--- a/pygmt/datasets/earth_geoid.py
+++ b/pygmt/datasets/earth_geoid.py
@@ -15,7 +15,7 @@ def load_earth_geoid(resolution="01d", region=None, registration=None):
     r"""
     Load the EGM2008 Global Earth Geoid dataset in various resolutions.

-    .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_geoid.jpg # noqa: W505
+    .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_geoid.jpg
       :width: 80 %
       :align: center
diff --git a/pygmt/datasets/earth_magnetic_anomaly.py b/pygmt/datasets/earth_magnetic_anomaly.py
index 7372ba486d7..e9ec6b9a449 100644
--- a/pygmt/datasets/earth_magnetic_anomaly.py
+++ b/pygmt/datasets/earth_magnetic_anomaly.py
@@ -24,8 +24,8 @@ def load_earth_magnetic_anomaly(
        * - Global Earth Magnetic Anomaly Model (EMAG2)
          - World Digital Magnetic Anomaly Map (WDMAM)
-       * - .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_mag4km.jpg # noqa: W505
-         - .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_wdmam.jpg # noqa: W505
+       * - .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_mag4km.jpg
+         - .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_wdmam.jpg

     The grids are downloaded to a user data directory
     (usually ``~/.gmt/server/earth/earth_mag/``,
diff --git a/pygmt/datasets/earth_mask.py b/pygmt/datasets/earth_mask.py
index 99bb57f51b2..f9fcc4be5ca 100644
--- a/pygmt/datasets/earth_mask.py
+++ b/pygmt/datasets/earth_mask.py
@@ -15,7 +15,7 @@ def load_earth_mask(resolution="01d", region=None, registration=None):
     r"""
     Load the GSHHG Global Earth Mask dataset in various resolutions.

-    .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_mask.png # noqa: W505
+    .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_mask.png
       :width: 80 %
       :align: center
diff --git a/pygmt/datasets/earth_relief.py b/pygmt/datasets/earth_relief.py
index aa7df5cfbca..9236753e413 100644
--- a/pygmt/datasets/earth_relief.py
+++ b/pygmt/datasets/earth_relief.py
@@ -23,7 +23,7 @@ def load_earth_relief(
     Load the Earth relief datasets (topography and bathymetry) in various
     resolutions.

-    .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_gebcosi.jpg # noqa: W505
+    .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_gebcosi.jpg
       :width: 80 %
       :align: center
diff --git a/pygmt/datasets/earth_vertical_gravity_gradient.py b/pygmt/datasets/earth_vertical_gravity_gradient.py
index 30ac2c427f4..0eada0a374c 100644
--- a/pygmt/datasets/earth_vertical_gravity_gradient.py
+++ b/pygmt/datasets/earth_vertical_gravity_gradient.py
@@ -18,7 +18,7 @@ def load_earth_vertical_gravity_gradient(
     Load the IGPP Global Earth Vertical Gravity Gradient dataset in various
     resolutions.

-    .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_vgg.jpg # noqa: W505
+    .. figure:: https://www.generic-mapping-tools.org/remote-datasets/_images/GMT_earth_vgg.jpg
       :width: 80 %
       :align: center
diff --git a/pygmt/datasets/samples.py b/pygmt/datasets/samples.py
index 7471ad473c4..2733b354b92 100644
--- a/pygmt/datasets/samples.py
+++ b/pygmt/datasets/samples.py
@@ -301,7 +301,6 @@ def list_sample_data():


 def load_sample_data(name):
-    # pylint: disable=line-too-long
     """
     Load an example dataset from the GMT server.

@@ -317,8 +316,8 @@
     Returns
     -------
     :class:`pandas.DataFrame` or :class:`xarray.DataArray`
-        Sample dataset loaded as a :class:`pandas.DataFrame` for tabular data or
-        :class:`xarray.DataArray` for raster data.
+        Sample dataset loaded as a :class:`pandas.DataFrame` for tabular data
+        or :class:`xarray.DataArray` for raster data.

     See Also
     --------
@@ -331,7 +330,7 @@
     >>> from pprint import pprint
     >>> from pygmt.datasets import list_sample_data, load_sample_data
     >>> # use list_sample_data to see the available datasets
-    >>> pprint(list_sample_data(), width=120) # noqa: W505
+    >>> pprint(list_sample_data(), width=120)
     {'bathymetry': 'Table of ship bathymetric observations off Baja California',
      'earth_relief_holes': 'Regional 20 arc-minutes Earth relief grid with holes',
      'fractures': 'Table of hypothetical fracture lengths and azimuths',
      'usgs_quakes': 'Table of earthquakes from the USGS'}
     >>> # load the sample bathymetry dataset
     >>> data = load_sample_data("bathymetry")
-    """
+    """ # noqa: W505
     if name not in datasets:
         raise GMTInvalidInput(f"Invalid dataset name '{name}'.")
     return datasets[name].func()
diff --git a/pygmt/src/nearneighbor.py b/pygmt/src/nearneighbor.py
index 53aa9057dde..704958aa7b0 100644
--- a/pygmt/src/nearneighbor.py
+++ b/pygmt/src/nearneighbor.py
@@ -40,11 +40,11 @@ def nearneighbor(data=None, x=None, y=None, z=None, **kwargs):
     r"""
     Grid table data using a "Nearest neighbor" algorithm.

-    **nearneighbor** reads arbitrarily located (*x*, *y*, *z*\ [, *w*]) triplets
-    [quadruplets] and uses a nearest neighbor algorithm to assign a weighted
-    average value to each node that has one or more data points within a search
-    radius centered on the node with adequate coverage across a subset of the
-    chosen sectors. The node value is computed as a weighted mean of the
+    **nearneighbor** reads arbitrarily located (*x*, *y*, *z*\ [, *w*])
+    triplets [quadruplets] and uses a nearest neighbor algorithm to assign a
+    weighted average value to each node that has one or more data points within
+    a search radius centered on the node with adequate coverage across a subset
+    of the chosen sectors. The node value is computed as a weighted mean of the
     nearest point from each sector inside the search radius.
     The weighting function and the averaging used is given by:
@@ -57,7 +57,7 @@ def nearneighbor(data=None, x=None, y=None, z=None, **kwargs):
     criteria and :math:`r_i` is the distance from the node to the *i*'th data
     point. If no data weights are supplied then :math:`w_i = 1`.

-    .. figure:: https://docs.generic-mapping-tools.org/dev/_images/GMT_nearneighbor.png # noqa: W505
+    .. figure:: https://docs.generic-mapping-tools.org/dev/_images/GMT_nearneighbor.png
       :width: 300 px
       :align: center
@@ -139,7 +139,7 @@ def nearneighbor(data=None, x=None, y=None, z=None, **kwargs):
     >>> # Load a sample dataset of bathymetric x, y, and z values
     >>> data = pygmt.datasets.load_sample_data(name="bathymetry")
     >>> # Create a new grid with 5 arc-minutes spacing in the designated region
-    >>> # Set search_radius to only consider points within 10 arc-minutes of a node
+    >>> # Set search_radius to only take points within 10 arc-minutes of a node
     >>> output = pygmt.nearneighbor(
     ...     data=data,
     ...     spacing="5m",
diff --git a/pyproject.toml b/pyproject.toml
index 42ec3d330cb..4b5ca958003 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -85,27 +85,30 @@ make-summary-multi-line = true
 wrap-summaries = 79
 wrap-descriptions = 79

-[tool.flakeheaven]
-max_line_length = 88
-max_doc_length = 79
-show_source = true
+[tool.ruff]
+line-length = 88 # E501 (line-too-long)
+show-source = true
+select = [
+    "E", # pycodestyle
+    "F", # pyflakes
+    "I", # isort
+    "W", # pycodestyle warnings
+]
+ignore = ["E501"] # Avoid enforcing line-length violations
+
+[tool.ruff.pycodestyle]
+max-doc-length = 79

-[tool.flakeheaven.plugins]
-pycodestyle = ["+*", "-E501", "-W503"]
-pyflakes = ["+*"]
+[tool.ruff.per-file-ignores]
+"__init__.py" = ["F401"] # Ignore `F401` (unused-import) in all `__init__.py` files

-[tool.flakeheaven.exceptions."**/__init__.py"]
-pyflakes = ["-F401"]
+[tool.ruff.isort]
+known-third-party = ["pygmt"]

 [tool.pytest.ini_options]
 minversion = "6.0"
 addopts = "--verbose --durations=0 --durations-min=0.2 --doctest-modules --mpl --mpl-results-path=results"

-[tool.isort]
-profile = "black"
-skip_gitignore = true
-known_third_party = "pygmt"
-
 [tool.pylint.MASTER]
 # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the
 # number of processors available to use.
@@ -136,5 +139,5 @@ max-module-lines=2000
 disable=[
     "duplicate-code",
     "import-error",
-    "line-too-long", # Already checked by flakeheaven/pycodestyle
+    "line-too-long", # Already checked by ruff's pycodestyle
 ]
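
For reviewers skimming the `pygmt/src/nearneighbor.py` docstring rewrap above: the paragraph describes the gridding idea in prose only, so a rough sketch of it may help. The snippet below is a simplified illustration, not GMT's implementation; the per-sector selection is omitted, the weight function `1 / (1 + (3 * r / R) ** 2)` is an assumption used only for demonstration, and every function and variable name in it is hypothetical.

```python
import numpy as np


def nearneighbor_node(node_xy, points, values, search_radius):
    """Weighted-mean estimate at one grid node from scattered (x, y, z) data.

    ``points`` is an (n, 2) array of x/y positions and ``values`` holds the
    matching z observations. Points farther than ``search_radius`` from the
    node are ignored; the remaining ones contribute through a distance-based
    weight. Returns NaN when no point lies within the search radius.
    """
    dx = points[:, 0] - node_xy[0]
    dy = points[:, 1] - node_xy[1]
    distances = np.hypot(dx, dy)
    inside = distances <= search_radius
    if not inside.any():
        return np.nan
    # Assumed weight: decays quadratically with normalized distance.
    d = 3.0 * distances[inside] / search_radius
    weights = 1.0 / (1.0 + d**2)
    return np.sum(weights * values[inside]) / np.sum(weights)


# Tiny usage example with synthetic scattered data.
rng = np.random.default_rng(42)
xy = rng.uniform(0, 10, size=(200, 2))
z = np.sin(xy[:, 0]) + 0.1 * rng.normal(size=200)
print(nearneighbor_node((5.0, 5.0), xy, z, search_radius=1.5))
```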