diff --git a/.gitignore b/.gitignore index f1cbbba1..bc62feca 100644 --- a/.gitignore +++ b/.gitignore @@ -143,3 +143,4 @@ examples/*/data/ auto_examples/ gen_modules/ examples/*/processed +examples/temp.tif diff --git a/dev-environment.yml b/dev-environment.yml index 281de3a5..bc0e45a1 100644 --- a/dev-environment.yml +++ b/dev-environment.yml @@ -17,7 +17,6 @@ dependencies: - proj-data - scikit-gstat>=0.6.8 - pytransform3d - - geoutils>=0.0.5 # Development-specific - pytest @@ -29,3 +28,7 @@ dependencies: - flake8 - sphinx-autodoc-typehints - sphinx-gallery + + - pip: + - git+https://github.com/GlacioHack/GeoUtils.git + diff --git a/docs/source/about_xdem.rst b/docs/source/about_xdem.rst new file mode 100644 index 00000000..523347df --- /dev/null +++ b/docs/source/about_xdem.rst @@ -0,0 +1,17 @@ +.. _about_xdem: + +About xDEM +========== + +xDEM is a set of open-source Python tools to facilitate the postprocessing of DEMs, and more generally with georeferenced rasters. It is designed by geoscientists for geoscientists, although currently our group has a strong focus on glaciological applications. + +We are not software developers, but we try our best to offer tools that can be useful to a larger group, are well documented and are reliable and maintained. All development and maintenance is made on a voluntary basis and we welcome any new contributors. See some information on how to contribute in the dedicated page of our `GitHub repository `_. + +The core concepts behind *xdem* are: + +**Ease of use**: Most operations require only a few lines of code. The module was developed with a focus on remote sensing and geoscience applications. + +**Modularity**: We offer a set of options, rather than favoring a single method. Everyone should be able to contribute to xdem and add to its functionalities. + +**Reproducibility**: Version-controlled, releases saved with DOI and test-based development ensure our code always performs as expected. 
+ diff --git a/docs/source/biascorr.rst b/docs/source/biascorr.rst index 21e8704c..68d0fcac 100644 --- a/docs/source/biascorr.rst +++ b/docs/source/biascorr.rst @@ -8,9 +8,9 @@ Bias corrections correspond to transformations that cannot be described as a 3-d Directional biases ------------------ -TODO +TODO: In construction Terrain biases -------------- -TODO \ No newline at end of file +TODO: In construction \ No newline at end of file diff --git a/docs/source/code/comparison.py b/docs/source/code/comparison.py index 0d4fd71a..3cff1038 100644 --- a/docs/source/code/comparison.py +++ b/docs/source/code/comparison.py @@ -55,9 +55,10 @@ ) # The example DEMs are void-free, so let's make some random voids. -ddem.data.mask = np.zeros_like(ddem.data, dtype=bool) # Reset the mask # Introduce 50000 nans randomly throughout the dDEM. -ddem.data.mask.ravel()[np.random.choice(ddem.data.size, 50000, replace=False)] = True +mask = np.zeros_like(ddem.data, dtype=bool) +mask.ravel()[(np.random.choice(ddem.data.size, 50000, replace=False))] = True +ddem.set_mask(mask) # SUBSECTION: Linear spatial interpolation diff --git a/docs/source/code/coregistration.py b/docs/source/code/coregistration.py index f1a4f641..756a2b8c 100644 --- a/docs/source/code/coregistration.py +++ b/docs/source/code/coregistration.py @@ -11,18 +11,14 @@ # Load the data using xdem and geoutils (could be with rasterio and geopandas instead) # Load a reference DEM from 2009 -reference_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) +ref_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) # Load a moderately well aligned DEM from 1990 -dem_to_be_aligned = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem")).reproject(reference_dem, silent=True) +tba_dem = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem")).reproject(ref_dem, silent=True) # Load glacier outlines from 1990. This will act as the unstable ground. 
glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) -# Prepare the inputs for coregistration. -ref_data = reference_dem.data.squeeze() # This is a numpy 2D array/masked_array -tba_data = dem_to_be_aligned.data.squeeze() # This is a numpy 2D array/masked_array # This is a boolean numpy 2D array. Note the bitwise not (~) symbol -inlier_mask = ~glacier_outlines.create_mask(reference_dem) -transform = reference_dem.transform # This is a rio.transform.Affine object. +inlier_mask = ~glacier_outlines.create_mask(ref_dem) ######################## # SECTION: Nuth and Kääb @@ -30,10 +26,10 @@ nuth_kaab = coreg.NuthKaab() # Fit the data to a suitable x/y/z offset. -nuth_kaab.fit(ref_data, tba_data, transform=transform, inlier_mask=inlier_mask) +nuth_kaab.fit(ref_dem, tba_dem, inlier_mask=inlier_mask) # Apply the transformation to the data (or any other data) -aligned_dem = nuth_kaab.apply(tba_data, transform=transform) +aligned_dem = nuth_kaab.apply(tba_dem) #################### # SECTION: Deramping @@ -42,10 +38,10 @@ # Instantiate a 1st order deramping object. deramp = coreg.Deramp(degree=1) # Fit the data to a suitable polynomial solution. -deramp.fit(ref_data, tba_data, transform=transform, inlier_mask=inlier_mask) +deramp.fit(ref_dem, tba_dem, inlier_mask=inlier_mask) # Apply the transformation to the data (or any other data) -deramped_dem = deramp.apply(dem_to_be_aligned.data, transform=dem_to_be_aligned.transform) +deramped_dem = deramp.apply(tba_dem) ########################## # SECTION: Bias correction @@ -53,10 +49,10 @@ bias_corr = coreg.BiasCorr() # Note that the transform argument is not needed, since it is a simple vertical correction. 
-bias_corr.fit(ref_data, tba_data, inlier_mask=inlier_mask, transform=reference_dem.transform) +bias_corr.fit(ref_dem, tba_dem, inlier_mask=inlier_mask) # Apply the bias to a DEM -corrected_dem = bias_corr.apply(tba_data, transform=dem_to_be_aligned.transform) +corrected_dem = bias_corr.apply(tba_dem) # Use median bias instead bias_median = coreg.BiasCorr(bias_func=np.median) @@ -70,10 +66,10 @@ # Instantiate the object with default parameters icp = coreg.ICP() # Fit the data to a suitable transformation. -icp.fit(ref_data, tba_data, transform=transform, inlier_mask=inlier_mask) +icp.fit(ref_dem, tba_dem, inlier_mask=inlier_mask) # Apply the transformation matrix to the data (or any other data) -aligned_dem = icp.apply(tba_data, transform=transform) +aligned_dem = icp.apply(tba_dem) ################### # SECTION: Pipeline diff --git a/docs/source/comparison.rst b/docs/source/comparison.rst index a872504e..94756eef 100644 --- a/docs/source/comparison.rst +++ b/docs/source/comparison.rst @@ -17,7 +17,7 @@ dDEM interpolation ------------------ There are many approaches to interpolate a dDEM. A good comparison study for glaciers is McNabb et al., (`2019 `_). -So far, ``xdem`` has three types of interpolation: +So far, xDEM has three types of interpolation: - Linear spatial interpolation - Local hypsometric interpolation @@ -26,7 +26,7 @@ So far, ``xdem`` has three types of interpolation: Let's first create a :class:`xdem.ddem.dDEM` object to experiment on: .. literalinclude:: code/comparison.py - :lines: 51-60 + :lines: 51-61 Linear spatial interpolation @@ -35,7 +35,7 @@ Linear spatial interpolation (also often called bilinear interpolation) of dDEMs .. literalinclude:: code/comparison.py - :lines: 64 + :lines: 65 .. plot:: code/comparison_plot_spatial_interpolation.py @@ -51,7 +51,7 @@ Then, voids are interpolated by replacing them with what "should be there" at th .. literalinclude:: code/comparison.py - :lines: 68 + :lines: 69 .. 
plot:: code/comparison_plot_local_hypsometric_interpolation.py @@ -67,7 +67,7 @@ This is advantageous in respect to areas where voids are frequent, as not even a Of course, the accuracy of such an averaging is much lower than if the local hypsometric approach is used (assuming it is possible). .. literalinclude:: code/comparison.py - :lines: 72 + :lines: 73 .. plot:: code/comparison_plot_regional_hypsometric_interpolation.py @@ -76,7 +76,7 @@ Of course, the accuracy of such an averaging is much lower than if the local hyp The DEMCollection object ------------------------ -Keeping track of multiple DEMs can be difficult when many different extents, resolutions and CRSs are involved, and :class:`xdem.demcollection.DEMCollection` is ``xdem``'s answer to make this simple. +Keeping track of multiple DEMs can be difficult when many different extents, resolutions and CRSs are involved, and :class:`xdem.demcollection.DEMCollection` is xDEM's answer to make this simple. We need metadata on the timing of these products. The DEMs can be provided with the ``datetime=`` argument upon instantiation, or the attribute could be set later. Multiple outlines are provided as a dictionary in the shape of ``{datetime: outline}``. diff --git a/docs/source/coregistration.rst b/docs/source/coregistration.rst index 4b7e2bb6..5eb5b803 100644 --- a/docs/source/coregistration.rst +++ b/docs/source/coregistration.rst @@ -36,13 +36,13 @@ Examples are given using data close to Longyearbyen on Svalbard. These can be lo .. literalinclude:: code/coregistration.py - :lines: 5-27 + :lines: 5-21 The Coreg object ---------------- :class:`xdem.coreg.Coreg` -Each of the coregistration approaches in ``xdem`` inherit their interface from the ``Coreg`` class. +Each of the coregistration approaches in xDEM inherit their interface from the ``Coreg`` class. It is written in a style that should resemble that of ``scikit-learn`` (see their `LinearRegression `_ class for example). 
Each coregistration approach has the methods: @@ -53,6 +53,8 @@ Each coregistration approach has the methods: First, ``.fit()`` is called to estimate the transform, and then this transform can be used or exported using the subsequent methods. +**Figure illustrating the different coregistration methods implemented** + .. inheritance-diagram:: xdem.coreg :top-classes: xdem.coreg.Coreg @@ -90,7 +92,7 @@ Example ^^^^^^^ .. literalinclude:: code/coregistration.py - :lines: 31-36 + :lines: 27-32 .. minigallery:: xdem.coreg.NuthKaab @@ -100,13 +102,13 @@ Deramping --------- :class:`xdem.coreg.Deramp` -- **Performs:** Bias, linear or nonlinear height corrections. +- **Performs:** Bias, linear or nonlinear vertical corrections. - **Supports weights** (soon) - **Recommended for:** Data with no horizontal offset and low to moderate rotational differences. Deramping works by estimating and correcting for an N-degree polynomial over the entire dDEM between a reference and the DEM to be aligned. This may be useful for correcting small rotations in the dataset, or nonlinear errors that for example often occur in structure-from-motion derived optical DEMs (e.g. Rosnell and Honkavaara `2012 `_; Javernick et al. `2014 `_; Girod et al. `2017 `_). -Applying a "0 degree deramping" is equivalent to a simple bias correction, and is recommended for e.g. vertical datum corrections. +Applying a "0 degree deramping" is equivalent to a simple bias correction. Limitations ^^^^^^^^^^^ @@ -119,7 +121,7 @@ Example ^^^^^^^ .. literalinclude:: code/coregistration.py - :lines: 42-48 + :lines: 38-44 Bias correction @@ -141,7 +143,7 @@ Only performs vertical corrections, so it should be combined with another approa Example ^^^^^^^ .. literalinclude:: code/coregistration.py - :lines: 54-64 + :lines: 50-60 ICP --- @@ -171,7 +173,7 @@ Due to the repeated nearest neighbour calculations, ICP is often the slowest cor Example ^^^^^^^ .. 
literalinclude:: code/coregistration.py - :lines: 70-76 + :lines: 66-72 .. minigallery:: xdem.coreg.ICP :add-heading: @@ -184,7 +186,7 @@ Often, more than one coregistration approach is necessary to obtain the best res For example, ICP works poorly with large initial biases, so a ``CoregPipeline`` can be constructed to perform both sequentially: .. literalinclude:: code/coregistration.py - :lines: 82-87 + :lines: 78-83 The ``CoregPipeline`` object exposes the same interface as the ``Coreg`` object. The results of a pipeline can be used in other programs by exporting the combined transformation matrix: @@ -221,6 +223,4 @@ For large biases, rotations and high amounts of noise: .. code-block:: python - coreg.VerticalShift() + coreg.ICP() + coreg.NuthKaab() - - + coreg.BiasCorr() + coreg.ICP() + coreg.NuthKaab() diff --git a/docs/source/filters.rst b/docs/source/filters.rst index 09e2ce17..83e92791 100644 --- a/docs/source/filters.rst +++ b/docs/source/filters.rst @@ -3,4 +3,4 @@ Filtering ========= -In construction \ No newline at end of file +TODO: In construction \ No newline at end of file diff --git a/docs/source/how_to_install.rst b/docs/source/how_to_install.rst new file mode 100644 index 00000000..c8bd1f15 --- /dev/null +++ b/docs/source/how_to_install.rst @@ -0,0 +1,60 @@ +.. _how_to_install: + +How to install +============== + +Installing with conda (recommended) +----------------------------------- +.. code-block:: bash + + conda install -c conda-forge --strict-channel-priority xdem + +**Notes** + +- The ``--strict-channel-priority`` flag seems essential for Windows installs to function correctly, and is recommended for UNIX-based systems as well. + +- Solving dependencies can take a long time with ``conda``. To speed this up, consider installing ``mamba``: + + .. code-block:: bash + + conda install mamba -n base -c conda-forge + + Once installed, the same commands can be run by simply replacing ``conda`` by ``mamba``. 
More details available through the `mamba project `_. + +- If running into the ``sklearn`` error ``ImportError: dlopen: cannot load any more object with static TLS``, your system + needs to update its ``glibc`` (see details `here `_). + If you have no administrator right on the system, you might be able to circumvent this issue by installing a working + environment with specific downgraded versions of ``scikit-learn`` and ``numpy``: + + .. code-block:: bash + + conda create -n xdem-env -c conda-forge xdem scikit-learn==0.20.3 numpy==1.19.* + + On very old systems, if the above install results in segmentation faults, try setting more specifically + ``numpy==1.19.2=py37h54aff64_0`` (worked with Debian 8.11, GLIBC 2.19). + +Installing with pip +------------------- + +.. code-block:: bash + + pip install xdem + +**NOTE**: Setting up GDAL and PROJ may need some extra steps, depending on your operating system and configuration. + + +Installing for contributors +--------------------------- +Recommended: Use conda for dependency solving. + +.. code-block:: shell + + git clone https://github.com/GlacioHack/xdem.git + cd ./xdem + conda env create -f dev-environment.yml + conda activate xdem + pip install -e . + +After installing, we recommend to check that everything is working by running the tests: + +``pytest -rA`` diff --git a/docs/source/index.rst b/docs/source/index.rst index 91d8aa27..a0a4bd4f 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -3,41 +3,52 @@ You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. -Welcome to xdem's documentation! -================================ -``xdem`` aims to make Digital Elevation Model (DEM) analysis easy. -Coregistration, subtraction (and volume measurements), and error statistics should be available to anyone with the correct input data. 
+xDEM: Analysis of digital elevation models for geosciences +========================================================== +xDEM aims at simplifying the analysis of digital elevation models (DEMs). This module provides typical post-processing steps such as coregistration, elevation/volume change measurements and error statistics. +.. toctree:: + :maxdepth: 2 + :caption: Getting started -Simple usage -================== - -.. code-block:: python - - import xdem - - dem1 = xdem.DEM("path/to/first_dem.tif") - dem2 = xdem.DEM("path/to/second_dem.tif") + about_xdem + how_to_install + quick_start - difference = dem1 - dem2 +.. toctree:: + :maxdepth: 2 + :caption: Background + intro_dems + intro_robuststats + intro_accuracy_precision .. toctree:: :maxdepth: 2 - :caption: Contents: + :caption: Features - intro + vertical_ref + terrain coregistration biascorr filters comparison spatialstats - robuststats - terrain + +.. toctree:: + :maxdepth: 2 + :caption: Gallery of examples + auto_examples/index.rst + +.. toctree:: + :maxdepth: 2 + :caption: API Reference + api.rst + Indices and tables ================== diff --git a/docs/source/intro.rst b/docs/source/intro_accuracy_precision.rst similarity index 95% rename from docs/source/intro.rst rename to docs/source/intro_accuracy_precision.rst index 263d7779..86466b65 100644 --- a/docs/source/intro.rst +++ b/docs/source/intro_accuracy_precision.rst @@ -1,7 +1,7 @@ .. _intro: -Introduction: why is it complex to assess DEM accuracy and precision? -===================================================================== +Analysis of accuracy and precision +================================== Digital Elevation Models are numerical, gridded representations of elevation. 
They are generated from different instruments (e.g., optical sensors, radar, lidar), acquired in different conditions (e.g., ground, airborne, satellite) @@ -16,7 +16,7 @@ While some complexities are specific to certain instruments and methods, all DEM These factors lead to difficulties in assessing the accuracy and precision of DEMs, which are necessary to perform further analysis. -In ``xdem``, we provide a framework with state-of-the-art methods published in the scientific literature to make DEM +In xDEM, we provide a framework with state-of-the-art methods published in the scientific literature to make DEM calculations consistent, reproducible, and easy. Accuracy and precision @@ -87,7 +87,7 @@ ps://doi.org/10.3389/feart.2020.566802>`_ and `Hugonnet et al. (2021) `_ +In order to mitigate their effect on DEM analysis, xDEM integrates `robust statistics `_ methods at different levels. These methods can be used to robustly fit functions necessary to perform DEM alignment (see :ref:`coregistration`, :ref:`biascorr`), or to provide robust statistical measures equivalent to the mean, the standard deviation or the covariance of a sample when analyzing DEM precision with @@ -86,7 +86,7 @@ Spatial auto-correlation of a sample `Variogram `_ analysis exploits statistical measures equivalent to the covariance, and is therefore also subject to outliers. -Based on `scikit-gstat `_, ``xdem`` allows to specify robust variogram +Based on `scikit-gstat `_, xDEM allows to specify robust variogram estimators such as Dowd's variogram based on medians (`Dowd (1984) `_) defined as: .. math:: @@ -107,7 +107,7 @@ Least-square loss functions When performing least-squares linear regression, the traditional `loss functions `_ that are used are not robust to outliers. 
-A robust soft L1 loss default is used by default when ``xdem`` performs least-squares regression through `scipy.optimize +A robust soft L1 loss default is used by default when xDEM performs least-squares regression through `scipy.optimize `_. Robust estimators diff --git a/docs/source/quick_start.rst b/docs/source/quick_start.rst new file mode 100644 index 00000000..23062d1c --- /dev/null +++ b/docs/source/quick_start.rst @@ -0,0 +1,53 @@ +.. _quick_start: + +Quick start +=========== + +Sample data +----------- + +xDEM comes with some sample data that is used throughout this documentation to demonstrate the features. If not done already, the sample data can be downloaded with the command + +.. code-block:: python + + xdem.examples.download_longyearbyen_examples() + +The dataset keys and paths can be found from + +.. code-block:: python + + xdem.examples.FILEPATHS_DATA + +Load DEMs and calculate elevation difference +------------------------------------------------ + +.. code-block:: python + + import xdem + + # Load data + dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) + dem_1990 = xdem.DEM(xdem.examples.get_path("longyearbyen_tba_dem_coreg")) + + # Calculate the difference + ddem = dem_2009 - dem_1990 + + # Plot + ddem.show(cmap='coolwarm_r', vmin=-20, vmax=20, cb_title="Elevation change (m)") + + # Save to file + ddem.save("temp.tif") + +A detailed example on how to load raster data, reproject it to a different grid/projection, run simple arithmetic operations such as subtraction, plotting the data and saving to file can be found in the example gallery :ref:`sphx_glr_auto_examples_plot_dem_subtraction.py`. + +.. + .. raw:: html + +
+ + .. only:: html + + .. figure:: /auto_examples/images/thumb/sphx_glr_plot_dem_subtraction_thumb.png + :alt: DEM subtraction + + :ref:`sphx_glr_auto_examples_plot_dem_subtraction.py` diff --git a/docs/source/spatialstats.rst b/docs/source/spatialstats.rst index 4a64d92a..25f0371d 100644 --- a/docs/source/spatialstats.rst +++ b/docs/source/spatialstats.rst @@ -5,7 +5,7 @@ Spatial statistics Spatial statistics, also referred to as `geostatistics `_, are essential for the analysis of observations distributed in space. -To analyze DEMs, ``xdem`` integrates spatial statistics tools specific to DEMs described in recent literature, +To analyze DEMs, xDEM integrates spatial statistics tools specific to DEMs described in recent literature, in particular in `Rolstad et al. (2009) `_, `Dehecq et al. (2020) `_ and `Hugonnet et al. (2021) `_. The implementation of these methods relies @@ -136,7 +136,7 @@ Non-stationarity in elevation measurement errors Elevation data contains significant non-stationarities in elevation measurement errors. -``xdem`` provides tools to **quantify** these non-stationarities along several explanatory variables, +xDEM provides tools to **quantify** these non-stationarities along several explanatory variables, **model** those numerically to estimate an elevation measurement error, and **standardize** them for further analysis. Quantify and model non-stationarites @@ -232,7 +232,7 @@ Spatial correlation of elevation measurement errors correspond to a dependency b close pixels in elevation data. Those can be related to the resolution of the data (short-range correlation), or to instrument noise and deformations (mid- to long-range correlations). -``xdem`` provides tools to **quantify** these spatial correlation with pairwise sampling optimized for grid data and to +xDEM provides tools to **quantify** these spatial correlation with pairwise sampling optimized for grid data and to **model** correlations simultaneously at multiple ranges. 
Quantify spatial correlations @@ -273,7 +273,7 @@ Random subsampling of the grid samples used is a solution, but often unsatisfact of pairwise samples that unevenly represents lag classes (most pairwise differences are found at mid distances, but too few at short distances and long distances). -To remedy this issue, ``xdem`` provides :func:`xdem.spatialstats.sample_empirical_variogram`, an empirical variogram estimation tool +To remedy this issue, xDEM provides :func:`xdem.spatialstats.sample_empirical_variogram`, an empirical variogram estimation tool that encapsulates a pairwise subsampling method described in ``skgstat.MetricSpace.RasterEquidistantMetricSpace``. This method compares pairwise distances between a center subset and equidistant subsets iteratively across a grid, based on `sparse matrices `_ routines computing pairwise distances of two separate diff --git a/docs/source/vertical_ref.rst b/docs/source/vertical_ref.rst new file mode 100644 index 00000000..5fdad953 --- /dev/null +++ b/docs/source/vertical_ref.rst @@ -0,0 +1,6 @@ +.. _vertical_ref: + +Vertical referencing +==================== + +TODO: In construction diff --git a/examples/plot_blockwise_coreg.py b/examples/plot_blockwise_coreg.py index 05c918fb..8fd83869 100644 --- a/examples/plot_blockwise_coreg.py +++ b/examples/plot_blockwise_coreg.py @@ -44,11 +44,9 @@ # The product is a mosaic of multiple DEMs, so "seams" may exist in the data. # These can be visualized by plotting a change map: -diff_before = (reference_dem - dem_to_be_aligned).data +diff_before = reference_dem - dem_to_be_aligned -plt.figure(figsize=(8, 5)) -plt.imshow(diff_before.squeeze(), cmap="coolwarm_r", vmin=-10, vmax=10, extent=plt_extent) -plt.colorbar() +diff_before.show(cmap="coolwarm_r", vmin=-10, vmax=10) plt.show() # %% @@ -70,9 +68,9 @@ # Coregistration is performed with the ``.fit()`` method. # This runs in multiple threads by default, so more CPU cores are preferable here. 
-blockwise.fit(reference_dem.data, dem_to_be_aligned.data, transform=reference_dem.transform, inlier_mask=inlier_mask) +blockwise.fit(reference_dem, dem_to_be_aligned, inlier_mask=inlier_mask) -aligned_dem_data = blockwise.apply(dem_to_be_aligned.data, transform=dem_to_be_aligned.transform) +aligned_dem = blockwise.apply(dem_to_be_aligned) # %% # The estimated shifts can be visualized by applying the coregistration to a completely flat surface. @@ -89,11 +87,9 @@ # %% # Then, the new difference can be plotted to validate that it improved. -diff_after = reference_dem.data - aligned_dem_data +diff_after = reference_dem - aligned_dem -plt.figure(figsize=(8, 5)) -plt.imshow(diff_after.squeeze(), cmap="coolwarm_r", vmin=-10, vmax=10, extent=plt_extent) -plt.colorbar() +diff_after.show(cmap="coolwarm_r", vmin=-10, vmax=10) plt.show() # %% diff --git a/examples/plot_dem_subtraction.py b/examples/plot_dem_subtraction.py index 9be5f1ba..964833cc 100644 --- a/examples/plot_dem_subtraction.py +++ b/examples/plot_dem_subtraction.py @@ -9,10 +9,10 @@ The :func:`xdem.DEM.reproject` method takes care of this. """ +import geoutils as gu import matplotlib.pyplot as plt import xdem - # %% dem_2009 = xdem.DEM(xdem.examples.get_path("longyearbyen_ref_dem")) @@ -35,9 +35,8 @@ # Oops! # ``xdem`` just warned us that ``dem_1990`` did not need reprojection, but we asked it to anyway. # To hide this prompt, add ``.reproject(..., silent=True)``. -# By default, :func:`xdem.DEM.reproject` uses "nearest neighbour" resampling (assuming resampling is needed). -# This approach is great for speed, but it may often be erroneous when resampling. -# Other more accurate methods are "bilinear", "cubic_spline" and others. +# By default, :func:`xdem.DEM.reproject` uses "bilinear" resampling (assuming resampling is needed). +# Other options are "nearest" (fast but inaccurate), "cubic_spline", "lanczos" and others. 
# See `geoutils.Raster.reproject() `_ and `rasterio.enums.Resampling `_ for more information about reprojection. # # Now, we are ready to generate the dDEM: @@ -50,11 +49,31 @@ # It is a new :class:`xdem.DEM` instance, loaded in memory. # Let's visualize it: -plt.figure(figsize=(8, 5)) -plt.imshow(ddem.data.squeeze(), cmap="coolwarm_r", vmin=-20, vmax=20) +ddem.show(cmap="coolwarm_r", vmin=-20, vmax=20, cb_title="Elevation change (m)") +# %% +# Let's add some glacier outlines + +# Load the outlines +glacier_outlines = gu.Vector(xdem.examples.get_path("longyearbyen_glacier_outlines")) + +# Need to create a common matplotlib Axes to plot both on the same figure +# The xlim/ylim commands are necessary only because outlines extend further than the raster extent +ax = plt.subplot(111) +ddem.show(ax=ax, cmap="coolwarm_r", vmin=-20, vmax=20, cb_title="Elevation change (m)") +glacier_outlines.ds.plot(ax=ax, fc='none', ec='k') +plt.xlim(ddem.bounds.left, ddem.bounds.right) +plt.ylim(ddem.bounds.bottom, ddem.bounds.top) +plt.title("With glacier outlines") plt.show() # %% -# ... and that's it! # For missing values, ``xdem`` provides a number of interpolation methods which are shown in the other examples. + +# %% +# Saving the output to a file is also very simple + +ddem.save("temp.tif") + +# %% +# ... and that's it! diff --git a/examples/plot_icp_coregistration.py b/examples/plot_icp_coregistration.py index 231ba578..0d8b4caf 100644 --- a/examples/plot_icp_coregistration.py +++ b/examples/plot_icp_coregistration.py @@ -24,10 +24,7 @@ # %% # Let's plot a hillshade of the mountain for context. -plt_extent = [subset_extent[0], subset_extent[2], subset_extent[1], subset_extent[3]] - -plt.imshow(xdem.terrain.hillshade(dem.data, resolution=dem.res).squeeze(), cmap="Greys_r", extent=plt_extent) -plt.show() +xdem.terrain.hillshade(dem).show(cmap='gray') # %% # To try the effects of rotation, we can artificially rotate the DEM using a transformation matrix. 
@@ -45,15 +42,14 @@ ) # This will apply the matrix along the center of the DEM -rotated_dem = xdem.coreg.apply_matrix(dem.data.squeeze(), transform=dem.transform, matrix=rotation_matrix) - +rotated_dem_data = xdem.coreg.apply_matrix(dem.data.squeeze(), transform=dem.transform, matrix=rotation_matrix) +rotated_dem = xdem.DEM.from_array(rotated_dem_data, transform=dem.transform, crs=dem.crs, nodata=-9999) # %% # We can plot the difference between the original and rotated DEM. # It is now artificially tilting from east down to the west. -diff_before = dem.data - rotated_dem -plt.imshow(diff_before.squeeze(), cmap="coolwarm_r", vmin=-20, vmax=20, extent=plt_extent) - +diff_before = dem - rotated_dem +diff_before.show(cmap="coolwarm_r", vmin=-20, vmax=20) plt.show() # %% @@ -75,18 +71,17 @@ for i, (approach, name) in enumerate(approaches): approach.fit( - reference_dem=dem.data, + reference_dem=dem, dem_to_be_aligned=rotated_dem, - transform=dem.transform, ) - corrected_dem_data = approach.apply(dem=rotated_dem, transform=dem.transform) + corrected_dem = approach.apply(dem=rotated_dem) - diff = dem.data - corrected_dem_data + diff = dem - corrected_dem - plt.subplot(3, 1, i + 1) + ax = plt.subplot(3, 1, i + 1) plt.title(name) - plt.imshow(diff.squeeze(), cmap="coolwarm_r", vmin=-20, vmax=20, extent=plt_extent) + diff.show(cmap="coolwarm_r", vmin=-20, vmax=20, ax=ax) plt.tight_layout() plt.show() diff --git a/examples/plot_norm_regional_hypso.py b/examples/plot_norm_regional_hypso.py index 3025fd15..399a9568 100644 --- a/examples/plot_norm_regional_hypso.py +++ b/examples/plot_norm_regional_hypso.py @@ -3,11 +3,11 @@ ============================================= There are many ways of interpolating gaps in a dDEM. -In the case of glaciers, one very useful fact is that elevation change is generally varies with elevation. +In the case of glaciers, one very useful fact is that elevation change generally varies with elevation. 
This means that if valid pixels exist in a certain elevation bin, their values can be used to fill other pixels in the same approximate elevation. Filling gaps by elevation is the main basis of "hypsometric interpolation approaches", of which there are many variations of. -One problem with simple hypsometric approaches is that they may not work glaciers with different elevation ranges and scales. +One problem with simple hypsometric approaches is that they may not work for glaciers with different elevation ranges and scales. Let's say we have two glaciers: one gigantic reaching from 0-1000 m, and one small from 900-1100 m. Usually in the 2000s, glaciers thin rapidly at the bottom, while they may be neutral or only thin slightly in the top. If we extrapolate the hypsometric signal of the gigantic glacier to use on the small one, it may seem like the smaller glacier has almost no change whatsoever. @@ -60,7 +60,7 @@ np.random.seed(42) random_nans = (xdem.misc.generate_random_field(dem_1990.shape, corr_size=5) > 0.7) & (glacier_index_map > 0) -plt.imshow(random_nans) +plt.imshow(random_nans, interpolation='none') plt.show() # %% diff --git a/examples/plot_nuth_kaab.py b/examples/plot_nuth_kaab.py index f9ecb58c..edfa4fc6 100644 --- a/examples/plot_nuth_kaab.py +++ b/examples/plot_nuth_kaab.py @@ -8,8 +8,7 @@ For more information about the approach, see :ref:`coregistration_nuthkaab`. """ import geoutils as gu -import matplotlib.pyplot as plt - +import numpy as np import xdem # %% @@ -21,23 +20,13 @@ # Create a stable ground mask (not glacierized) to mark "inlier data" inlier_mask = ~glacier_outlines.create_mask(reference_dem) -plt_extent = [ - reference_dem.bounds.left, - reference_dem.bounds.right, - reference_dem.bounds.bottom, - reference_dem.bounds.top, -] # %% # The DEM to be aligned (a 1990 photogrammetry-derived DEM) has some vertical and horizontal biases that we want to avoid. 
# These can be visualized by plotting a change map: -diff_before = (reference_dem - dem_to_be_aligned).data - -plt.figure(figsize=(8, 5)) -plt.imshow(diff_before.squeeze(), cmap="coolwarm_r", vmin=-10, vmax=10, extent=plt_extent) -plt.colorbar() -plt.show() +diff_before = reference_dem - dem_to_be_aligned +diff_before.show(cmap="coolwarm_r", vmin=-10, vmax=10, cb_title="Elevation change (m)") # %% @@ -46,25 +35,27 @@ nuth_kaab = xdem.coreg.NuthKaab() -nuth_kaab.fit(reference_dem.data, dem_to_be_aligned.data, inlier_mask, transform=reference_dem.transform) +nuth_kaab.fit(reference_dem, dem_to_be_aligned, inlier_mask) -aligned_dem_data = nuth_kaab.apply(dem_to_be_aligned.data, transform=dem_to_be_aligned.transform) +aligned_dem = nuth_kaab.apply(dem_to_be_aligned) # %% # Then, the new difference can be plotted to validate that it improved. -diff_after = reference_dem.data - aligned_dem_data +diff_after = reference_dem - aligned_dem +diff_after.show(cmap="coolwarm_r", vmin=-10, vmax=10, cb_title="Elevation change (m)") -plt.figure(figsize=(8, 5)) -plt.imshow(diff_after.squeeze(), cmap="coolwarm_r", vmin=-10, vmax=10, extent=plt_extent) -plt.colorbar() -plt.show() # %% -# We compare the NMAD to validate numerically that there was an improvement (see :ref:`robuststats_meanstd`): +# We compare the median and NMAD to validate numerically that there was an improvement (see :ref:`robuststats_meanstd`): +inliers_before = diff_before.data[inlier_mask].compressed() +med_before, nmad_before = np.median(inliers_before), xdem.spatialstats.nmad(inliers_before) + +inliers_after = diff_after.data[inlier_mask].compressed() +med_after, nmad_after = np.median(inliers_after), xdem.spatialstats.nmad(inliers_after) -print(f"Error before: {xdem.spatialstats.nmad(diff_before):.2f} m") -print(f"Error after: {xdem.spatialstats.nmad(diff_after):.2f} m") +print(f"Error before: median = {med_before:.2f} - NMAD = {nmad_before:.2f} m") +print(f"Error after: median = {med_after:.2f} - NMAD = 
{nmad_after:.2f} m") # %% # In the plot above, one may notice a positive (blue) tendency toward the east. diff --git a/examples/plot_terrain_attributes.py b/examples/plot_terrain_attributes.py index 047a5828..9b0cf904 100644 --- a/examples/plot_terrain_attributes.py +++ b/examples/plot_terrain_attributes.py @@ -20,7 +20,11 @@ def plot_attribute(attribute, cmap, label=None, vlim=None): - plt.figure(figsize=(8, 5)) + + add_cb = True if label is not None else False + + fig = plt.figure(figsize=(8, 5)) + ax = fig.add_subplot(111) if vlim is not None: if isinstance(vlim, (int, float)): @@ -30,15 +34,13 @@ def plot_attribute(attribute, cmap, label=None, vlim=None): else: vlims = {} - plt.imshow( - attribute.squeeze(), + attribute.show( + ax=ax, cmap=cmap, - extent=[dem.bounds.left, dem.bounds.right, dem.bounds.bottom, dem.bounds.top], + add_cb=add_cb, + cb_title=label, **vlims ) - if label is not None: - cbar = plt.colorbar() - cbar.set_label(label) plt.xticks([]) plt.yticks([]) @@ -51,15 +53,20 @@ def plot_attribute(attribute, cmap, label=None, vlim=None): # Slope # ----- -slope = xdem.terrain.slope(dem.data, resolution=dem.res) +slope = xdem.terrain.slope(dem) plot_attribute(slope, "Reds", "Slope (°)") +# %% +# Note that all functions also work with numpy array as inputs, if resolution is specified + +slope = xdem.terrain.slope(dem.data, resolution=dem.res) + # %% # Aspect # ------ -aspect = xdem.terrain.aspect(dem.data) +aspect = xdem.terrain.aspect(dem) plot_attribute(aspect, "twilight", "Aspect (°)") @@ -67,7 +74,7 @@ def plot_attribute(attribute, cmap, label=None, vlim=None): # Hillshade # --------- -hillshade = xdem.terrain.hillshade(dem.data, resolution=dem.res, azimuth=315.0, altitude=45.0) +hillshade = xdem.terrain.hillshade(dem, azimuth=315.0, altitude=45.0) plot_attribute(hillshade, "Greys_r") @@ -75,7 +82,7 @@ def plot_attribute(attribute, cmap, label=None, vlim=None): # Curvature # --------- -curvature = xdem.terrain.curvature(dem.data, resolution=dem.res) 
+curvature = xdem.terrain.curvature(dem) plot_attribute(curvature, "RdGy_r", "Curvature (100 / m)", vlim=1) @@ -83,49 +90,49 @@ def plot_attribute(attribute, cmap, label=None, vlim=None): # Planform curvature # ------------------ -planform_curvature = xdem.terrain.planform_curvature(dem.data, resolution=dem.res) +planform_curvature = xdem.terrain.planform_curvature(dem) plot_attribute(planform_curvature, "RdGy_r", "Planform curvature (100 / m)", vlim=1) # %% # Profile curvature # ----------------- -profile_curvature = xdem.terrain.profile_curvature(dem.data, resolution=dem.res) +profile_curvature = xdem.terrain.profile_curvature(dem) plot_attribute(profile_curvature, "RdGy_r", "Profile curvature (100 / m)", vlim=1) # %% # Topographic Position Index # -------------------------- -tpi = xdem.terrain.topographic_position_index(dem.data) +tpi = xdem.terrain.topographic_position_index(dem) plot_attribute(tpi, "Spectral", "Topographic Position Index", vlim=5) # %% # Terrain Ruggedness Index # ------------------------ -tri = xdem.terrain.terrain_ruggedness_index(dem.data) +tri = xdem.terrain.terrain_ruggedness_index(dem) plot_attribute(tri, "Purples", "Terrain Ruggedness Index") # %% # Roughness # --------- -roughness = xdem.terrain.roughness(dem.data) +roughness = xdem.terrain.roughness(dem) plot_attribute(roughness, "Oranges", "Roughness") # %% # Rugosity # -------- -rugosity = xdem.terrain.rugosity(dem.data, resolution=dem.res) +rugosity = xdem.terrain.rugosity(dem) plot_attribute(rugosity, "YlOrRd", "Rugosity") # %% # Fractal roughness # ----------------- -fractal_roughness = xdem.terrain.fractal_roughness(dem.data) +fractal_roughness = xdem.terrain.fractal_roughness(dem) plot_attribute(fractal_roughness, "Reds", "Fractal roughness") @@ -135,8 +142,8 @@ def plot_attribute(attribute, cmap, label=None, vlim=None): attributes = xdem.terrain.get_terrain_attribute( dem.data, - attribute=["hillshade", "slope", "aspect", "curvature", "terrain_ruggedness_index", "rugosity"], - 
resolution=dem.res + resolution=dem.res, + attribute=["hillshade", "slope", "aspect", "curvature", "terrain_ruggedness_index", "rugosity"] ) plt.figure(figsize=(8, 6.5)) @@ -157,4 +164,4 @@ def plot_attribute(attribute, cmap, label=None, vlim=None): plt.yticks([]) plt.tight_layout() -plt.show() \ No newline at end of file +plt.show()