Commit

fixing stratigraphic sorters (#144)
* fix: stratigraphic alpha sorter was inverted; reversed the sorter

* ignore units without contacts in the max contact length sorter

* updated orientation sorter to use correct trigonometry for strike/dip lines (see the sketch after this list).

also changed to using all intersections along the line, not only the first intersection; perhaps this should be a different sorter?

* reverted to the original orientation sorter

* style: black

* updated the WA json config so it works

* remove unused argument documentation

* only run doc build on master
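
As a quick illustration of the first and third bullets, here is a minimal sketch of the two behaviours being fixed: converting a strike reading to a dip direction (commonly 90 degrees clockwise under the right-hand rule) and sorting unit names in plain ascending order. The function names and internals are illustrative assumptions, not the map2loop sorter code.

# Illustrative sketch only -- not the map2loop implementation.
def strike_to_dip_direction(strike: float) -> float:
    # Right-hand rule assumption: dip direction is 90 degrees clockwise of
    # strike, wrapped into [0, 360).
    return (strike + 90.0) % 360.0

def sort_units_alphabetically(unit_names: list[str]) -> list[str]:
    # The inverted sorter effectively returned the reverse of this;
    # the fix is a plain ascending sort.
    return sorted(unit_names)

print(strike_to_dip_direction(350.0))              # 80.0
print(sort_units_alphabetically(["c", "a", "b"]))  # ['a', 'b', 'c']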
lachlangrose authored Nov 8, 2024
1 parent c14e986 commit 184663e
Showing 4 changed files with 183 additions and 134 deletions.
50 changes: 25 additions & 25 deletions .github/workflows/CD.yml
@@ -1,4 +1,4 @@
# for this workflow to work, need to release LPF=1.0.5, map2model=1.0.1, use json instead of hjson
# for this workflow to work, need to release LPF=1.0.5, map2model=1.0.1, use json instead of hjson

name: release-please

@@ -51,15 +51,14 @@ jobs:
with:
name: map2loop-dist
path: dist/*.tar.gz
compression-level: 0
compression-level: 0

pypi-test-sdist:
name: Test sdist
needs: pypi-build-sdist
runs-on: ubuntu-latest
steps:

- name : Install GDAL
- name: Install GDAL
run: |
sudo add-apt-repository ppa:ubuntugis/ubuntugis-unstable
sudo apt-get update
@@ -79,15 +78,15 @@ jobs:
pip install geopandas shapely networkx owslib map2model loopprojectfile beartype gdal==3.8.4 hjson pytest scikit-learn
pip install --no-cache dist/*.tar.gz
pip list
pypi-build-wheels:
needs: pypi-test-sdist
name: Build Wheels
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os:
os:
- ubuntu-latest
steps:
- uses: actions/checkout@v4
@@ -96,7 +95,7 @@ jobs:
uses: actions/setup-python@v5
with:
python-version: 3.x

- name: Build Wheels
run: |
pip install build
@@ -128,14 +127,14 @@ jobs:
- name: Set up Python
uses: actions/setup-python@v5

- name : Install GDAL - Linux
- name: Install GDAL - Linux
if: runner.os == 'Linux'
run: |
sudo add-apt-repository ppa:ubuntugis/ubuntugis-unstable
sudo apt-get update
sudo apt-get install -y libgdal-dev gdal-bin
- name: Upgrade pip
- name: Upgrade pip
run: |
python -m pip install --upgrade pip
@@ -152,7 +151,7 @@ jobs:
pip install numpy==1.26.4
pip install -r dependencies.txt
pip install --no-cache --pre --no-index --find-links dist map2loop
pip list
pip list
- name: Testing
shell: bash -l {0}
@@ -165,10 +164,10 @@ jobs:
strategy:
fail-fast: false
matrix:
os:
- ubuntu-latest
# - macos-latest # currently not supported because needs libgcc>= 14 and not available for macos-latest (version available is 4.x.x)
- windows-latest
os:
- ubuntu-latest
# - macos-latest # currently not supported because needs libgcc>= 14 and not available for macos-latest (version available is 4.x.x)
- windows-latest
python-version: ["3.9", "3.10", "3.11", "3.12"]
steps:
- uses: actions/checkout@v4
@@ -200,8 +199,8 @@ jobs:
path: ~/conda_pkgs_dir
shell: bash -l {0}
run: |
conda build -c anaconda -c conda-forge -c loop3d --output-folder conda conda --python ${{ matrix.python-version }}
conda build -c anaconda -c conda-forge -c loop3d --output-folder conda conda --python ${{ matrix.python-version }}
- name: upload artifacts
uses: actions/upload-artifact@v4
with:
@@ -217,14 +216,15 @@ jobs:
- uses: googleapis/release-please-action@v4
id: release
with:
config-file: 'release-please-config.json'
manifest-file: '.release-please-manifest.json'
config-file: "release-please-config.json"
manifest-file: ".release-please-manifest.json"
outputs:
release_created: ${{ steps.release.outputs.release_created }}
#if a release is created then run the deploy scripts for github.io, conda, pypi and docker

documentation-test:
runs-on: ubuntu-latest
if: github.ref == 'refs/heads/master'
steps:
- uses: actions/checkout@v4
- run: |
@@ -255,17 +255,17 @@ jobs:
with:
branch: gh-pages # The branch the action should deploy to.
folder: docs # The folder the action should deploy.

conda-upload:
needs: [release-please, conda-build]
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
os:
- ubuntu-latest
# - macos-latest
- windows-latest
- ubuntu-latest
# - macos-latest
- windows-latest
python-version: ["3.9", "3.10", "3.11", "3.12"]
if: ${{ needs.release-please.outputs.release_created }}
steps:
@@ -278,15 +278,15 @@ jobs:
shell: bash -l {0}
env:
ANACONDA_API_TOKEN: ${{ secrets.ANACONDA_TOKEN }}
with:
with:
skip-existing: true
verbose: true
run: |
conda install -c anaconda anaconda-client -y
anaconda upload --label main conda/*/*.tar.bz2
pypi-upload:
needs: [release-please, pypi-test-wheels]
needs: [release-please, pypi-test-wheels]
runs-on:
- ubuntu-latest
if: ${{ needs.release-please.outputs.release_created }}
97 changes: 48 additions & 49 deletions map2loop/_datasets/config_files/WA.json
@@ -1,50 +1,49 @@
{
"structure": {
"orientation_type": "strike",
"dipdir_column": "strike",
"dip_column": "dip",
"description_column": "DESCRIPTION",
"bedding_text": "Bed",
"overturned_column": "structypei",
"overturned_text": "BEOI",
"objectid_column": "objectid",
"desciption_column": "feature"
},
"geology": {
"unitname_column": "unitname",
"alt_unitname_column": "code",
"group_column": "group_",
"supergroup_column": "supersuite",
"description_column": "descriptn",
"minage_column": "min_age_ma",
"maxage_column": "max_age_ma",
"rocktype_column": "rocktype1",
"alt_rocktype_column": "rocktype2",
"sill_text": "sill",
"intrusive_text": "intrusive",
"volcanic_text": "volcanic",
"objectid_column": "ID",
"ignore_codes": ["cover"]
},
"fault": {
"structtype_column": "feature",
"fault_text": "Fault",
"dip_null_value": "0",
"dipdir_flag": "num",
"dipdir_column": "dip_dir",
"dip_column": "dip",
"orientation_type": "dip direction",
"dipestimate_column": "dip_est",
"dipestimate_text": "gentle,moderate,steep",
"name_column": "name",
"objectid_column": "objectid"
},
"fold": {
"structtype_column": "feature",
"fold_text": "Fold axial trace",
"description_column": "type",
"synform_text": "syncline",
"foldname_column": "NAME",
"objectid_column": "objectid"
}
}
"structure": {
"orientation_type": "strike",
"dipdir_column": "strike",
"dip_column": "dip",
"description_column": "feature",
"bedding_text": "Bed",
"overturned_column": "structypei",
"overturned_text": "BEOI",
"objectid_column": "objectid"
},
"geology": {
"unitname_column": "unitname",
"alt_unitname_column": "code",
"group_column": "group_",
"supergroup_column": "supersuite",
"description_column": "descriptn",
"minage_column": "min_age_ma",
"maxage_column": "max_age_ma",
"rocktype_column": "rocktype1",
"alt_rocktype_column": "rocktype2",
"sill_text": "is a sill",
"intrusive_text": "intrusive",
"volcanic_text": "volcanic",
"objectid_column": "ID",
"ignore_codes": ["cover"]
},
"fault": {
"structtype_column": "feature",
"fault_text": "Fault",
"dip_null_value": "0",
"dipdir_flag": "num",
"dipdir_column": "dip_dir",
"dip_column": "dip",
"orientation_type": "dip direction",
"dipestimate_column": "dip_est",
"dipestimate_text": "gentle,moderate,steep",
"name_column": "name",
"objectid_column": "objectid"
},
"fold": {
"structtype_column": "feature",
"fold_text": "Fold axial trace",
"description_column": "type",
"synform_text": "syncline",
"foldname_column": "NAME",
"objectid_column": "objectid"
}
}
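
For context, a minimal sketch of loading and sanity-checking this config before use; the file path and the checked keys mirror the JSON above, but the validation itself is an assumed workflow, not part of map2loop.

import json

# Assumed path to a local checkout of the config shown above.
with open("map2loop/_datasets/config_files/WA.json") as fh:
    config = json.load(fh)

# The structure section must name columns present in the structure layer.
for key in ("orientation_type", "dipdir_column", "dip_column", "description_column"):
    if key not in config["structure"]:
        raise KeyError(f"missing structure key: {key}")

print(config["geology"]["ignore_codes"])  # ['cover']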
38 changes: 26 additions & 12 deletions map2loop/mapdata.py
@@ -20,6 +20,7 @@
from io import BytesIO
from typing import Union


class MapData:
"""
A data structure containing all the map data loaded from map files
@@ -315,7 +316,7 @@ def set_filenames_from_australian_state(self, state: str):
self.set_filename(Datatype.STRUCTURE, AustraliaStateUrls.aus_structure_urls[state])
self.set_filename(Datatype.FAULT, AustraliaStateUrls.aus_fault_urls[state])
self.set_filename(Datatype.FOLD, AustraliaStateUrls.aus_fold_urls[state])
self.set_filename(Datatype.DTM, "au")
self.set_filename(Datatype.DTM, "hawaii")
lower = state == "SA"

# Check if this is running a documentation test and use local datasets if so
@@ -483,8 +484,6 @@ def __check_and_create_tmp_path(self):
if not os.path.isdir(self.tmp_path):
os.mkdir(self.tmp_path)



@beartype.beartype
def __retrieve_tif(self, filename: str):
"""
@@ -498,10 +497,13 @@ def __retrieve_tif(self, filename: str):
_type_: The open geotiff in a gdal handler
"""
self.__check_and_create_tmp_path()

# For gdal debugging use exceptions
gdal.UseExceptions()
bb_ll = tuple(float(coord) for coord in self.bounding_box_polygon.to_crs("EPSG:4326").geometry.total_bounds)
bb_ll = tuple(
float(coord)
for coord in self.bounding_box_polygon.to_crs("EPSG:4326").geometry.total_bounds
)

if filename.lower() == "aus" or filename.lower() == "au":

@@ -511,7 +513,7 @@
coverage = wcs.getCoverage(
identifier="1", bbox=bb_ll, format="GeoTIFF", crs=4326, width=2048, height=2048
)

# This is stupid that gdal cannot read a byte stream and has to have a
# file on the local system to open or otherwise create a gdal file
# from scratch with Create
@@ -521,14 +523,14 @@
with open(tmp_file, "wb") as fh:
fh.write(coverage.read())
tif = gdal.Open(tmp_file)

elif filename == "hawaii":
import netCDF4

bbox_str = (
f"[({str(bb_ll[1])}):1:({str(bb_ll[3])})][({str(bb_ll[0])}):1:({str(bb_ll[2])})]"
)

filename = f"https://pae-paha.pacioos.hawaii.edu/erddap/griddap/srtm30plus_v11_land.nc?elev{bbox_str}"
f = urllib.request.urlopen(filename)
ds = netCDF4.Dataset("in-mem-file", mode="r", memory=f.read())
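
To make the new "hawaii" branch easier to follow, here is a standalone sketch of how the ERDDAP query string is assembled from the WGS84 bounds (min_lon, min_lat, max_lon, max_lat) returned by total_bounds; the helper name and the example coordinates are illustrative assumptions.

def erddap_elev_url(bb_ll: tuple) -> str:
    # griddap slices are [(lat_min):stride:(lat_max)][(lon_min):stride:(lon_max)]
    bbox_str = f"[({bb_ll[1]}):1:({bb_ll[3]})][({bb_ll[0]}):1:({bb_ll[2]})]"
    return (
        "https://pae-paha.pacioos.hawaii.edu/erddap/griddap/"
        f"srtm30plus_v11_land.nc?elev{bbox_str}"
    )

# Example bounding box near Perth, WA, in EPSG:4326 (illustrative values).
print(erddap_elev_url((115.5, -32.5, 116.5, -31.5)))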
@@ -734,10 +736,10 @@ def parse_structure_map(self) -> tuple:
structure["DIPDIR"] = self.raw_data[Datatype.STRUCTURE][config["dipdir_column"]]
else:
print(f"Structure map does not contain dipdir_column '{config['dipdir_column']}'")

# Ensure all DIPDIR values are within [0, 360]
structure["DIPDIR"] = structure["DIPDIR"] % 360.0

if config["dip_column"] in self.raw_data[Datatype.STRUCTURE]:
structure["DIP"] = self.raw_data[Datatype.STRUCTURE][config["dip_column"]]
else:
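
The DIPDIR normalisation added in this hunk is a one-liner; the toy pandas example below (made-up values) shows its effect.

import pandas as pd

structure = pd.DataFrame({"DIPDIR": [365.0, -10.0, 180.0]})
# Wrap dip directions into [0, 360), as parse_structure_map now does.
structure["DIPDIR"] = structure["DIPDIR"] % 360.0
print(structure["DIPDIR"].tolist())  # [5.0, 350.0, 180.0]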
@@ -1491,9 +1493,9 @@ def colour_units(
)

colour_lookup["colour"] = colour_lookup["colour"].str.upper()
# if there are duplicates in the clut file, drop.
# if there are duplicates in the clut file, drop.
colour_lookup = colour_lookup.drop_duplicates(subset=["UNITNAME"])

if "UNITNAME" in colour_lookup.columns and "colour" in colour_lookup.columns:
stratigraphic_units = stratigraphic_units.merge(
colour_lookup,
@@ -1511,3 +1513,15 @@
f"Colour Lookup file {self.colour_filename} does not contain 'UNITNAME' or 'colour' field"
)
return stratigraphic_units

@property
def GEOLOGY(self):
return self.get_map_data(Datatype.GEOLOGY)

@property
def STRUCTURE(self):
return self.get_map_data(Datatype.STRUCTURE)

@property
def FAULT(self):
return self.get_map_data(Datatype.FAULT)
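
Finally, a usage sketch for the new GEOLOGY/STRUCTURE/FAULT convenience properties; the stand-in classes below only mirror the property pattern in the diff, since constructing a real MapData instance needs loaded map files.

from enum import Enum

class Datatype(Enum):  # stand-in for map2loop's Datatype enum
    GEOLOGY = 0
    STRUCTURE = 1
    FAULT = 2

class DemoMapData:  # stand-in; the real MapData returns GeoDataFrames
    def get_map_data(self, datatype):
        return f"<data for {datatype.name}>"

    @property
    def GEOLOGY(self):
        return self.get_map_data(Datatype.GEOLOGY)

map_data = DemoMapData()
print(map_data.GEOLOGY)  # equivalent to map_data.get_map_data(Datatype.GEOLOGY)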
