Commit
Bump owslib from 0.28.1 to 0.29.1 (#10944)
* Bump owslib from 0.28.1 to 0.29.1

Bumps [owslib](https://github.com/geopython/OWSLib) from 0.28.1 to 0.29.1.
- [Release notes](https://github.com/geopython/OWSLib/releases)
- [Changelog](https://github.com/geopython/OWSLib/blob/master/CHANGES.rst)
- [Commits](geopython/OWSLib@0.28.1...0.29.1)

---
updated-dependencies:
- dependency-name: owslib
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <[email protected]>

* [Dependencies] Align setup.cfg to requirements.txt

* - Fix OWSLib BREAKING: remove ISO deprecations (geopython/OWSLib#862) (geopython/OWSLib#869)

* - Black formatting issues

* [Dependencies] align setup.cfg to requirements.txt

---------

Signed-off-by: dependabot[bot] <[email protected]>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: mattiagiupponi <[email protected]>
(cherry picked from commit 6448ad4)
afabiani authored and github-actions[bot] committed Apr 21, 2023
1 parent 1244814 commit 2a8d557
Showing 10 changed files with 73 additions and 52 deletions.
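
Most of the churn below follows from a single breaking change in OWSLib 0.29: `MD_Metadata.identification` is now a list of identification blocks rather than a single object, so every `record.identification.<attr>` access becomes `record.identification[0].<attr>`. A minimal sketch of the new access pattern (the `summarize` helper is illustrative, not part of the commit); the keyword representation also changed, see the note after the pycsw_local.py hunk below:

```python
from owslib.iso import MD_Metadata

def summarize(record: MD_Metadata) -> dict:
    """Read a few ISO fields under OWSLib >= 0.29, where identification is a list."""
    blocks = getattr(record, "identification", None) or []
    if not blocks:   # nothing parsed from the record
        return {}
    ident = blocks[0]   # OWSLib 0.28.x exposed this object directly as record.identification
    return {
        "title": ident.title,
        "abstract": ident.abstract,
        "bbox": (ident.bbox.minx, ident.bbox.miny, ident.bbox.maxx, ident.bbox.maxy) if ident.bbox else None,
    }
```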
18 changes: 9 additions & 9 deletions geonode/catalogue/backends/generic.py
@@ -89,8 +89,8 @@ def get_by_uuid(self, uuid):
return None
record = list(self.records.values())[0]
record.keywords = []
- if hasattr(record, "identification") and hasattr(record.identification, "keywords"):
- for kw in record.identification.keywords:
+ if hasattr(record, "identification") and hasattr(record.identification[0], "keywords"):
+ for kw in record.identification[0].keywords:
record.keywords.extend(kw["keywords"])
return record
else:
@@ -193,11 +193,11 @@ def metadatarecord2dict(self, rec):
# Let owslib do some parsing for us...
result = {}
result["uuid"] = rec.identifier
- result["title"] = rec.identification.title
- result["abstract"] = rec.identification.abstract
+ result["title"] = rec.identification[0].title
+ result["abstract"] = rec.identification[0].abstract

keywords = []
- for kw in rec.identification.keywords:
+ for kw in rec.identification[0].keywords:
keywords.extend(kw["keywords"])

result["keywords"] = keywords
@@ -208,10 +208,10 @@ def metadatarecord2dict(self, rec):
result["name"] = result["uuid"]

result["bbox"] = {
- "minx": rec.identification.bbox.minx,
- "maxx": rec.identification.bbox.maxx,
- "miny": rec.identification.bbox.miny,
- "maxy": rec.identification.bbox.maxy,
+ "minx": rec.identification[0].bbox.minx,
+ "maxx": rec.identification[0].bbox.maxx,
+ "miny": rec.identification[0].bbox.miny,
+ "maxy": rec.identification[0].bbox.maxy,
}

# locate all distribution links
6 changes: 3 additions & 3 deletions geonode/catalogue/backends/pycsw_local.py
@@ -86,9 +86,9 @@ def get_record(self, uuid):

record = MD_Metadata(result)
record.keywords = []
- if hasattr(record, "identification") and hasattr(record.identification, "keywords"):
- for kw in record.identification.keywords:
- record.keywords.extend(kw["keywords"])
+ if hasattr(record, "identification") and hasattr(record.identification[0], "keywords"):
+ for kw in record.identification[0].keywords:
+ record.keywords.extend([_kw.name for _kw in kw.keywords])

record.links = {}
record.links["metadata"] = self.catalogue.urls_for_uuid(uuid)
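
The keyword handling here shows the second half of the OWSLib 0.29 change: each group in `identification[0].keywords` is now an `MD_Keywords` object whose members carry the string in `.name`, which is why the flat list is rebuilt with `[_kw.name for _kw in kw.keywords]`. A hedged sketch of that flattening on its own (the `iter_keyword_names` helper is illustrative, not part of the commit):

```python
from owslib.iso import MD_Keywords

def iter_keyword_names(identification):
    """Yield plain keyword strings from an OWSLib >= 0.29 identification block."""
    for group in getattr(identification, "keywords", None) or []:
        if isinstance(group, MD_Keywords):
            for kw in group.keywords:   # keyword objects, not strings
                if kw.name:
                    yield kw.name
        elif isinstance(group, dict):   # pre-0.29 dict-style group
            yield from group.get("keywords", [])
```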
8 changes: 4 additions & 4 deletions geonode/catalogue/tests.py
@@ -75,10 +75,10 @@ def test_update_metadata_records(self):
csw = get_catalogue()
record = csw.get_record(dataset.uuid)
self.assertIsNotNone(record)
- self.assertEqual(record.identification.title, dataset.title)
- self.assertEqual(record.identification.abstract, dataset.raw_abstract)
- if len(record.identification.otherconstraints) > 0:
- self.assertEqual(record.identification.otherconstraints[0], dataset.raw_constraints_other)
+ self.assertEqual(record.identification[0].title, dataset.title)
+ self.assertEqual(record.identification[0].abstract, dataset.raw_abstract)
+ if len(record.identification[0].otherconstraints) > 0:
+ self.assertEqual(record.identification[0].otherconstraints[0], dataset.raw_constraints_other)

def test_given_a_simple_request_should_return_200(self):
actual = csw_global_dispatch(self.request)
56 changes: 33 additions & 23 deletions geonode/layers/metadata.py
@@ -30,7 +30,7 @@

# OWSLib functionality
from owslib.csw import CswRecord
- from owslib.iso import MD_Metadata
+ from owslib.iso import MD_Metadata, MD_Keywords
from owslib.fgdc import Metadata
from django.utils import timezone

@@ -83,32 +83,32 @@ def iso2dict(exml):
vals["date"] = sniff_date(mdata.datestamp)

if hasattr(mdata, "identification"):
- vals["title"] = mdata.identification.title
- vals["abstract"] = mdata.identification.abstract
- vals["purpose"] = mdata.identification.purpose
+ vals["title"] = mdata.identification[0].title
+ vals["abstract"] = mdata.identification[0].abstract
+ vals["purpose"] = mdata.identification[0].purpose

- if mdata.identification.supplementalinformation is not None:
- vals["supplemental_information"] = mdata.identification.supplementalinformation
+ if mdata.identification[0].supplementalinformation is not None:
+ vals["supplemental_information"] = mdata.identification[0].supplementalinformation

- vals["temporal_extent_start"] = mdata.identification.temporalextent_start
- vals["temporal_extent_end"] = mdata.identification.temporalextent_end
+ vals["temporal_extent_start"] = mdata.identification[0].temporalextent_start
+ vals["temporal_extent_end"] = mdata.identification[0].temporalextent_end

- if len(mdata.identification.topiccategory) > 0:
- vals["topic_category"] = mdata.identification.topiccategory[0]
+ if len(mdata.identification[0].topiccategory) > 0:
+ vals["topic_category"] = mdata.identification[0].topiccategory[0]

- if hasattr(mdata.identification, "keywords") and len(mdata.identification.keywords) > 0:
- for kw in mdata.identification.keywords:
- if kw["type"] == "place":
- regions.extend(kw["keywords"])
+ if hasattr(mdata.identification[0], "keywords") and len(mdata.identification[0].keywords) > 0:
+ for kw in mdata.identification[0].keywords:
+ if kw.type == "place":
+ regions.extend([_kw.name for _kw in kw.keywords])
else:
- keywords.extend(kw["keywords"])
+ keywords.extend([_kw.name for _kw in kw.keywords])

- keywords = convert_keyword(mdata.identification.keywords, iso2dict=True)
+ keywords = convert_keyword(keywords, iso2dict=True)

- if len(mdata.identification.otherconstraints) > 0:
- vals["constraints_other"] = mdata.identification.otherconstraints[0]
+ if len(mdata.identification[0].otherconstraints) > 0:
+ vals["constraints_other"] = mdata.identification[0].otherconstraints[0]

- vals["purpose"] = mdata.identification.purpose
+ vals["purpose"] = mdata.identification[0].purpose

if mdata.dataquality is not None:
vals["data_quality_statement"] = mdata.dataquality.lineage
@@ -241,13 +241,23 @@ def parse_metadata(exml, uuid="", vals={}, regions=[], keywords=[], custom={}):
return uuid, vals, regions, keywords, custom


- def convert_keyword(keyword, iso2dict=False, theme="theme"):
- if not iso2dict and keyword:
+ def convert_keyword(keywords, iso2dict=False, theme="theme"):
+ if not iso2dict and keywords:
return [
{
- "keywords": keyword,
+ "keywords": convert_iso_keywords(keywords),
"thesaurus": {"date": None, "datetype": None, "title": None},
"type": theme,
}
]
- return keyword
+ return convert_iso_keywords(keywords)
+
+
+ def convert_iso_keywords(keywords):
+ _keywords = []
+ for kw in keywords:
+ if isinstance(kw, MD_Keywords):
+ _keywords.append([_kw.name for _kw in kw.keywords])
+ else:
+ _keywords.append(kw)
+ return _keywords
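
The rewritten `convert_keyword()` now routes every input through the new `convert_iso_keywords()` helper, so callers can pass either plain strings or OWSLib 0.29 `MD_Keywords` groups. A hedged usage sketch (the import path is the module shown in this hunk; the return values in the comments follow from the code above, not from separate documentation):

```python
from geonode.layers.metadata import convert_keyword

# Plain strings are wrapped into the dict structure the metadata parsers expect:
convert_keyword(["foo", "bar"])
# -> [{"keywords": ["foo", "bar"],
#      "thesaurus": {"date": None, "datetype": None, "title": None},
#      "type": "theme"}]

# With iso2dict=True the wrapping is skipped and only MD_Keywords groups are flattened:
convert_keyword(["foo", "bar"], iso2dict=True)
# -> ["foo", "bar"]
```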
12 changes: 10 additions & 2 deletions geonode/layers/tests.py
@@ -1638,10 +1638,14 @@ def test_set_metadata_return_expected_values_from_xml(self):
"temporal_extent_start": None,
"title": "test_dataset",
}
+ expected_keywords = []
+ for kw in [kw.get("keywords") for kw in self.custom if kw["type"] != "place"]:
+ for _kw in [_kw for _kw in kw]:
+ expected_keywords.append(_kw)
self.assertEqual("7cfbc42c-efa7-431c-8daa-1399dff4cd19", identifier)
self.assertListEqual(["Global"], regions)
self.assertDictEqual(expected_vals, vals)
- self.assertListEqual(self.custom, keywords)
+ self.assertListEqual(expected_keywords, keywords)

def test_convert_keyword_should_empty_list_for_empty_keyword(self):
actual = convert_keyword([])
@@ -1724,9 +1728,13 @@ def setUp(self):

def test_will_use_only_the_default_metadata_parser(self):
identifier, vals, regions, keywords, _ = parse_metadata(open(self.exml_path).read())
+ expected_keywords = []
+ for kw in [kw.get("keywords") for kw in self.keywords if kw["type"] != "place"]:
+ for _kw in [_kw for _kw in kw]:
+ expected_keywords.append(_kw)
self.assertEqual("7cfbc42c-efa7-431c-8daa-1399dff4cd19", identifier)
self.assertListEqual(["Global"], regions)
- self.assertListEqual(self.keywords, keywords)
+ self.assertListEqual(expected_keywords, keywords)
self.assertDictEqual(self.expected_vals, vals)

@override_settings(METADATA_PARSERS=["__DEFAULT__", "geonode.layers.tests.dummy_metadata_parser"])
2 changes: 1 addition & 1 deletion geonode/resource/utils.py
@@ -160,7 +160,7 @@ def update_resource(
try:
instance = KeywordHandler(instance, _keywords).set_keywords()
except Exception as e:
- logger.debug(e)
+ logger.error(e)

# set model properties
defaults = {}
14 changes: 7 additions & 7 deletions geonode/tests/csw.py
@@ -168,18 +168,18 @@ def test_csw_outputschema_iso(self):
record = list(csw.catalogue.records.values())[0]

# test that the ISO title maps correctly in Dublin Core
- self.assertTrue(record.identification.title in "san_andres_y_providencia_location.shp")
+ self.assertTrue(record.identification[0].title in "san_andres_y_providencia_location.shp")

# test that the ISO abstract maps correctly in Dublin Core
- self.assertEqual(record.identification.abstract, "No abstract provided")
+ self.assertEqual(record.identification[0].abstract, "No abstract provided")

# test BBOX properties in Dublin Core
from decimal import Decimal

- self.assertAlmostEqual(Decimal(record.identification.bbox.minx), Decimal("-81.8593555"), places=3)
- self.assertAlmostEqual(Decimal(record.identification.bbox.miny), Decimal("12.1665322"), places=3)
- self.assertAlmostEqual(Decimal(record.identification.bbox.maxx), Decimal("-81.356409"), places=3)
- self.assertAlmostEqual(Decimal(record.identification.bbox.maxy), Decimal("13.396306"), places=3)
+ self.assertAlmostEqual(Decimal(record.identification[0].bbox.minx), Decimal("-81.8593555"), places=3)
+ self.assertAlmostEqual(Decimal(record.identification[0].bbox.miny), Decimal("12.1665322"), places=3)
+ self.assertAlmostEqual(Decimal(record.identification[0].bbox.maxx), Decimal("-81.356409"), places=3)
+ self.assertAlmostEqual(Decimal(record.identification[0].bbox.maxy), Decimal("13.396306"), places=3)

# test for correct link articulation
for link in record.distribution.online:
@@ -310,7 +310,7 @@ def test_csw_upload_fgdc(self):
record = list(csw.catalogue.records.values())[0]

# test that the FGDC title maps correctly in ISO
- self.assertEqual(record.identification.title, "Census_Blockgroup_Pop_Housing")
+ self.assertEqual(record.identification[0].title, "Census_Blockgroup_Pop_Housing")

# cleanup and delete inserted FGDC metadata document
csw.catalogue.transaction(
5 changes: 4 additions & 1 deletion geonode/thumbs/utils.py
@@ -429,7 +429,10 @@ def getmap(
if headers.get("content-type", "").split(";")[0] in ["application/vnd.ogc.se_xml", "text/xml"]:
se_xml = u.read()
se_tree = etree.fromstring(se_xml)
- err_message = str(se_tree.find(nspath("ServiceException", n.get_namespace("ogc"))).text).strip()
+ try:
+ err_message = str(se_tree.find(nspath("ServiceException", n.get_namespace("ogc"))).text).strip()
+ except Exception:
+ err_message = se_xml
raise ServiceException(err_message)
return u

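
The `getmap()` change wraps the ServiceException lookup in a try/except so that an error response whose XML lacks the expected element (where `find()` returns `None` and `.text` raises) still surfaces the raw body as the message. A standalone hedged sketch of the same pattern (`extract_ogc_error` and the namespace constant are illustrative, not taken from the commit):

```python
from xml.etree import ElementTree as etree

OGC_NS = "http://www.opengis.net/ogc"  # assumed namespace for WMS ServiceException documents

def extract_ogc_error(body: str) -> str:
    """Return the ServiceException text when present, otherwise the raw body."""
    try:
        node = etree.fromstring(body).find(f"{{{OGC_NS}}}ServiceException")
        return str(node.text).strip()   # node may be None -> AttributeError -> fallback
    except Exception:
        return body
```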
2 changes: 1 addition & 1 deletion requirements.txt
@@ -54,7 +54,7 @@ pyjwt==2.6.0

# geopython dependencies
pyproj<3.6.0
- OWSLib==0.28.1
+ OWSLib==0.29.1
pycsw==2.6.1
SQLAlchemy==2.0.9 # required by PyCSW
Shapely==1.8.5.post1
2 changes: 1 addition & 1 deletion setup.cfg
@@ -80,7 +80,7 @@ install_requires =

# geopython dependencies
pyproj<3.6.0
- OWSLib==0.28.1
+ OWSLib==0.29.1
pycsw==2.6.1
SQLAlchemy==2.0.9 # required by PyCSW
Shapely==1.8.5.post1
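
With the pin bumped in both requirements.txt and setup.cfg, a quick sanity check that an environment actually picked up the new release might look like this (illustrative, not part of the commit):

```python
import owslib

# OWSLib exposes its version string at the package level.
assert owslib.__version__ == "0.29.1", f"unexpected OWSLib version: {owslib.__version__}"
```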
