Merge remote-tracking branch 'frodrigo/master'
* frodrigo/master: (27 commits)
  Workaround Geofabrik 404
  Geocoding fallback to Merimee coord on analyser_merge_heritage_FR_merimee #591
  Better geocoding in analyser_merge_heritage_FR_merimee #591
  Update field names in analyser_merge_power_plant_FR
  Deduplicate official data in Analyser_Merge #560
  Avoid duplicate issues in TagWatchFrViPofm #560
  Avoid duplicate issues in Name_MisspelledWordByRegex_Lang_fa.validator.mapcss #560
  Avoid duplicate issues in analyser_osmosis_highway_zone #560
  Avoid duplicate issues in analyser_osmosis_relation_enforcement #560
  Avoid duplicate issues in analyser_osmosis_relation_restriction #560
  Avoid duplicate issues in analyser_osmosis_roundabout_level #560
  No issue on bus stops with name containing 'Marché' in TagFix_MultipleTag_Lang_fr #586
  Avoid duplicate issue from analyser_osmosis_boundary_relation #560
  Fix avoid duplicate issues in analyser_osmosis_node_like_way #560
  Avoid duplicate issues in analyser_osmosis_relation_associatedStreet #560
  Avoid duplicate issues in analyser_osmosis_addr_interpolation (and speed up) #560
  Avoid duplicate issue in Highway_Lanes #560
  Only one fix per tag in analyser_osmosis_tag_typo #560
  Avoid duplicate issues from Name_Script #560
  Use highway_ends.geom in analyser_osmosis_highway_cul-de-sac_level
  ...
jocelynj committed Aug 19, 2019
2 parents 2bcf013 + 61cf3c5 commit bc7565a
Showing 27 changed files with 247 additions and 142 deletions.
25 changes: 22 additions & 3 deletions analysers/Analyser_Merge.py
@@ -55,8 +55,7 @@
"""

sql00 = """
DROP TABLE IF EXISTS %(official)s CASCADE;
CREATE UNLOGGED TABLE %(official)s (
CREATE TEMP TABLE %(official)s_temp (
ref varchar(65534),
tags hstore,
tags1 hstore,
@@ -97,7 +96,7 @@

sql02 = """
INSERT INTO
%(official)s
%(official)s_temp
VALUES (
%(ref)s,
%(tags)s,
@@ -107,6 +106,25 @@
)
"""

sql02b = """
DROP TABLE IF EXISTS %(official)s CASCADE;
CREATE UNLOGGED TABLE %(official)s AS
SELECT
ref,
tags,
tags1,
fields,
geom
FROM
%(official)s_temp
GROUP BY
ref,
tags,
tags1,
fields,
geom
"""

sql03 = """
CREATE INDEX ir_%(official)s ON %(official)s(ref);
CREATE INDEX ig_%(official)s ON %(official)s USING GIST(geom);
@@ -688,6 +706,7 @@ def insertOfficial(res):
else:
distinct = order_by = ""
osmosis.run0((sql01_ref if mapping.osmRef != "NULL" else sql01_geo) % {"table":table, "x":self.x, "y":self.y, "where":self.formatCSVSelect(), "distinct": distinct, "order_by": order_by}, insertOfficial)
giscurs.execute(sql02b.replace("%(official)s", tableOfficial))
if self.srid:
giscurs.execute("SELECT ST_AsText(ST_Envelope(ST_Extent(geom::geometry))::geography) FROM %s" % tableOfficial)
self.bbox = giscurs.fetchone()[0]
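
The new sql02b implements the deduplication announced in #560: rows are first accumulated in a TEMP staging table (sql00/sql02), and the final UNLOGGED table is then built by grouping on every column, so exact duplicates in the official data collapse to a single row. A minimal self-contained sketch of the same pattern, with hypothetical table and column names (assumes the hstore and PostGIS extensions):

-- Stage raw rows, duplicates included (hypothetical names).
CREATE TEMP TABLE official_sketch_temp (ref varchar, tags hstore, geom geometry);
INSERT INTO official_sketch_temp VALUES ('A1', 'name=>Foo', 'POINT(0 0)');
INSERT INTO official_sketch_temp VALUES ('A1', 'name=>Foo', 'POINT(0 0)');  -- exact duplicate

-- Build the real table once, keeping one copy of each distinct row.
DROP TABLE IF EXISTS official_sketch CASCADE;
CREATE UNLOGGED TABLE official_sketch AS
SELECT ref, tags, geom
FROM official_sketch_temp
GROUP BY ref, tags, geom;  -- same effect as SELECT DISTINCT here
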
3 changes: 2 additions & 1 deletion analysers/Analyser_Osmosis.py
@@ -94,7 +94,8 @@ class Analyser_Osmosis(Analyser):
highway,
is_link,
is_roundabout,
ends(nodes) AS nid,
(ends_geom(nodes, linestring)).id AS nid,
(ends_geom(nodes, linestring)).geom AS geom,
level
FROM
highways
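
ends(nodes) is replaced by ends_geom(nodes, linestring), which also returns the end point geometry, so downstream analysers no longer need to re-join nodes just to fetch coordinates. The function itself is defined elsewhere in the repository; purely as an illustration of its expected shape, a set-returning helper along these lines would pair each end node id with the matching end of the linestring (hypothetical sketch, not the project's actual definition):

-- Illustrative sketch only (assumes PostGIS): first and last node of a way,
-- each paired with the corresponding end point of its linestring.
CREATE OR REPLACE FUNCTION ends_geom_sketch(nodes bigint[], linestring geometry)
RETURNS TABLE(id bigint, geom geometry) AS $$
    SELECT nodes[1], ST_StartPoint(linestring)
    UNION ALL
    SELECT nodes[array_length(nodes, 1)], ST_EndPoint(linestring)
$$ LANGUAGE sql IMMUTABLE;
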
8 changes: 7 additions & 1 deletion analysers/analyser_merge_heritage_FR_merimee.py
@@ -69,9 +69,10 @@ def parseDPRO(dpro):
CSV(Geocode_Addok_CSV(Source(attribution = u"Ministère de la Culture", millesime = "06/2019",
fileUrl = u"https://data.culture.gouv.fr/explore/dataset/liste-des-immeubles-proteges-au-titre-des-monuments-historiques/download/?format=csv&timezone=Europe/Berlin&use_labels_for_header=true",
filter = lambda s: reduce(lambda a, v: a.replace(v, ''), SKIP, (u'' + s).encode('utf-8').replace(b'l\u92', b"l'").replace(b'\x85)', b"...)").decode('utf-8', 'ignore'))),
columns = 'Adresse', citycode = 'INSEE', delimiter = u';', logger = logger),
columns = ['Adresse', 'Commune'], citycode = 'INSEE', delimiter = u';', logger = logger),
separator = u';'),
Load("longitude", "latitude",
map = self.coord_fallback,
select = {u"Date de Protection": True}),
Mapping(
select = Select(
@@ -92,3 +93,8 @@ def parseDPRO(dpro):
mapping2 = {"name": lambda res: res[u"Appellation courante"] if res[u"Appellation courante"] not in BLACK_WORDS else None},
tag_keep_multiple_values = ["heritage:operator"],
text = lambda tags, fields: T_f(u"Historical monument: {0} (positioned at {1} with confidence {2})", ", ".join(filter(lambda x: x, [fields[u"Date de Protection"], fields[u"Adresse"], fields[u"Commune"]])), fields[u"result_type"], fields[u"result_score"]) )))

def coord_fallback(self, fields):
if not fields['longitude'] and fields['Coordonnées INSEE']:
fields['latitude'], fields['longitude'] = list(map(lambda s: float(s.strip()), fields['Coordonnées INSEE'].split(',')))
return fields
6 changes: 3 additions & 3 deletions analysers/analyser_merge_power_plant_FR.py
@@ -35,12 +35,12 @@ def __init__(self, config, logger = None):
Analyser_Merge.__init__(self, config, logger,
u"https://opendata.reseaux-energies.fr/explore/dataset/registre-national-installation-production-stockage-electricite-agrege-311217",
u"Registre national des installations de production d'électricité et de stockage",
CSV(Geocode_Addok_CSV(Source(attribution = u"data.gouv.fr:RTE", millesime = "2017",
CSV(Geocode_Addok_CSV(Source(attribution = u"data.gouv.fr:RTE", millesime = "2019",
fileUrl = u"https://opendata.reseaux-energies.fr/explore/dataset/registre-national-installation-production-stockage-electricite-agrege-311217/download/?format=csv&timezone=Europe/Berlin&use_labels_for_header=true"),
columns = 'Commune', citycode = 'codeINSEECommune', delimiter = ';', logger = logger),
separator = u";"),
Load("longitude", "latitude",
where = lambda res: res.get('max_puissance') and float(res["max_puissance"]) > 1000,
where = lambda res: res.get('puisMaxRac') and float(res["puisMaxRac"]) > 1000,
map = lambda res: dict(res, **{"_x": float(res["_x"]) + (Stablehash.stablehash(str(res)) % 200 - 100) * 0.00001, "_y": float(res["_y"]) + (Stablehash.stablehash(str(res)) % 212 - 106) * 0.00001})),
Mapping(
select = Select(
@@ -55,7 +55,7 @@
# No voltage tag on power=plant
#"voltage": lambda fields: (int(fields["Tension raccordement"].split(' ')[0]) * 1000) if fields.get("Tension raccordement") and fields["Tension raccordement"] not in ["< 45 kV", "BT", "HTA"] else None,
"plant:source": lambda fields: self.filiere[fields["Filière"]][fields["Combustible"]],
"plant:output:electricity": lambda fields: int(float(fields["max_puissance"]) * 1000)},
"plant:output:electricity": lambda fields: int(float(fields["puisMaxRac"]) * 1000)},
mapping2 = {
"start_date": lambda fields: None if not fields.get(u"dateMiseEnService") else fields[u"dateMiseEnService"][0:4] if fields[u"dateMiseEnService"].endswith('-01-01') or fields[u"dateMiseEnService"].endswith('-12-31') else fields[u"dateMiseEnService"]},
tag_keep_multiple_values = ["voltage"],
26 changes: 14 additions & 12 deletions analysers/analyser_osmosis_addr_interpolation.py
@@ -40,15 +40,21 @@
sql02 = """
CREATE TEMP TABLE interpolation_nodes AS
SELECT
interpolations.id AS w_id,
nodes.*
array_agg(interpolations.id) AS w_ids,
nodes.id,
nodes.tags,
nodes.geom
FROM
interpolations
JOIN nodes ON
nodes.geom && interpolations.linestring AND
nodes.id = ANY (interpolations.nodes) AND
nodes.tags != ''::hstore AND
nodes.tags - ARRAY['source'] != ''::hstore
GROUP BY
nodes.id,
nodes.tags,
nodes.geom
"""

sql03 = """
@@ -72,12 +78,8 @@
FROM
interpolation_nodes
WHERE
tags?'addr:housenumber'
GROUP BY
id,
geom
HAVING
count(*) > 1
tags?'addr:housenumber' AND
array_length(w_ids, 1) > 1
"""

sql30 = """
@@ -99,7 +101,7 @@
FROM
interpolations AS ways
LEFT JOIN interpolation_nodes AS nodes ON
nodes.w_id = ways.id AND
ways.id = ANY (nodes.w_ids) AND
(
nodes.id = ways.nodes[1] OR
nodes.id = ways.nodes[array_length(nodes,1)]
@@ -116,11 +118,11 @@
FROM
interpolations AS ways
JOIN interpolation_nodes AS nodes_s ON
nodes_s.w_id = ways.id AND
ways.id = ANY (nodes_s.w_ids) AND
nodes_s.id = ways.nodes[1] AND
nodes_s.tags?'addr:housenumber'
JOIN interpolation_nodes AS nodes_e ON
nodes_e.w_id = ways.id AND
ways.id = ANY (nodes_e.w_ids) AND
nodes_e.id = ways.nodes[array_length(nodes,1)] AND
nodes_e.tags?'addr:housenumber'
WHERE
@@ -133,7 +135,7 @@
ST_AsText(min(geom)),
string_agg(DISTINCT tags->'addr:street', ', ')
FROM
interpolation_nodes AS nodes
(SELECT *, unnest(w_ids) AS w_id FROM interpolation_nodes) AS nodes
WHERE
tags != ''::hstore AND
tags?'addr:street'
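
interpolation_nodes now carries one row per node, with all referencing interpolation ways folded into a w_ids array; the duplicate issues disappear because later queries either test array_length(w_ids, 1) directly or unnest the array when a per-way row is still needed. A small round-trip illustration of that pattern on a hypothetical membership table:

-- Hypothetical membership table: one row per (way, node) pair.
CREATE TEMP TABLE membership (way_id bigint, node_id bigint);
INSERT INTO membership VALUES (10, 1), (11, 1), (12, 2);

-- Collapse to one row per node, remembering every referencing way.
CREATE TEMP TABLE node_ways AS
SELECT node_id, array_agg(way_id) AS w_ids
FROM membership
GROUP BY node_id;

-- Nodes shared by several ways (replaces the old GROUP BY ... HAVING count(*) > 1).
SELECT node_id FROM node_ways WHERE array_length(w_ids, 1) > 1;

-- Expand back to one row per (way, node) when a join on the way id is needed.
SELECT node_id, unnest(w_ids) AS w_id FROM node_ways;
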
4 changes: 3 additions & 1 deletion analysers/analyser_osmosis_boundary_relation.py
@@ -24,7 +24,7 @@

sql00 = """
CREATE TEMP TABLE {0}_{1}_admin AS
SELECT
SELECT DISTINCT ON (relations.id)
relations.id,
(relation_members.member_role IS NOT NULL) AS has_admin_centre,
relations.tags AS rtags,
@@ -48,6 +48,8 @@
relations.tags->'boundary' = 'administrative' AND
relations.tags?'admin_level' AND
relations.tags->'admin_level' = '{2}'
ORDER BY
relations.id
"""

sql10 = """
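
SELECT DISTINCT ON (relations.id) is what removes the duplicates here: a relation joined to several matching admin_centre member rows now yields a single output row, and the added ORDER BY relations.id satisfies PostgreSQL's requirement that the DISTINCT ON expressions lead the sort. A toy illustration with hypothetical data:

-- Hypothetical data: relation 7 is (incorrectly) linked to two admin_centre rows.
CREATE TEMP TABLE rel_members (relation_id bigint, member_role text);
INSERT INTO rel_members VALUES (7, 'admin_centre'), (7, 'admin_centre'), (8, 'label');

SELECT DISTINCT ON (relation_id)
    relation_id,
    member_role
FROM
    rel_members
ORDER BY
    relation_id;  -- one row per relation_id, duplicates dropped
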
38 changes: 16 additions & 22 deletions analysers/analyser_osmosis_highway_broken_level_continuity.py
@@ -23,11 +23,12 @@
from .Analyser_Osmosis import Analyser_Osmosis

sql13 = """
CREATE TEMP VIEW orphan_endin AS
CREATE TEMP TABLE orphan_endin AS
SELECT
network.id,
network.nid,
network.level,
network.geom,
CASE network.level
WHEN 1 THEN (ways.highway IN ('construction', 'motorway', 'motorway_link', 'trunk', 'trunk_link', 'primary', 'primary_link'))
WHEN 2 THEN (ways.highway IN ('construction', 'motorway', 'motorway_link', 'trunk', 'trunk_link', 'primary', 'primary_link', 'secondary', 'secondary_link'))
@@ -48,58 +49,52 @@
1,
2,
3,
4
4,
5
"""

sql14 = """
CREATE TEMP VIEW orphan0 AS
CREATE TEMP TABLE orphan AS
SELECT
id,
nid,
level
level,
geom
FROM
orphan_endin
GROUP BY
id,
nid,
level
level,
geom
HAVING
NOT BOOL_OR(orphan_endin.endin)
"""

sql15 = """
CREATE TEMP TABLE orphan1 AS
SELECT
orphan0.*,
geom
FROM
orphan0
JOIN nodes ON
orphan0.nid = nodes.id
"""

sql16 = """
CREATE INDEX orphan1_level_idx ON orphan1(level)
CREATE INDEX orphan_level_idx ON orphan(level)
"""

sql17 = """
CREATE INDEX orphan1_geom_idx ON orphan1 USING gist(geom)
CREATE INDEX orphan_geom_idx ON orphan USING gist(geom)
"""

sql18 = """
SELECT
o1.id,
o1.nid,
ST_AsText(o1.geom),
o1.level
FROM
orphan1 AS o1,
orphan1 AS o2
orphan AS o1,
orphan AS o2
WHERE
o1.nid != o2.nid AND
o1.level = o2.level AND
ST_DistanceSphere(o1.geom, o2.geom) < 1000
GROUP BY
o1.id,
o1.nid,
o1.level,
o1.geom
"""
@@ -117,7 +112,6 @@ def __init__(self, config, logger = None):
def analyser_osmosis_common(self):
self.run(sql13)
self.run(sql14)
self.run(sql15)
self.run(sql16)
self.run(sql17)
self.run(sql18, lambda res: {"class":res[2], "data":[self.way_full, self.positionAsText]} )
self.run(sql18, lambda res: {"class":res[3], "data":[self.way_full, self.node, self.positionAsText]} )
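
Besides carrying geom through the pipeline (which lets sql15 and its join to nodes be dropped), orphan_endin and orphan are changed from TEMP VIEWs to TEMP TABLEs: the intermediate result is materialized once, and the indexes created by sql16/sql17 then apply to a real table rather than to a view that would be re-evaluated on every read. A schematic sketch of that view-to-table pattern, with placeholder names (assumes PostGIS):

-- Placeholder source table standing in for the real network data.
CREATE TEMP TABLE network_sketch (id bigint, level int, geom geometry);

-- Before (schematic): a TEMP VIEW is re-run by every query that reads it and cannot be indexed.
-- CREATE TEMP VIEW candidates AS SELECT id, level, geom FROM network_sketch WHERE level <= 5;

-- After: materialize once, then index the columns used to filter and join.
CREATE TEMP TABLE candidates AS
SELECT id, level, geom FROM network_sketch WHERE level <= 5;

CREATE INDEX candidates_level_idx ON candidates(level);
CREATE INDEX candidates_geom_idx ON candidates USING gist(geom);

-- The follow-up self-join filters on the indexed level column.
SELECT c1.id
FROM candidates AS c1, candidates AS c2
WHERE c1.id != c2.id
  AND c1.level = c2.level
  AND ST_DistanceSphere(c1.geom, c2.geom) < 1000;
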
54 changes: 23 additions & 31 deletions analysers/analyser_osmosis_highway_cul-de-sac_level.py
@@ -24,35 +24,27 @@

sql40 = """
SELECT
t.id,
t.nid,
ST_AsText(nodes.geom),
level
way_ends.id,
way_ends.nid,
ST_AsText(way_ends.geom),
way_ends.level
FROM
(
SELECT
way_ends.id,
way_ends.nid,
way_ends.level
FROM
highway_ends AS way_ends
JOIN way_nodes ON
way_ends.nid = way_nodes.node_id AND
way_nodes.way_id != way_ends.id
JOIN highway_ends AS highway_level ON
highway_level.id = way_nodes.way_id
WHERE
NOT way_ends.is_roundabout AND
way_ends.level <= 3
GROUP BY
way_ends.id,
way_ends.nid,
way_ends.level
HAVING
BOOL_AND(way_ends.level + 1 < highway_level.level)
) AS t
JOIN nodes ON
nodes.id = nid
highway_ends AS way_ends
JOIN way_nodes ON
way_ends.nid = way_nodes.node_id AND
way_nodes.way_id != way_ends.id
JOIN highway_ends AS highway_level ON
highway_level.id = way_nodes.way_id
WHERE
NOT way_ends.is_roundabout AND
way_ends.level <= 3
GROUP BY
way_ends.id,
way_ends.nid,
way_ends.geom,
way_ends.level
HAVING
BOOL_AND(way_ends.level + 1 < highway_level.level)
"""

class Analyser_Osmosis_Highway_CulDeSac_Level(Analyser_Osmosis):
Expand All @@ -61,9 +53,9 @@ class Analyser_Osmosis_Highway_CulDeSac_Level(Analyser_Osmosis):

def __init__(self, config, logger = None):
Analyser_Osmosis.__init__(self, config, logger)
self.classs[1] = {"item":"1090", "level": 1, "tag": ["highway", "fix:chair"], "desc": T_f(u"Bad topology way level {0}", 1) }
self.classs[2] = {"item":"1090", "level": 2, "tag": ["highway", "fix:chair"], "desc": T_f(u"Bad topology way level {0}", 2) }
self.classs[3] = {"item":"1090", "level": 2, "tag": ["highway", "fix:chair"], "desc": T_f(u"Bad topology way level {0}", 3) }
self.classs[1] = {"item":"1090", "level": 1, "tag": ["highway", "fix:chair"], "desc": T_f(u"Sudden highway type change (level {0})", 1) }
self.classs[2] = {"item":"1090", "level": 2, "tag": ["highway", "fix:chair"], "desc": T_f(u"Sudden highway type change (level {0})", 2) }
self.classs[3] = {"item":"1090", "level": 2, "tag": ["highway", "fix:chair"], "desc": T_f(u"Sudden highway type change (level {0})", 3) }

def analyser_osmosis_common(self):
self.run(sql40, lambda res: {"class":res[3], "data":[self.way, self.node, self.positionAsText]} )
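
The simplified query takes the end point geometry straight from highway_ends instead of re-joining nodes, and the HAVING BOOL_AND(way_ends.level + 1 < highway_level.level) clause only reports an end node when every continuing way at that node is more than one level further down the classification. A toy illustration of that aggregate, with the ending way's level hard-coded to 1:

-- Hypothetical continuations at the end node of two level-1 ways.
CREATE TEMP TABLE continuations (end_way bigint, other_level int);
INSERT INTO continuations VALUES (1, 4), (1, 5);  -- only much lower classes continue
INSERT INTO continuations VALUES (2, 4), (2, 1);  -- one same-level continuation exists

SELECT end_way
FROM continuations
GROUP BY end_way
HAVING BOOL_AND(1 + 1 < other_level);  -- keeps way 1 only; way 2 continues at level 1
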
4 changes: 3 additions & 1 deletion analysers/analyser_osmosis_highway_zone.py
@@ -81,7 +81,7 @@

# Get candidates ways in the resulting envelope
sql14 = """
SELECT
SELECT DISTINCT ON (highways.id)
highways.id,
ST_AsText(way_locate(linestring))
FROM
@@ -97,6 +97,8 @@
ST_Length(ST_Intersection(highways.linestring_proj, a3.geom)) / ST_Length(highways.linestring) > 0.8
WHERE
NOT ST_IsEmpty(a3.geom)
ORDER BY
highways.id
"""

class Analyser_Osmosis_Highway_Zone(Analyser_Osmosis):
2 changes: 2 additions & 0 deletions analysers/analyser_osmosis_node_like_way.py
@@ -67,6 +67,8 @@
slice(nodes.tags, ARRAY['aerialway', 'aeroway', 'amenity', 'highway', 'landuse', 'leisure', 'natural', 'railway', 'waterway', 'building']) OR
slice(ways.tags, ARRAY['aerialway', 'aeroway', 'amenity', 'highway', 'landuse', 'leisure', 'natural', 'railway', 'waterway', 'building']) <@
slice(nodes.tags, ARRAY['aerialway', 'aeroway', 'amenity', 'highway', 'landuse', 'leisure', 'natural', 'railway', 'waterway', 'building'])
ORDER BY
nodes.id
"""

class Analyser_Osmosis_Node_Like_Way(Analyser_Osmosis):