diff --git a/analysers/Analyser_Merge.py b/analysers/Analyser_Merge.py index 704b6b6b0..bdaad6bed 100644 --- a/analysers/Analyser_Merge.py +++ b/analysers/Analyser_Merge.py @@ -55,8 +55,7 @@ """ sql00 = """ -DROP TABLE IF EXISTS %(official)s CASCADE; -CREATE UNLOGGED TABLE %(official)s ( +CREATE TEMP TABLE %(official)s_temp ( ref varchar(65534), tags hstore, tags1 hstore, @@ -97,7 +96,7 @@ sql02 = """ INSERT INTO - %(official)s + %(official)s_temp VALUES ( %(ref)s, %(tags)s, @@ -107,6 +106,25 @@ ) """ +sql02b = """ +DROP TABLE IF EXISTS %(official)s CASCADE; +CREATE UNLOGGED TABLE %(official)s AS +SELECT + ref, + tags, + tags1, + fields, + geom +FROM + %(official)s_temp +GROUP BY + ref, + tags, + tags1, + fields, + geom +""" + sql03 = """ CREATE INDEX ir_%(official)s ON %(official)s(ref); CREATE INDEX ig_%(official)s ON %(official)s USING GIST(geom); @@ -688,6 +706,7 @@ def insertOfficial(res): else: distinct = order_by = "" osmosis.run0((sql01_ref if mapping.osmRef != "NULL" else sql01_geo) % {"table":table, "x":self.x, "y":self.y, "where":self.formatCSVSelect(), "distinct": distinct, "order_by": order_by}, insertOfficial) + giscurs.execute(sql02b.replace("%(official)s", tableOfficial)) if self.srid: giscurs.execute("SELECT ST_AsText(ST_Envelope(ST_Extent(geom::geometry))::geography) FROM %s" % tableOfficial) self.bbox = giscurs.fetchone()[0] diff --git a/analysers/Analyser_Osmosis.py b/analysers/Analyser_Osmosis.py index e22e4843e..c73538a02 100644 --- a/analysers/Analyser_Osmosis.py +++ b/analysers/Analyser_Osmosis.py @@ -94,7 +94,8 @@ class Analyser_Osmosis(Analyser): highway, is_link, is_roundabout, - ends(nodes) AS nid, + (ends_geom(nodes, linestring)).id AS nid, + (ends_geom(nodes, linestring)).geom AS geom, level FROM highways diff --git a/analysers/analyser_merge_heritage_FR_merimee.py b/analysers/analyser_merge_heritage_FR_merimee.py index a3c22645b..41e7f249e 100644 --- a/analysers/analyser_merge_heritage_FR_merimee.py +++ b/analysers/analyser_merge_heritage_FR_merimee.py @@ -69,9 +69,10 @@ def parseDPRO(dpro): CSV(Geocode_Addok_CSV(Source(attribution = u"Ministère de la Culture", millesime = "06/2019", fileUrl = u"https://data.culture.gouv.fr/explore/dataset/liste-des-immeubles-proteges-au-titre-des-monuments-historiques/download/?format=csv&timezone=Europe/Berlin&use_labels_for_header=true", filter = lambda s: reduce(lambda a, v: a.replace(v, ''), SKIP, (u'' + s).encode('utf-8').replace(b'l\u92', b"l'").replace(b'\x85)', b"...)").decode('utf-8', 'ignore'))), - columns = 'Adresse', citycode = 'INSEE', delimiter = u';', logger = logger), + columns = ['Adresse', 'Commune'], citycode = 'INSEE', delimiter = u';', logger = logger), separator = u';'), Load("longitude", "latitude", + map = self.coord_fallback, select = {u"Date de Protection": True}), Mapping( select = Select( @@ -92,3 +93,8 @@ def parseDPRO(dpro): mapping2 = {"name": lambda res: res[u"Appellation courante"] if res[u"Appellation courante"] not in BLACK_WORDS else None}, tag_keep_multiple_values = ["heritage:operator"], text = lambda tags, fields: T_f(u"Historical monument: {0} (positioned at {1} with confidence {2})", ", ".join(filter(lambda x: x, [fields[u"Date de Protection"], fields[u"Adresse"], fields[u"Commune"]])), fields[u"result_type"], fields[u"result_score"]) ))) + + def coord_fallback(self, fields): + if not fields['longitude'] and fields['Coordonnées INSEE']: + fields['latitude'], fields['longitude'] = list(map(lambda s: float(s.strip()), fields['Coordonnées INSEE'].split(','))) + return fields diff --git 
a/analysers/analyser_merge_power_plant_FR.py b/analysers/analyser_merge_power_plant_FR.py index 126cc4299..bc41a0819 100644 --- a/analysers/analyser_merge_power_plant_FR.py +++ b/analysers/analyser_merge_power_plant_FR.py @@ -35,12 +35,12 @@ def __init__(self, config, logger = None): Analyser_Merge.__init__(self, config, logger, u"https://opendata.reseaux-energies.fr/explore/dataset/registre-national-installation-production-stockage-electricite-agrege-311217", u"Registre national des installations de production d'électricité et de stockage", - CSV(Geocode_Addok_CSV(Source(attribution = u"data.gouv.fr:RTE", millesime = "2017", + CSV(Geocode_Addok_CSV(Source(attribution = u"data.gouv.fr:RTE", millesime = "2019", fileUrl = u"https://opendata.reseaux-energies.fr/explore/dataset/registre-national-installation-production-stockage-electricite-agrege-311217/download/?format=csv&timezone=Europe/Berlin&use_labels_for_header=true"), columns = 'Commune', citycode = 'codeINSEECommune', delimiter = ';', logger = logger), separator = u";"), Load("longitude", "latitude", - where = lambda res: res.get('max_puissance') and float(res["max_puissance"]) > 1000, + where = lambda res: res.get('puisMaxRac') and float(res["puisMaxRac"]) > 1000, map = lambda res: dict(res, **{"_x": float(res["_x"]) + (Stablehash.stablehash(str(res)) % 200 - 100) * 0.00001, "_y": float(res["_y"]) + (Stablehash.stablehash(str(res)) % 212 - 106) * 0.00001})), Mapping( select = Select( @@ -55,7 +55,7 @@ def __init__(self, config, logger = None): # No voltage tga on power=plant #"voltage": lambda fields: (int(fields["Tension raccordement"].split(' ')[0]) * 1000) if fields.get("Tension raccordement") and fields["Tension raccordement"] not in ["< 45 kV", "BT", "HTA"] else None, "plant:source": lambda fields: self.filiere[fields["Filière"]][fields["Combustible"]], - "plant:output:electricity": lambda fields: int(float(fields["max_puissance"]) * 1000)}, + "plant:output:electricity": lambda fields: int(float(fields["puisMaxRac"]) * 1000)}, mapping2 = { "start_date": lambda fields: None if not fields.get(u"dateMiseEnService") else fields[u"dateMiseEnService"][0:4] if fields[u"dateMiseEnService"].endswith('-01-01') or fields[u"dateMiseEnService"].endswith('-12-31') else fields[u"dateMiseEnService"]}, tag_keep_multiple_values = ["voltage"], diff --git a/analysers/analyser_osmosis_addr_interpolation.py b/analysers/analyser_osmosis_addr_interpolation.py index 022cf4d0f..ec7a439e7 100644 --- a/analysers/analyser_osmosis_addr_interpolation.py +++ b/analysers/analyser_osmosis_addr_interpolation.py @@ -40,8 +40,10 @@ sql02 = """ CREATE TEMP TABLE interpolation_nodes AS SELECT - interpolations.id AS w_id, - nodes.* + array_agg(interpolations.id) AS w_ids, + nodes.id, + nodes.tags, + nodes.geom FROM interpolations JOIN nodes ON @@ -49,6 +51,10 @@ nodes.id = ANY (interpolations.nodes) AND nodes.tags != ''::hstore AND nodes.tags - ARRAY['source'] != ''::hstore +GROUP BY + nodes.id, + nodes.tags, + nodes.geom """ sql03 = """ @@ -72,12 +78,8 @@ FROM interpolation_nodes WHERE - tags?'addr:housenumber' -GROUP BY - id, - geom -HAVING - count(*) > 1 + tags?'addr:housenumber' AND + array_length(w_ids, 1) > 1 """ sql30 = """ @@ -99,7 +101,7 @@ FROM interpolations AS ways LEFT JOIN interpolation_nodes AS nodes ON - nodes.w_id = ways.id AND + ways.id = ANY (nodes.w_ids) AND ( nodes.id = ways.nodes[1] OR nodes.id = ways.nodes[array_length(nodes,1)] @@ -116,11 +118,11 @@ FROM interpolations AS ways JOIN interpolation_nodes AS nodes_s ON - nodes_s.w_id = ways.id AND 
+ ways.id = ANY (nodes_s.w_ids) AND nodes_s.id = ways.nodes[1] AND nodes_s.tags?'addr:housenumber' JOIN interpolation_nodes AS nodes_e ON - nodes_e.w_id = ways.id AND + ways.id = ANY (nodes_e.w_ids) AND nodes_e.id = ways.nodes[array_length(nodes,1)] AND nodes_e.tags?'addr:housenumber' WHERE @@ -133,7 +135,7 @@ ST_AsText(min(geom)), string_agg(DISTINCT tags->'addr:street', ', ') FROM - interpolation_nodes AS nodes + (SELECT *, unnest(w_ids) AS w_id FROM interpolation_nodes) AS nodes WHERE tags != ''::hstore AND tags?'addr:street' diff --git a/analysers/analyser_osmosis_boundary_relation.py b/analysers/analyser_osmosis_boundary_relation.py index ae840c359..c59860a53 100644 --- a/analysers/analyser_osmosis_boundary_relation.py +++ b/analysers/analyser_osmosis_boundary_relation.py @@ -24,7 +24,7 @@ sql00 = """ CREATE TEMP TABLE {0}_{1}_admin AS -SELECT +SELECT DISTINCT ON (relations.id) relations.id, (relation_members.member_role IS NOT NULL) AS has_admin_centre, relations.tags AS rtags, @@ -48,6 +48,8 @@ relations.tags->'boundary' = 'administrative' AND relations.tags?'admin_level' AND relations.tags->'admin_level' = '{2}' +ORDER BY + relations.id """ sql10 = """ diff --git a/analysers/analyser_osmosis_highway_broken_level_continuity.py b/analysers/analyser_osmosis_highway_broken_level_continuity.py index 0cfa05a39..e5fd82ab5 100644 --- a/analysers/analyser_osmosis_highway_broken_level_continuity.py +++ b/analysers/analyser_osmosis_highway_broken_level_continuity.py @@ -23,11 +23,12 @@ from .Analyser_Osmosis import Analyser_Osmosis sql13 = """ -CREATE TEMP VIEW orphan_endin AS +CREATE TEMP TABLE orphan_endin AS SELECT network.id, network.nid, network.level, + network.geom, CASE network.level WHEN 1 THEN (ways.highway IN ('construction', 'motorway', 'motorway_link', 'trunk', 'trunk_link', 'primary', 'primary_link')) WHEN 2 THEN (ways.highway IN ('construction', 'motorway', 'motorway_link', 'trunk', 'trunk_link', 'primary', 'primary_link', 'secondary', 'secondary_link')) @@ -48,58 +49,52 @@ 1, 2, 3, - 4 + 4, + 5 """ sql14 = """ -CREATE TEMP VIEW orphan0 AS +CREATE TEMP TABLE orphan AS SELECT id, nid, - level + level, + geom FROM orphan_endin GROUP BY id, nid, - level + level, + geom HAVING NOT BOOL_OR(orphan_endin.endin) """ -sql15 = """ -CREATE TEMP TABLE orphan1 AS -SELECT - orphan0.*, - geom -FROM - orphan0 - JOIN nodes ON - orphan0.nid = nodes.id -""" - sql16 = """ -CREATE INDEX orphan1_level_idx ON orphan1(level) +CREATE INDEX orphan_level_idx ON orphan(level) """ sql17 = """ -CREATE INDEX orphan1_geom_idx ON orphan1 USING gist(geom) +CREATE INDEX orphan_geom_idx ON orphan USING gist(geom) """ sql18 = """ SELECT o1.id, + o1.nid, ST_AsText(o1.geom), o1.level FROM - orphan1 AS o1, - orphan1 AS o2 + orphan AS o1, + orphan AS o2 WHERE o1.nid != o2.nid AND o1.level = o2.level AND ST_DistanceSphere(o1.geom, o2.geom) < 1000 GROUP BY o1.id, + o1.nid, o1.level, o1.geom """ @@ -117,7 +112,6 @@ def __init__(self, config, logger = None): def analyser_osmosis_common(self): self.run(sql13) self.run(sql14) - self.run(sql15) self.run(sql16) self.run(sql17) - self.run(sql18, lambda res: {"class":res[2], "data":[self.way_full, self.positionAsText]} ) + self.run(sql18, lambda res: {"class":res[3], "data":[self.way_full, self.node, self.positionAsText]} ) diff --git a/analysers/analyser_osmosis_highway_cul-de-sac_level.py b/analysers/analyser_osmosis_highway_cul-de-sac_level.py index b14940f73..e2c1c11e2 100644 --- a/analysers/analyser_osmosis_highway_cul-de-sac_level.py +++ 
b/analysers/analyser_osmosis_highway_cul-de-sac_level.py @@ -24,35 +24,27 @@ sql40 = """ SELECT - t.id, - t.nid, - ST_AsText(nodes.geom), - level + way_ends.id, + way_ends.nid, + ST_AsText(way_ends.geom), + way_ends.level FROM - ( - SELECT - way_ends.id, - way_ends.nid, - way_ends.level - FROM - highway_ends AS way_ends - JOIN way_nodes ON - way_ends.nid = way_nodes.node_id AND - way_nodes.way_id != way_ends.id - JOIN highway_ends AS highway_level ON - highway_level.id = way_nodes.way_id - WHERE - NOT way_ends.is_roundabout AND - way_ends.level <= 3 - GROUP BY - way_ends.id, - way_ends.nid, - way_ends.level - HAVING - BOOL_AND(way_ends.level + 1 < highway_level.level) - ) AS t - JOIN nodes ON - nodes.id = nid + highway_ends AS way_ends + JOIN way_nodes ON + way_ends.nid = way_nodes.node_id AND + way_nodes.way_id != way_ends.id + JOIN highway_ends AS highway_level ON + highway_level.id = way_nodes.way_id +WHERE + NOT way_ends.is_roundabout AND + way_ends.level <= 3 +GROUP BY + way_ends.id, + way_ends.nid, + way_ends.geom, + way_ends.level +HAVING + BOOL_AND(way_ends.level + 1 < highway_level.level) """ class Analyser_Osmosis_Highway_CulDeSac_Level(Analyser_Osmosis): @@ -61,9 +53,9 @@ class Analyser_Osmosis_Highway_CulDeSac_Level(Analyser_Osmosis): def __init__(self, config, logger = None): Analyser_Osmosis.__init__(self, config, logger) - self.classs[1] = {"item":"1090", "level": 1, "tag": ["highway", "fix:chair"], "desc": T_f(u"Bad topology way level {0}", 1) } - self.classs[2] = {"item":"1090", "level": 2, "tag": ["highway", "fix:chair"], "desc": T_f(u"Bad topology way level {0}", 2) } - self.classs[3] = {"item":"1090", "level": 2, "tag": ["highway", "fix:chair"], "desc": T_f(u"Bad topology way level {0}", 3) } + self.classs[1] = {"item":"1090", "level": 1, "tag": ["highway", "fix:chair"], "desc": T_f(u"Sudden highway type change (level {0})", 1) } + self.classs[2] = {"item":"1090", "level": 2, "tag": ["highway", "fix:chair"], "desc": T_f(u"Sudden highway type change (level {0})", 2) } + self.classs[3] = {"item":"1090", "level": 2, "tag": ["highway", "fix:chair"], "desc": T_f(u"Sudden highway type change (level {0})", 3) } def analyser_osmosis_common(self): self.run(sql40, lambda res: {"class":res[3], "data":[self.way, self.node, self.positionAsText]} ) diff --git a/analysers/analyser_osmosis_highway_zone.py b/analysers/analyser_osmosis_highway_zone.py index 3bd3c6a5f..6f5108826 100644 --- a/analysers/analyser_osmosis_highway_zone.py +++ b/analysers/analyser_osmosis_highway_zone.py @@ -81,7 +81,7 @@ # Get candidates ways in the resulting envelope sql14 = """ -SELECT +SELECT DISTINCT ON (highways.id) highways.id, ST_AsText(way_locate(linestring)) FROM @@ -97,6 +97,8 @@ ST_Length(ST_Intersection(highways.linestring_proj, a3.geom)) / ST_Length(highways.linestring) > 0.8 WHERE NOT ST_IsEmpty(a3.geom) +ORDER BY + highways.id """ class Analyser_Osmosis_Highway_Zone(Analyser_Osmosis): diff --git a/analysers/analyser_osmosis_node_like_way.py b/analysers/analyser_osmosis_node_like_way.py index 53333be1b..958b18efe 100644 --- a/analysers/analyser_osmosis_node_like_way.py +++ b/analysers/analyser_osmosis_node_like_way.py @@ -67,6 +67,8 @@ slice(nodes.tags, ARRAY['aerialway', 'aeroway', 'amenity', 'highway', 'landuse', 'leisure', 'natural', 'railway', 'waterway', 'building']) OR slice(ways.tags, ARRAY['aerialway', 'aeroway', 'amenity', 'highway', 'landuse', 'leisure', 'natural', 'railway', 'waterway', 'building']) <@ slice(nodes.tags, ARRAY['aerialway', 'aeroway', 'amenity', 'highway', 'landuse', 
'leisure', 'natural', 'railway', 'waterway', 'building']) +ORDER BY + nodes.id """ class Analyser_Osmosis_Node_Like_Way(Analyser_Osmosis): diff --git a/analysers/analyser_osmosis_relation_associatedStreet.py b/analysers/analyser_osmosis_relation_associatedStreet.py index 137d7361c..6fe713d38 100644 --- a/analysers/analyser_osmosis_relation_associatedStreet.py +++ b/analysers/analyser_osmosis_relation_associatedStreet.py @@ -123,7 +123,7 @@ # role street without highway sql30 = """ -SELECT +SELECT DISTINCT ON (ways.id) ways.id, relations.id, ST_ASText(way_locate(linestring)) @@ -139,11 +139,13 @@ WHERE relations.tags?'type' AND relations.tags->'type' = 'associatedStreet' +ORDER BY + ways.id """ # roleless member node in relation sql40 = """ -SELECT +SELECT DISTINCT ON (nodes.id) nodes.id, relations.id, ST_AsText(geom) @@ -158,11 +160,13 @@ WHERE relations.tags?'type' AND relations.tags->'type' = 'associatedStreet' +ORDER BY + nodes.id """ # roleless member way in relation sql41 = """ -SELECT +SELECT DISTINCT ON (ways.id) ways.id, relations.id, ST_AsText(way_locate(linestring)) @@ -177,11 +181,13 @@ WHERE relations.tags?'type' AND relations.tags->'type' = 'associatedStreet' +ORDER BY + ways.id """ # node of relation without addr:housenumber nor addr:housename sql50 = """ -SELECT +SELECT DISTINCT ON (nodes.id) nodes.id, relations.id, ST_AsText(geom) @@ -196,11 +202,13 @@ WHERE relations.tags?'type' AND relations.tags->'type' = 'associatedStreet' +ORDER BY + nodes.id """ # house role way of relation without addr:housenumber nor addr:housename sql51 = """ -SELECT +SELECT DISTINCT ON (ways.id) ways.id, relations.id, ST_AsText(way_locate(linestring)) @@ -216,12 +224,14 @@ WHERE relations.tags?'type' AND relations.tags->'type' = 'associatedStreet' +ORDER BY + ways.id """ # many time same number in street sql60 = """ CREATE TEMP TABLE housenumber AS ( -SELECT +SELECT DISTINCT ON (id) type, id, geom_proj, @@ -234,8 +244,10 @@ WHERE role = 'house' AND NOT flats +ORDER BY + id ) UNION ALL ( -SELECT +SELECT DISTINCT ON (id) type, id, ST_Centroid(linestring_proj) AS geom_proj, @@ -248,6 +260,8 @@ WHERE role = 'house' AND NOT flats +ORDER BY + id ) """ diff --git a/analysers/analyser_osmosis_relation_enforcement.py b/analysers/analyser_osmosis_relation_enforcement.py index bc19074ac..efacf1fef 100644 --- a/analysers/analyser_osmosis_relation_enforcement.py +++ b/analysers/analyser_osmosis_relation_enforcement.py @@ -23,7 +23,7 @@ from .Analyser_Osmosis import Analyser_Osmosis sql00 = """ -SELECT +SELECT DISTINCT ON (nodes.id) nodes.id, ST_AsText(nodes.geom) FROM @@ -44,6 +44,8 @@ nodes.tags->'highway' = 'speed_camera' AND relations.id IS NULL AND highways.id IS NULL +ORDER BY + nodes.id """ class Analyser_Osmosis_Relation_Enforcement(Analyser_Osmosis): diff --git a/analysers/analyser_osmosis_relation_restriction.py b/analysers/analyser_osmosis_relation_restriction.py index 57870a062..6f169108f 100644 --- a/analysers/analyser_osmosis_relation_restriction.py +++ b/analysers/analyser_osmosis_relation_restriction.py @@ -104,7 +104,7 @@ sql20 = """ CREATE TEMP TABLE bad_member AS -SELECT +SELECT DISTINCT ON (restrictions.id, ways.id) restrictions.id AS rid, ways.id AS wid, ways.linestring @@ -124,6 +124,9 @@ noto = 0 AND ((nnvia = 1 AND nwvia = 0) OR (nnvia = 0 AND nwvia > 0)) AND nrvia = 0 +ORDER BY + restrictions.id, + ways.id """ sql21 = """ diff --git a/analysers/analyser_osmosis_roundabout_level.py b/analysers/analyser_osmosis_roundabout_level.py index 5573aea6a..63b3175b9 100644 --- 
a/analysers/analyser_osmosis_roundabout_level.py +++ b/analysers/analyser_osmosis_roundabout_level.py @@ -180,6 +180,7 @@ sql22 = """ SELECT ra1.a_id, + COALESCE(ra1.n_ids[2], ra1.n_ids[1]), COALESCE(ra1.n_ids[2], ra1.n_ids[1]) FROM roundabout_acces AS ra1 @@ -274,7 +275,7 @@ def analyser_osmosis_common(self): self.run(sql17, lambda res: {"class":1, "subclass":res[2], "data":[self.way_full, self.positionAsText]} ) self.run(sql20) self.run(sql21) - self.run(sql22, lambda res: {"class":2, "data":[self.way_full, self.node_position]} ) + self.run(sql22, lambda res: {"class":2, "data":[self.way_full, self.node, self.node_position]} ) self.run(sql30) self.run(sql31, lambda res: {"class":3, "data":[self.way_full, self.positionAsText]} ) self.run(sql40, lambda res: {"class":4, "data":[self.way_full, self.way_full, self.positionAsText]} ) diff --git a/analysers/analyser_osmosis_tag_typo.py b/analysers/analyser_osmosis_tag_typo.py index a1fd0866b..56880912b 100644 --- a/analysers/analyser_osmosis_tag_typo.py +++ b/analysers/analyser_osmosis_tag_typo.py @@ -84,7 +84,7 @@ sql20 = """ CREATE TEMP TABLE fix_{0} AS -SELECT +SELECT DISTINCT ON (t1.key) t1.key as low_key, t2.key as hight_key FROM @@ -94,6 +94,9 @@ t1.count < t2.count / 20 AND abs(length(t1.key) - length(t2.key)) <= 1 AND levenshtein(t1.key, t2.key) <= 1 +ORDER BY + t1.key, + t2.count DESC """ sql30 = """ diff --git a/merge_data/mapillary-traffic-signs.mapping.json b/merge_data/mapillary-traffic-signs.mapping.json index 32fa26f2a..39d2fb082 100644 --- a/merge_data/mapillary-traffic-signs.mapping.json +++ b/merge_data/mapillary-traffic-signs.mapping.json @@ -949,6 +949,39 @@ "hazard": "animal_crossing" } }, + { + "class": 52, + "level": 3, + "otype": [ + "nodes" + ], + "conflation": 100, + "title": "stop", + "only_for": [ + "CA-ON", + "CA-QC" + ], + "object": [ + "regulatory--stop--g1", + "regulatory--stop--g2", + "regulatory--stop--g3", + "regulatory--stop--g4", + "regulatory--stop--g5", + "regulatory--stop--g6", + "regulatory--stop--g7", + "regulatory--stop--g8", + "regulatory--stop--g9", + "regulatory--stop--g10" + ], + "select_tags": [ + { + "highway": "stop" + } + ], + "generate_tags": { + "highway": "stop" + } + }, { "class": 101, "level": 2, diff --git a/modules/download.py b/modules/download.py index 7480111ab..884191404 100755 --- a/modules/download.py +++ b/modules/download.py @@ -52,7 +52,8 @@ def dl(url, local, logger=OsmoseLog.logger(), min_file_size=10*1024): headers["If-Modified-Since"] = open(file_ts).read() # request fails with a 304 error when the file wasn't modified - answer = downloader.get(url, headers=headers) + # Retry on 404, workaround Geofabrik update in progress + answer = downloader.get(url, headers=headers, session=downloader.requests_retry_session(status_forcelist=downloader.DEFAULT_RETRY_ON + (404, ))) if answer.status_code == 304: logger.log(u"not newer") return False diff --git a/modules/downloader.py b/modules/downloader.py index fa4977940..2f13f716e 100644 --- a/modules/downloader.py +++ b/modules/downloader.py @@ -37,7 +37,9 @@ HTTP_DATE_FMT = "%a, %d %b %Y %H:%M:%S GMT" -def requests_retry_session(retries=3, backoff_factor=1, status_forcelist=(500, 502, 503, 504)): +DEFAULT_RETRY_ON = (500, 502, 503, 504) + +def requests_retry_session(retries=3, backoff_factor=1, status_forcelist=DEFAULT_RETRY_ON): session = requests.Session() retry = Retry( total=retries, @@ -52,9 +54,11 @@ def requests_retry_session(retries=3, backoff_factor=1, status_forcelist=(500, 5 return session -def get(url, headers={}): +def 
get(url, headers={}, session=None): headers['User-Agent'] = 'Wget/1.9.1 - http://osmose.openstreetmap.fr' # Add "Wget" for Dropbox user-agent checker - return requests_retry_session().get(url, headers=headers, stream=True) + if not session: + session = requests_retry_session() + return session.get(url, headers=headers, stream=True) def http_get(url, tmp_file, date_string=None, get=get): headers = {} diff --git a/osmosis/CreateFunctions.sql b/osmosis/CreateFunctions.sql index 516d56bad..1cc6f7084 100644 --- a/osmosis/CreateFunctions.sql +++ b/osmosis/CreateFunctions.sql @@ -11,6 +11,28 @@ $$ LANGUAGE plpgsql RETURNS NULL ON NULL INPUT; +CREATE TYPE id_geom AS ( + id bigint, + geom geometry +); + +CREATE OR REPLACE FUNCTION ends_geom(nodes bigint[], linestring geometry) RETURNS SETOF id_geom AS $$ +DECLARE + tmp id_geom; +BEGIN + tmp.id = nodes[1]; + tmp.geom = ST_StartPoint(linestring); + RETURN NEXT tmp; + tmp.id = nodes[array_length(nodes,1)]; + tmp.geom = ST_EndPoint(linestring); + RETURN NEXT tmp; + RETURN; +END +$$ LANGUAGE plpgsql + IMMUTABLE + RETURNS NULL ON NULL INPUT; + + CREATE OR REPLACE FUNCTION way_locate(linestring geometry) RETURNS geometry AS $$ DECLARE BEGIN IF ST_NPoints(linestring) > 1 THEN diff --git a/plugins/Highway_Lanes.py b/plugins/Highway_Lanes.py index d4d6eceb2..de764ab29 100644 --- a/plugins/Highway_Lanes.py +++ b/plugins/Highway_Lanes.py @@ -21,6 +21,7 @@ from plugins.Plugin import Plugin from modules.py3 import ilen +from modules.Stablehash import stablehash class Highway_Lanes(Plugin): @@ -71,9 +72,9 @@ def way(self, data, tags, nds): for t in tt.split(";"): if t not in ["left", "slight_left", "sharp_left", "through", "right", "slight_right", "sharp_right", "reverse", "merge_to_left", "merge_to_right", "none", ""]: unknown = True - err.append({"class": 31606, "subclass": 1, "text": T_f(u"Unknown turn lanes value \"{0}\"", t)}) + err.append({"class": 31606, "subclass": 0 + stablehash(tl), "text": T_f(u"Unknown turn lanes value \"{0}\"", t)}) if (t == "merge_to_left" and i == 0) or (t == "merge_to_right" and i == len(ttt) - 1): - err.append({"class": 31600, "subclass": 1}) + err.append({"class": 31600, "subclass": 1 + stablehash(tl)}) i += 1 if not unknown: # merge_to_left is a on the right and vice versa @@ -96,7 +97,7 @@ def way(self, data, tags, nds): (first_space == None or last_space == None or first_space <= last_space) and (last_space == None or first_right == None or last_space < first_right) and (last_left == None or first_right == None or last_left < first_right)): - err.append({"class": 31607, "subclass": 1}) + err.append({"class": 31607, "subclass": 1 + stablehash(tl)}) # Check acces lanes values @@ -113,7 +114,7 @@ def way(self, data, tags, nds): if tag.startswith(base): try: int(tags_lanes[tag]) - err.append({"class": 31609, "subclass": 1, "text": {'en': '%s=%s' % (tag, tags_lanes[tag]) }}) + err.append({"class": 31609, "subclass": 1 + stablehash(tag), "text": {'en': '%s=%s' % (tag, tags_lanes[tag]) }}) except ValueError: # Ok, should not be an integer pass @@ -132,7 +133,7 @@ def way(self, data, tags, nds): lb = star + ':backward' in tags_lanes l2 = star + ':both_ways' in tags_lanes if l and (lf or lb or l2): - err.append({"class": 31603, "subclass": 0, "text": {"en": star + ":*"}}) + err.append({"class": 31603, "subclass": 0 + stablehash(star), "text": {"en": star + ":*"}}) if err != []: return err @@ -149,7 +150,7 @@ def way(self, data, tags, nds): elif len(parts) == 2 and parts[1] in ['forward', 'backward', 'both_ways']: 
number['lanes'][':'+parts[1]] = n except ValueError: - err.append({"class": 31601, "subclass": 0, "text": T_f(u"lanes={0} is not an integer", tags_lanes[tag])}) + err.append({"class": 31601, "subclass": 0 + stablehash(tag), "text": T_f(u"lanes={0} is not an integer", tags_lanes[tag])}) for star in stars: number[star] = {} @@ -167,7 +168,7 @@ def way(self, data, tags, nds): if n_lanes.get(direction) != None and number[star].get(direction) != None and \ number[star][direction] - non_fullwidth_lanes_number_star != \ n_lanes[direction] - non_fullwidth_lanes_number_tag: - err.append({"class": 31608, "subclass": 0, "text": { + err.append({"class": 31608, "subclass": 0 + stablehash(star), "text": { "en": "(lanes(%s)=%s) - (non fullwidth=%s) != (lanes(%s)=%s) - (non fullwidth=%s)" % ( star+":*"+direction, number[star][direction], non_fullwidth_lanes_number_star, tag, n_lanes[direction], non_fullwidth_lanes_number_tag) }}) @@ -206,16 +207,16 @@ def way(self, data, tags, nds): if oneway: if nl != None and nlf != None and nl != nlf - nfw_nlf: err.append({"class": 31604, "subclass": 0, "text": T_f(u"on oneway, (lanes={0}) != (lanes:forward={1}) - (non fullwidth forward={2})", nl, nlf, nfw_nlf)}) - if nlb != None or nl2 != None: + elif nlb != None or nl2 != None: err.append({"class": 31605, "subclass": 0}) else: if nl != None and nlf != None and nlb != None and nl != nlf + nlb + (nl2 or 0) - nfw_nl - nfw_nlf - nfw_nlb - nfw_nl2: err.append({"class": 31604, "subclass": 0, "text": T_f(u"on two way, (lanes={0}) != (lanes:forward={1}) + (lanes:backward={2}) + (lanes:both_ways={3}) - (non fullwidth={4}) - (non fullwidth forward={5}) - (non fullwidth backward={6}) - (non fullwidth both_ways={7})", nl, nlf, nlb, nl2, nfw_nl, nfw_nlf, nfw_nlb, nfw_nl2)}) - if nl != None and nlf != None and nl <= nlf - nfw_nlf: + elif nl != None and nlf != None and nl <= nlf - nfw_nlf: err.append({"class": 31604, "subclass": 0, "text": T_f(u"on two way, (lanes={0}) <= (lanes:forward={1}) - (non fullwidth forward={2})", nl, nlf, nfw_nlf)}) - if nl != None and nlb != None and nl <= nlb - nfw_nlb: + elif nl != None and nlb != None and nl <= nlb - nfw_nlb: err.append({"class": 31604, "subclass": 0, "text": T_f(u"on two way, (lanes={0}) <= (lanes:backward={1}) - (non fullwidth backward={2})", nl, nlb, nfw_nlb)}) - if nl != None and nl2 != None and nl < nl2 - nfw_nl2: + elif nl != None and nl2 != None and nl < nl2 - nfw_nl2: err.append({"class": 31604, "subclass": 0, "text": T_f(u"on two way, (lanes={0}) < (lanes:both_ways={1}) - (non fullwidth both_ways={2})", nl, nl2, nfw_nl2)}) if err != []: diff --git a/plugins/Name_MisspelledWordByRegex_Lang_fa.py b/plugins/Name_MisspelledWordByRegex_Lang_fa.py index 98db53833..298d22e98 100644 --- a/plugins/Name_MisspelledWordByRegex_Lang_fa.py +++ b/plugins/Name_MisspelledWordByRegex_Lang_fa.py @@ -37,7 +37,7 @@ def node(self, data, tags): except mapcss.RuleAbort: pass if match: # group:tr("Arabic letter detected in Farsi name") - # -osmoseItemClassLevel:"5010/50109001/2" + # -osmoseItemClassLevel:"5010/50109001:0/2" # throwError:tr("In Farsi, the Arabic letter '{0}' should be replaced by '{1}'","ي","ی") # fixAdd:concat("{0.key}=",replace("{0.value}","ي","ی")) # -osmoseAssertMatchWithContext:list('node name="روابط عمومي مجتمع مس شهربابك"','language=fa') @@ -62,13 +62,13 @@ def node(self, data, tags): except mapcss.RuleAbort: pass if match: # group:tr("Arabic letter detected in Farsi name") - # -osmoseItemClassLevel:"5010/50109001/2" + # -osmoseItemClassLevel:"5010/50109001:1/2" # 
throwError:tr("In Farsi, the Arabic letter '{0}' should be replaced by '{1}'","ك","ک") # fixAdd:concat("{0.key}=",replace("{0.value}","ك","ک")) # -osmoseAssertMatchWithContext:list('node name="روابط عمومي مجتمع مس شهربابك"','language=fa') # assertMatch:'node name:fa="روابط عمومي مجتمع مس شهربابك"' # assertNoMatch:'node name="روابط عمومي مجتمع مس شهربابك"' - err.append({'class': 50109001, 'subclass': 0, 'text': mapcss.tr(u'In Farsi, the Arabic letter \'{0}\' should be replaced by \'{1}\'', u'ك', u'ک'), 'allow_fix_override': True, 'fix': { + err.append({'class': 50109001, 'subclass': 1, 'text': mapcss.tr(u'In Farsi, the Arabic letter \'{0}\' should be replaced by \'{1}\'', u'ك', u'ک'), 'allow_fix_override': True, 'fix': { '+': dict([ (mapcss.concat(mapcss._tag_uncapture(capture_tags, u'{0.key}='), mapcss.replace(mapcss._tag_uncapture(capture_tags, u'{0.value}'), u'ك', u'ک'))).split('=', 1)]) }}) @@ -95,7 +95,7 @@ def way(self, data, tags, nds): except mapcss.RuleAbort: pass if match: # group:tr("Arabic letter detected in Farsi name") - # -osmoseItemClassLevel:"5010/50109001/2" + # -osmoseItemClassLevel:"5010/50109001:0/2" # throwError:tr("In Farsi, the Arabic letter '{0}' should be replaced by '{1}'","ي","ی") # fixAdd:concat("{0.key}=",replace("{0.value}","ي","ی")) err.append({'class': 50109001, 'subclass': 0, 'text': mapcss.tr(u'In Farsi, the Arabic letter \'{0}\' should be replaced by \'{1}\'', u'ي', u'ی'), 'allow_fix_override': True, 'fix': { @@ -117,10 +117,10 @@ def way(self, data, tags, nds): except mapcss.RuleAbort: pass if match: # group:tr("Arabic letter detected in Farsi name") - # -osmoseItemClassLevel:"5010/50109001/2" + # -osmoseItemClassLevel:"5010/50109001:1/2" # throwError:tr("In Farsi, the Arabic letter '{0}' should be replaced by '{1}'","ك","ک") # fixAdd:concat("{0.key}=",replace("{0.value}","ك","ک")) - err.append({'class': 50109001, 'subclass': 0, 'text': mapcss.tr(u'In Farsi, the Arabic letter \'{0}\' should be replaced by \'{1}\'', u'ك', u'ک'), 'allow_fix_override': True, 'fix': { + err.append({'class': 50109001, 'subclass': 1, 'text': mapcss.tr(u'In Farsi, the Arabic letter \'{0}\' should be replaced by \'{1}\'', u'ك', u'ک'), 'allow_fix_override': True, 'fix': { '+': dict([ (mapcss.concat(mapcss._tag_uncapture(capture_tags, u'{0.key}='), mapcss.replace(mapcss._tag_uncapture(capture_tags, u'{0.value}'), u'ك', u'ک'))).split('=', 1)]) }}) @@ -147,7 +147,7 @@ def relation(self, data, tags, members): except mapcss.RuleAbort: pass if match: # group:tr("Arabic letter detected in Farsi name") - # -osmoseItemClassLevel:"5010/50109001/2" + # -osmoseItemClassLevel:"5010/50109001:0/2" # throwError:tr("In Farsi, the Arabic letter '{0}' should be replaced by '{1}'","ي","ی") # fixAdd:concat("{0.key}=",replace("{0.value}","ي","ی")) err.append({'class': 50109001, 'subclass': 0, 'text': mapcss.tr(u'In Farsi, the Arabic letter \'{0}\' should be replaced by \'{1}\'', u'ي', u'ی'), 'allow_fix_override': True, 'fix': { @@ -169,10 +169,10 @@ def relation(self, data, tags, members): except mapcss.RuleAbort: pass if match: # group:tr("Arabic letter detected in Farsi name") - # -osmoseItemClassLevel:"5010/50109001/2" + # -osmoseItemClassLevel:"5010/50109001:1/2" # throwError:tr("In Farsi, the Arabic letter '{0}' should be replaced by '{1}'","ك","ک") # fixAdd:concat("{0.key}=",replace("{0.value}","ك","ک")) - err.append({'class': 50109001, 'subclass': 0, 'text': mapcss.tr(u'In Farsi, the Arabic letter \'{0}\' should be replaced by \'{1}\'', u'ك', u'ک'), 'allow_fix_override': True, 'fix': { + 
err.append({'class': 50109001, 'subclass': 1, 'text': mapcss.tr(u'In Farsi, the Arabic letter \'{0}\' should be replaced by \'{1}\'', u'ك', u'ک'), 'allow_fix_override': True, 'fix': { '+': dict([ (mapcss.concat(mapcss._tag_uncapture(capture_tags, u'{0.key}='), mapcss.replace(mapcss._tag_uncapture(capture_tags, u'{0.value}'), u'ك', u'ک'))).split('=', 1)]) }}) @@ -199,6 +199,6 @@ class father: self.check_err(n.node(data, {u'name:fa': u'روابط عمومي مجتمع مس شهربابك'}), expected={'class': 50109001, 'subclass': 0}) self.check_not_err(n.node(data, {u'name': u'روابط عمومي مجتمع مس شهربابك'}), expected={'class': 50109001, 'subclass': 0}) with with_options(n, {'language': 'fa'}): - self.check_err(n.node(data, {u'name': u'روابط عمومي مجتمع مس شهربابك'}), expected={'class': 50109001, 'subclass': 0}) - self.check_err(n.node(data, {u'name:fa': u'روابط عمومي مجتمع مس شهربابك'}), expected={'class': 50109001, 'subclass': 0}) - self.check_not_err(n.node(data, {u'name': u'روابط عمومي مجتمع مس شهربابك'}), expected={'class': 50109001, 'subclass': 0}) + self.check_err(n.node(data, {u'name': u'روابط عمومي مجتمع مس شهربابك'}), expected={'class': 50109001, 'subclass': 1}) + self.check_err(n.node(data, {u'name:fa': u'روابط عمومي مجتمع مس شهربابك'}), expected={'class': 50109001, 'subclass': 1}) + self.check_not_err(n.node(data, {u'name': u'روابط عمومي مجتمع مس شهربابك'}), expected={'class': 50109001, 'subclass': 1}) diff --git a/plugins/Name_MisspelledWordByRegex_Lang_fa.validator.mapcss b/plugins/Name_MisspelledWordByRegex_Lang_fa.validator.mapcss index 965693212..829ad7e27 100644 --- a/plugins/Name_MisspelledWordByRegex_Lang_fa.validator.mapcss +++ b/plugins/Name_MisspelledWordByRegex_Lang_fa.validator.mapcss @@ -35,7 +35,7 @@ meta[lang=fr] { *[name:fa=~/ي/] { group: tr("Arabic letter detected in Farsi name"); throwError: tr("In Farsi, the Arabic letter '{0}' should be replaced by '{1}'", "ي", "ی"); - -osmoseItemClassLevel: "5010/50109001/2"; + -osmoseItemClassLevel: "5010/50109001:0/2"; fixAdd: concat("{0.key}=", replace("{0.value}", "ي", "ی")); -osmoseAssertMatchWithContext: list('node name="روابط عمومي مجتمع مس شهربابك"', 'language=fa'); @@ -47,7 +47,7 @@ meta[lang=fr] { *[name:fa=~/ك/] { group: tr("Arabic letter detected in Farsi name"); throwError: tr("In Farsi, the Arabic letter '{0}' should be replaced by '{1}'", "ك", "ک"); - -osmoseItemClassLevel: "5010/50109001/2"; + -osmoseItemClassLevel: "5010/50109001:1/2"; fixAdd: concat("{0.key}=", replace("{0.value}", "ك", "ک")); -osmoseAssertMatchWithContext: list('node name="روابط عمومي مجتمع مس شهربابك"', 'language=fa'); diff --git a/plugins/Name_Script.py b/plugins/Name_Script.py index 356c93af2..ed23c4dac 100644 --- a/plugins/Name_Script.py +++ b/plugins/Name_Script.py @@ -25,6 +25,7 @@ from modules.languages import language2scripts, gen_regex from modules import confusables from modules.py3 import ilen +from modules.Stablehash import stablehash class Name_Script(Plugin): @@ -96,33 +97,33 @@ def node(self, data, tags): for key, value in tags.items(): m = self.non_printable.search(key) if m: - err.append({"class": 50702, "subclass": 0, "text": T_f(u"\"{0}\" unexpected non printable char ({1}, 0x{2:04x}) in key at position {3}", key, unicodedata.name(m.group(0), ''), ord(m.group(0)), m.start() + 1)}) + err.append({"class": 50702, "subclass": 0 + stablehash(key), "text": T_f(u"\"{0}\" unexpected non printable char ({1}, 0x{2:04x}) in key at position {3}", key, unicodedata.name(m.group(0), ''), ord(m.group(0)), m.start() + 1)}) continue m = 
self.non_printable.search(value) if m: - err.append({"class": 50702, "subclass": 1, "text": T_f(u"\"{0}\"=\"{1}\" unexpected non printable char ({2}, 0x{3:04x}) in value at position {4}", key, value, unicodedata.name(m.group(0), ''), ord(m.group(0)), m.start() + 1)}) + err.append({"class": 50702, "subclass": 1 + stablehash(key), "text": T_f(u"\"{0}\"=\"{1}\" unexpected non printable char ({2}, 0x{3:04x}) in value at position {4}", key, value, unicodedata.name(m.group(0), ''), ord(m.group(0)), m.start() + 1)}) continue m = self.other_symbol.search(key) if m: - err.append({"class": 50703, "subclass": 0, "text": T_f(u"\"{0}\" unexpected symbol char ({1}, 0x{2:04x}) in key at position {3}", key, unicodedata.name(m.group(0), ''), ord(m.group(0)), m.start() + 1)}) + err.append({"class": 50703, "subclass": 0 + stablehash(key), "text": T_f(u"\"{0}\" unexpected symbol char ({1}, 0x{2:04x}) in key at position {3}", key, unicodedata.name(m.group(0), ''), ord(m.group(0)), m.start() + 1)}) continue m = self.other_symbol.search(value) if m: - err.append({"class": 50703, "subclass": 1, "text": T_f(u"\"{0}\"=\"{1}\" unexpected symbol char ({2}, 0x{3:04x}) in value at position {4}", key, value, unicodedata.name(m.group(0), ''), ord(m.group(0)), m.start() + 1)}) + err.append({"class": 50703, "subclass": 1 + stablehash(key), "text": T_f(u"\"{0}\"=\"{1}\" unexpected symbol char ({2}, 0x{3:04x}) in value at position {4}", key, value, unicodedata.name(m.group(0), ''), ord(m.group(0)), m.start() + 1)}) continue # https://en.wikipedia.org/wiki/Bi-directional_text#Table_of_possible_BiDi-types for c in u"\u200E\u200F\u061C\u202A\u202D\u202B\u202E\u202C\u2066\u2067\u2068\u2069": m = key.find(c) if m > 0: - err.append({"class": 50702, "subclass": 2, "text": T_f(u"\"{0}\" unexpected non printable char ({1}, 0x{2:04x}) in key at position {3}", key, unicodedata.name(c, ''), ord(c), m + 1)}) + err.append({"class": 50702, "subclass": 2 + stablehash(key), "text": T_f(u"\"{0}\" unexpected non printable char ({1}, 0x{2:04x}) in key at position {3}", key, unicodedata.name(c, ''), ord(c), m + 1)}) m = value.find(c) if m > 0: - err.append({"class": 50702, "subclass": 2, "text": T_f(u"\"{0}\"=\"{1}\" unexpected non printable char ({2}, 0x{3:04x}) in value at position {4}", key, value, unicodedata.name(c, ''), ord(c), m + 1)}) + err.append({"class": 50702, "subclass": 3 + stablehash(key), "text": T_f(u"\"{0}\"=\"{1}\" unexpected non printable char ({2}, 0x{3:04x}) in value at position {4}", key, value, unicodedata.name(c, ''), ord(c), m + 1)}) if self.default: if key in self.names: @@ -135,16 +136,16 @@ def node(self, data, tags): c = s[0] u = self.uniq_script and confusables.unconfuse(c, self.uniq_script) if u: - err.append({"class": 50701, "subclass": 0, + err.append({"class": 50701, "subclass": 0 + stablehash(key), "text": T_f(u"\"{0}\"=\"{1}\" unexpected char \"{2}\" ({3}, 0x{4:04x}). 
Means \"{5}\" ({6}, 0x{7:04x})?", key, value, s, unicodedata.name(c, ''), ord(c), u, unicodedata.name(u, ''), ord(u)), "fix": {key: value.replace(c, u)} }) else: - err.append({"class": 50701, "subclass": 0, + err.append({"class": 50701, "subclass": 0 + stablehash(key), "text": T_f(u"\"{0}\"=\"{1}\" unexpected char \"{2}\" ({3}, 0x{4:04x})", key, value, s, unicodedata.name(c, ''), ord(c)) }) else: - err.append({"class": 50701, "subclass": 0, "text": T_f(u"\"{0}\"=\"{1}\" unexpected \"{2}\"", key, value, s)}) + err.append({"class": 50701, "subclass": 0 + stablehash(key), "text": T_f(u"\"{0}\"=\"{1}\" unexpected \"{2}\"", key, value, s)}) l = key.split(':') if len(l) > 1 and l[0] in self.names and l[1] in self.lang: @@ -157,16 +158,16 @@ def node(self, data, tags): c = s[0] u = self.uniq_scripts.get(l[1]) and confusables.unconfuse(c, self.uniq_scripts.get(l[1])) if u: - err.append({"class": 50701, "subclass": 1, + err.append({"class": 50701, "subclass": 1 + stablehash(key), "text": T_f(u"\"{0}\"=\"{1}\" unexpected char \"{2}\" ({3}, 0x{4:04x}). Means \"{5}\" ({6}, 0x{7:04x})?", key, value, s, unicodedata.name(c, ''), ord(c), u, unicodedata.name(u, ''), ord(u)), "fix": {key: value.replace(c, u)} }) else: - err.append({"class": 50701, "subclass": 1, + err.append({"class": 50701, "subclass": 1 + stablehash(key), "text": T_f(u"\"{0}\"=\"{1}\" unexpected char \"{2}\" ({3}, 0x{4:04x})", key, value, s, unicodedata.name(c, ''), ord(c)) }) else: - err.append({"class": 50701, "subclass": 1, "text": T_f(u"\"{0}\"=\"{1}\" unexpected \"{2}\"", key, value, s)}) + err.append({"class": 50701, "subclass": 1 + stablehash(key), "text": T_f(u"\"{0}\"=\"{1}\" unexpected \"{2}\"", key, value, s)}) return err diff --git a/plugins/Phone.py b/plugins/Phone.py index 4978fcba3..d9d7550a8 100644 --- a/plugins/Phone.py +++ b/plugins/Phone.py @@ -112,46 +112,46 @@ def check(self, tags): phone = phone.replace(sep, '; ') if p != phone: phone = phone.replace(' ', ' ') - err.append({"class": 30926, "subclass": stablehash(tag), "fix": {tag: phone.replace(' / ', '; ').replace(' - ', '; ').replace(',', ';')}}) + err.append({"class": 30926, "subclass": stablehash(tag), "text": {'en': u'='.join([tag, phone])}, "fix": {tag: phone.replace(' / ', '; ').replace(' - ', '; ').replace(',', ';')}}) continue phone_test = phone for c in '+0123456789 -./()': phone_test = phone_test.replace(c, '') if len(phone_test) > 0: - err.append({"class": 30925, "subclass": stablehash(tag), "text": T_f(u"Not allowed char \"{0}\" in phone number", phone_test)}) + err.append({"class": 30925, "subclass": stablehash(tag), "text": T_f(u"Not allowed char \"{0}\" in phone number tag \"{1}\"", phone_test, tag)}) continue # Before local prefix if self.InternationalPrefix: r = self.InternationalPrefix.match(phone) if r: - err.append({"class": 30924, "subclass": stablehash(tag), "fix": {tag: "+" + r.group(1)}}) + err.append({"class": 30924, "subclass": stablehash(tag), "text": {'en': u'='.join([tag, phone])}, "fix": {tag: "+" + r.group(1)}}) continue if self.InternationalAndLocalPrefix: r = self.InternationalAndLocalPrefix.match(phone) if r: - err.append({"class": 30921, "subclass": stablehash(tag), "fix": {tag: "+" + self.code + " " + r.group(1)}}) + err.append({"class": 30921, "subclass": stablehash(tag), "text": {'en': u'='.join([tag, phone])}, "fix": {tag: "+" + self.code + " " + r.group(1)}}) continue if self.MissingInternationalPrefix: r = self.MissingInternationalPrefix.match(phone) if r: - err.append({"class": 30923, "subclass": stablehash(tag), "fix": 
{tag: "+" + self.code + " " + r.group(1)}}) + err.append({"class": 30923, "subclass": stablehash(tag), "text": {'en': u'='.join([tag, phone])}, "fix": {tag: "+" + self.code + " " + r.group(1)}}) continue if self.BadShort: r = self.BadShort.match(phone) if r: - err.append({"class": 30922, "subclass": stablehash(tag), "fix": {tag: r.group(1)}}) + err.append({"class": 30922, "subclass": stablehash(tag), "text": {'en': u'='.join([tag, phone])}, "fix": {tag: r.group(1)}}) continue # Last if self.Format: r = self.Format.match(phone) if not r: - err.append({"class": 30920, "subclass": stablehash(tag), "text": {"en": phone}}) + err.append({"class": 30920, "subclass": stablehash(tag), "text": {'en': u'='.join([tag, phone])}, "text": {"en": phone}}) continue return err diff --git a/plugins/TagFix_MultipleTag.py b/plugins/TagFix_MultipleTag.py index 51d0dd4c0..9e5edcbdd 100644 --- a/plugins/TagFix_MultipleTag.py +++ b/plugins/TagFix_MultipleTag.py @@ -49,7 +49,7 @@ def init(self, logger): self.driving_side_right = not(self.father.config.options.get("driving_side") == "left") self.driving_direction = "anticlockwise" if self.driving_side_right else "clockwise" name_parent = [] - for i in ('type', 'aerialway', 'aeroway', 'amenity', 'barrier', 'boundary', 'building', 'craft', 'entrance', 'emergency', 'geological', 'highway', 'historic', 'landuse', 'leisure', 'man_made', 'military', 'natural', 'office', 'place', 'power', 'public_transport', 'railway', 'route', 'shop', 'sport', 'tourism', 'waterway', 'mountain_pass', 'traffic_sign', 'mountain_pass', 'golf', 'piste:type', 'junction', 'healthcare', 'health_facility:type', 'indoor', 'club', 'seamark:type'): + for i in ('type', 'aerialway', 'aeroway', 'amenity', 'barrier', 'boundary', 'building', 'craft', 'entrance', 'emergency', 'geological', 'highway', 'historic', 'landuse', 'leisure', 'man_made', 'military', 'natural', 'office', 'place', 'power', 'public_transport', 'railway', 'route', 'shop', 'sport', 'tourism', 'waterway', 'mountain_pass', 'traffic_sign', 'mountain_pass', 'golf', 'piste:type', 'junction', 'healthcare', 'health_facility:type', 'indoor', 'club', 'seamark:type', 'attraction'): name_parent.append(i) name_parent.append("disused:" + i) name_parent.append("abandoned:" + i) diff --git a/plugins/TagFix_MultipleTag_Lang_fr.py b/plugins/TagFix_MultipleTag_Lang_fr.py index 81c8320d2..6a5cd8bc6 100644 --- a/plugins/TagFix_MultipleTag_Lang_fr.py +++ b/plugins/TagFix_MultipleTag_Lang_fr.py @@ -50,7 +50,7 @@ def node(self, data, tags): if self.Eglise.match(tags["name"]) and not self.EgliseNot1.match(tags["name"]) and not self.EgliseNot2.match(tags["name"]): err.append({"class": 3032, "subclass": 1, "text": T_(u"\"name=%s\" is the localisation but not the name", tags["name"])}) else: - if "shop" not in tags and self.Marche.match(tags["name"]): + if "shop" not in tags and "public_transport" not in tags and self.Marche.match(tags["name"]): err.append({"class": 3032, "subclass": 5, "fix": {"amenity": "marketplace"}}) if "historic" in tags: diff --git a/plugins/TagWatchFrViPofm.py b/plugins/TagWatchFrViPofm.py index 166406f2c..655ea3159 100644 --- a/plugins/TagWatchFrViPofm.py +++ b/plugins/TagWatchFrViPofm.py @@ -87,27 +87,27 @@ def node(self, data, tags): err = [] for k in tags: if k in self._update_ks: - err.append({"class": self._update_ks[k][1], "subclass": stablehash(k), "text": T_f(u"tag key: {0} => {1} (rule ks)", k, self._update_ks[k][0])}) + err.append({"class": self._update_ks[k][1], "subclass": stablehash(u"%s|%s" % (self._update_ks, k)), "text": 
T_f(u"tag key: {0} => {1} (rule ks)", k, self._update_ks[k][0])}) if k in self._update_ks_vs and tags[k] in self._update_ks_vs[k]: - err.append({"class": self._update_ks_vs[k][tags[k]][1], "subclass": stablehash(u"%s=%s" % (k,tags[k])), "text": T_f(u"tag value: {0}={1} => {2} (rule ks_vs)", k, tags[k],self._update_ks_vs[k][tags[k]][0])}) + err.append({"class": self._update_ks_vs[k][tags[k]][1], "subclass": stablehash(u"%s|%s" % (self._update_ks, k)), "text": T_f(u"tag value: {0}={1} => {2} (rule ks_vs)", k, tags[k],self._update_ks_vs[k][tags[k]][0])}) if k in self._update_ks_vr: for v in self._update_ks_vr[k]: if v.match(tags[k]): - err.append({"class": self._update_ks_vr[k][v][1], "subclass": stablehash(u"%s=%s" % (k,tags[k])), "text": T_f(u"tag value: {0}={1} => {2} (rule ks_vr)", k, tags[k],self._update_ks_vr[k][v][0])}) + err.append({"class": self._update_ks_vr[k][v][1], "subclass": stablehash(u"%s|%s" % (self._update_ks_vr, k)), "text": T_f(u"tag value: {0}={1} => {2} (rule ks_vr)", k, tags[k],self._update_ks_vr[k][v][0])}) for kk in tags: for k in self._update_kr: if k.match(kk): - err.append({"class": self._update_kr[k][1], "subclass": stablehash(kk), "text": T_f(u"tag key: {0} => {1} (rule kr)", kk, self._update_kr[k][0])}) + err.append({"class": self._update_kr[k][1], "subclass": stablehash(u"%s|%s" % (kk, k)), "text": T_f(u"tag key: {0} => {1} (rule kr)", kk, self._update_kr[k][0])}) for k in self._update_kr_vs: if k.match(kk): if tags[kk] in self._update_kr_vs[k]: - err.append({"class": self._update_kr_vs[k][tags[kk]][1], "subclass": stablehash(u"%s=%s" % (kk,tags[kk])), "text": T_f(u"tag value: {0}={1} => {2} (rule kr_vs)", kk, tags[kk], self._update_kr_vs[k][tags[kk]][0])}) + err.append({"class": self._update_kr_vs[k][tags[kk]][1], "subclass": stablehash(u"%s|%s" % (kk, k)), "text": T_f(u"tag value: {0}={1} => {2} (rule kr_vs)", kk, tags[kk], self._update_kr_vs[k][tags[kk]][0])}) for k in self._update_kr_vr: if k.match(kk): for v in self._update_kr_vr[k]: if v.match(tags[kk]): - err.append({"class": self._update_kr_vr[k][v][1], "zsubclass": stablehash(u"%s=%s" % (kk,tags[kk])), "text": T_f(u"tag value: {0}={1} => {2} (rule ks_vr)", kk, tags[kk], self._update_kr_vr[k][v][0])}) + err.append({"class": self._update_kr_vr[k][v][1], "zsubclass": stablehash(u"%s|%s" % (kk, k)), "text": T_f(u"tag value: {0}={1} => {2} (rule ks_vr)", kk, tags[kk], self._update_kr_vr[k][v][0])}) return err def way(self, data, tags, nds):