diff --git a/tools/natural-earth-vector-tiling/level_info.txt b/tools/natural-earth-vector-tiling/level_info.txt new file mode 100644 index 000000000..31e3023c1 --- /dev/null +++ b/tools/natural-earth-vector-tiling/level_info.txt @@ -0,0 +1,28 @@ +*1 +ne_110m_land +ne_110m_admin_0_boundary_lines_land +*3 +ne_110m_land +ne_110m_admin_0_boundary_lines_land +*5 +ne_50m_land +ne_50m_admin_0_boundary_lines_land +ne_50m_admin_1_states_provinces_lines +*7 +ne_10m_land +ne_10m_admin_0_boundary_lines_land +ne_10m_admin_1_states_provinces +ne_10m_roads +ne_10m_railroads +ne_10m_rivers_lake_centerlines +ne_10m_lakes +ne_10m_lakes_north_america +ne_10m_lakes_europe +ne_10m_playas +ne_10m_antarctic_ice_shelves_lines +ne_10m_antarctic_ice_shelves_polys + + + + + diff --git a/tools/natural-earth-vector-tiling/ne_tilegenerator.py b/tools/natural-earth-vector-tiling/ne_tilegenerator.py new file mode 100644 index 000000000..df011db07 --- /dev/null +++ b/tools/natural-earth-vector-tiling/ne_tilegenerator.py @@ -0,0 +1,132 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +# +# This file is part of the Marble Virtual Globe. +# +# This program is free software licensed under the GNU LGPL. You can +# find a copy of this license in LICENSE.txt in the top directory of +# the source code. +# +# Copyright 2016 Akshat Tandon +# +""" +Generates low level tiles for Marble using Natural Earth data +""" + +import sys +import os +import math +import argparse +import urllib3 +import zipfile +sys.path.append('../shp2osm') +sys.path.append('../vectortilecreator') +import polyshp2osm +import vectortilecreator + +def unzip_file(filename, in_dir): + print(in_dir) + path_zip = os.path.join(in_dir, filename + '.zip') + with zipfile.ZipFile(path_zip ,"r") as zip_ref: + path_dir = os.path.join(in_dir, filename) + os.mkdir(path_dir) + zip_ref.extractall(path_dir) + os.remove(path_zip) + +def generate_url(filename): + url = 'www.naturalearthdata.com/http//www.naturalearthdata.com/download/' + cultural_tokens = ['admin', 'populated', 'roads', 'railroads', 'airports', 'ports', 'urban', 'parks', 'time', 'cultural'] + file_tokens = filename.split('_') + url += file_tokens[1] + '/' + data_type = 'physical' + for token in file_tokens: + if token in cultural_tokens: + data_type = 'cultural' + break + url += data_type + '/' + url += filename + '.zip' + return url + +def download(filename, in_dir): + url = generate_url(filename) + print('Url', url) + http = urllib3.PoolManager() + r = http.request('GET', url, preload_content=False) + chunk_size = 8192 + file_size_dl = 0 + fileSize = int(r.getheader("content-length")) + + with open(os.path.join(in_dir, filename + '.zip'), 'wb') as out: + while True: + data = r.read(chunk_size) + if data is None or len(data) == 0: + break + file_size_dl += len(data) + out.write(data) + print ("Downloading %s: %.1f/%.1f Mb (%3.1f%%)\r" % (filename, file_size_dl / 1024.0 / 1024.0, fileSize / 1024.0 / 1024.0, file_size_dl * 100. 
/ fileSize), end='')
+        r.release_conn()
+        out.close()
+    print ("Done")
+
+def parse_file(filename, in_dir):
+    level_info = {}
+    path = os.path.join(in_dir, filename)
+    print('Path', path)
+    f = open(path, 'r')
+    for line in f:
+        line = line.rstrip()
+        if len(line) == 0:
+            continue
+        if line[0] == '*':
+            level = int(line[1:])
+            level_info[level] = []
+        else:
+            level_info[level].append(line)
+    return level_info
+
+def check_existence(filename, in_dir):
+    path = os.path.join(in_dir, filename)
+    if not os.path.exists(path):
+        download(filename, in_dir)
+        unzip_file(filename, in_dir)
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description='Generates low level tiles for Marble using Natural Earth data')
+    parser.add_argument('file', help='a file listing the Natural Earth datasets to use for each tile level')
+    parser.add_argument('-i', '--in_dir', help='directory to read/process data from', default='.')
+    parser.add_argument('-o', '--out_dir', help='directory to write tiles to', default='.')
+    parser.add_argument('-c', '--cache', help='directory to store intermediate files in', default='.')
+    parser.add_argument('-r', '--refresh', type=int, default=-1, help='Re-download cached OSM base file if it is older than REFRESH days (-1: do not re-download)')
+    parser.add_argument('-ow', '--overwrite', action='store_true', help='Create tiles even if they exist already')
+    args = parser.parse_args()
+
+    exception_names = ['ne_50m_admin_1_states_provinces_lines']
+
+    level_info = parse_file(args.file, args.in_dir)
+    print("Level Info:", level_info)
+    for level in level_info:
+        abs_file_paths = []
+        for filename in level_info[level]:
+            print('Checking - {}'.format(filename))
+            if filename not in exception_names:
+                check_existence(filename, args.in_dir)
+                path = os.path.join(args.in_dir, filename) + '/' + filename + '.shp'
+                abs_file_paths.append(path)
+            else:
+                print('Download this data manually - {}'.format(filename))
+        print('Level has the following SHP datasets: ', abs_file_paths)
+        polyshp2osm.run(abs_file_paths, 1, 5000000, 'tiny_planet_{}'.format(level))
+        print('Tiny planet OSM file for level {} complete.'.format(level))
+        f = open('bound_info_{}'.format(level), "w")
+        print('tiny_planet_{}.1.osm;Level;-180.0;-86.0;180.0;86.0'.format(level), file=f)
+        f.close()
+        vectortilecreator.run(['bound_info_{}'.format(level)], args.cache, args.refresh, args.out_dir, args.overwrite, [level])
+
+
+
+
+
+
+
diff --git a/tools/shp2osm/polyshp2osm.py b/tools/shp2osm/polyshp2osm.py
index aac5041ed..90e7bb75f 100644
--- a/tools/shp2osm/polyshp2osm.py
+++ b/tools/shp2osm/polyshp2osm.py
@@ -1,771 +1,784 @@
#!/usr/bin/python3
"""
This script is designed to act as assistance in converting shapefiles
to OpenStreetMap data. This file is optimized and tested with MassGIS
shapefiles, converted to EPSG:4326 before being passed to the script.
You can perform this conversion with

   ogr2ogr -t_srs EPSG:4326 new_file.shp old_file.shp

It is expected that you will modify the fixed_tags, tag_mapping, and
boring_tags attributes of this script before running. You should read,
or at least skim, the code up until it says:

   DO NOT CHANGE AFTER THIS LINE.

to accommodate your own data.
"""

__author__ = "Christopher Schmidt "
__version__ = "$Id$"

gdal_install = """
Installing GDAL depends on your platform. Information is available at:

   http://trac.osgeo.org/gdal/wiki/DownloadingGdalBinaries

For Debian-based systems:

   apt-get install python-gdal

will usually suffice.
"""

# These tags are attached to all exterior ways.
You can put any key/value pairs # in this dictionary. fixed_tags = { 'source': 'Natural Earth (http://www.naturalearthdata.com/)', 'created_by': 'polyshp2osm' } feat_dict = {} node_dict = {} non_geom = 0 eflag = False nodes = [] #(id, lon, lat, tags) ways = [] #(id, node_refs, tags) relations = [] #(id, ways) non_polygons = ['Admin-1 aggregation', 'Admin-1 minor island', 'Admin-1 scale rank'] # Here are a number of functions: These functions define tag mappings. The API # For these functions is that they are passed the attributes from a feature, # and they return a list of two-tuples which match to key/value pairs. def access(data): """Access restrictions.""" keys = { 'Y': 'yes', 'N': 'private', 'L': 'restricted' } if 'pub_access' in data: if data['pub_access'] in keys: return [('access', keys[data['pub_access']])] return None def protection(data): keys = { 'P': 'perpetuity', 'T': 'temporary', 'L': 'limited', } if 'lev_prot' in data: if data['lev_prot'] in keys: return [('protected', keys[data['lev_prot']])] return None def owner_type(data): """See wiki:Key:ownership""" keys = { 'F': 'national', 'S': 'state', 'C': 'county', 'M': 'municipal', 'N': 'private_nonprofit', 'P': 'private', 'B': 'public_nonprofit', 'L': 'land_trust', 'G': 'conservation_rganization', 'I': 'inholding', } if 'owner_type' in data: if data['owner_type'] in keys: return [['ownership', keys[data['owner_type']]]] def purpose(data): """Based on a discussion on IRC""" keys = { 'R': [('leisure', 'recreation_ground')], 'C': [('leisure', 'nature_reserve'), ('landuse', 'conservation')], 'B': [('landuse','conservation'), ('leisure','recreation_ground')], 'H': [('historical', 'yes')], 'A': [('agricultural', 'yes'), ('landuse','farm')], 'W': [('landuse', 'resevoir')], 'S': [('scenic','yes')], 'F': [('landuse','land')], 'Q': [('landuse','conservation')], 'U': [('water','yes')] } if 'prim_purp' in data: if data['prim_purp'] in keys: return keys[data['prim_purp']] def road_map(data): keys = { #'Ferry Route': [('route','ferry')], 'Major Highway': [('highway','motorway')], 'Beltway': [('highway','primary')], #'Track': [('highway','tertiary')], #'Unknown': [('highway','unclassified')], #'Secondary Highway': [('highway','trunk')], #'Bypass': [('highway','secondary')], 'Road': [('highway','primary')] } if 'type' in data: if data['type'] in keys: return keys[data['type']] def city_map(data): population = 0 capital = 'no' country = '' if data['featurecla'] == 'Admin-0 capital' or data['featurecla'] == 'Admin-1 capital' or data['featurecla'] == 'Admin-0 region capital' or data['featurecla'] == 'Admin-1 region capital': capital = 'yes' if 'pop_max' in data: population = data['pop_max'] if 'adm0name' in data: country = data['adm0name'] temp = [('is_in:country', country), ('capital', capital), ('population', population), ('place', 'city') ] return temp def airport_map(data): global id_counter, file_counter, counter, file_name, open_file, namespace wid = id_counter - 1 print("" % wid, end = '\n', file = open_file) print(" " % i, end = '\n', file = open_file) if geom_name == 'POLYGON': print(" " % ids[0], end = '\n', file = open_file) write_tags(f) print("" , end = '\n', file = open_file) def mountain_map(data): elevation = 0 if 'elevation' in data: elevation = data['elevation'] temp = [('natural', 'peak'), ('ele', elevation)] return temp def feature_class(data): global non_fcla_dict keys = { 'Lake': [('natural', 'water')], 'Alkaline Lake': [('natural', 'water')], 'Reservoir': [('natural', 'water')], 'Road': [(road_map,None)], 'Ferry': 
[('route','ferry')], 'River': [('waterway', 'river')], 'Coastline': [('natural', 'coastline')], 'Minor coastline': [('natural', 'coastline')], 'Ocean': [('natural', 'water')], 'Land': [('marble_land', 'landmass')], 'Minor island': [('marble_land', 'landmass'), ('place', 'island')], 'Reefs': [('natural', 'reef')], 'Admin-0 country': [('marble_land', 'landmass')], 'Admin-0 sovereignty': [('marble_land', 'landmass')], 'Admin-0 map unit': [('marble_land', 'landmass')], 'Adm-0 scale ranks': [('marble_land', 'landmass')], 'International boundary (verify)': [('boundary', 'administrative'), ('admin_level', '2')], 'Overlay limit': [('boundary', 'administrative'), ('admin_level', '2')], 'Disputed (please verify)': [('boundary', 'administrative'), ('admin_level', '2')], 'Line of control (please verify)': [('boundary', 'administrative'), ('admin_level', '2')], 'Indefinite (please verify)': [('boundary', 'administrative'), ('admin_level', '2')], 'Lease limit': [('boundary', 'administrative'), ('admin_level', '2')], 'Indeterminant frontier': [('boundary', 'administrative'), ('admin_level', '2')], 'Admin-0 lease': [('marble_land', 'landmass')], 'Admin-0 claim area': [('marble_land', 'landmass')], 'Admin-0 breakaway and disputed': [('marble_land', 'landmass')], 'Admin-0 overlay': [('marble_land', 'landmass')], 'Admin-0 indeterminant': [('marble_land', 'landmass')], 'Admin-1 aggregation': [('boundary', 'administrative'), ('admin_level', '4')], 'Admin-1 minor island': [('boundary', 'administrative'), ('admin_level', '4')], 'Admin-1 scale rank': [('boundary', 'administrative'), ('admin_level', '4')], 'Railroad': [('railway', 'rail')], 'Railroad ferry': [('route', 'ferry')], 'Urban area': [('marble_land', 'landmass')], 'Timezone': [('marble_land', 'landmass')], 'Historic place': [(city_map,None)], 'Populated place': [(city_map,None)], 'Scientific station': [(city_map,None)], 'Meteorological Station': [(city_map,None)], 'Admin-0 capital': [(city_map,None)], 'Admin-1 capital': [(city_map,None)], 'Admin-0 region capital': [(city_map,None)], 'Admin-1 region capital': [(city_map,None)], 'Admin-0 capital alt': [(city_map,None)], 'Lake Centerline': [('natural', 'water')], 'Port': [('harbour', 'yes')], 'Island': [('marble_land', 'landmass'), ('place', 'island')], 'Island group': [('marble_land', 'landmass'), ('place', 'island')], 'Wetlands': [('natural', 'wetland')], 'Basin': [('landuse', 'basin')], 'Desert': [('natural', 'desert')], #no tag support in marble, no clue as to how to render 'Depression': [('natural', 'sinkhole')], #no tag support in marble, no clue as to how to render 'Range/mtn': [('marble_land', 'landmass')], # no clue as to how to render 'Geoarea': [('marble_land', 'landmass')], # no clue as to how to render 'Plain' : [('marble_land', 'landmass')], # no clue as to how to render 'Tundra': [('natural', 'tundra')], #no tag support in marble, no clue as to how to render 'Foothills' : [('marble_land', 'landmass')], #no tag support in OSM, nothing in Marble, no clue as to how to render 'Lowland' : [('marble_land', 'landmass')], #no tag support in OSM, nothing in Marble, no clue as to how to render 'Dragons-be-here': [('marble_land', 'landmass')], #no tag support in OSM, nothing in Marble, no clue as to how to render 'Delta': [('marble_land', 'landmass')], #no tag support in OSM, nothing in Marble, no clue as to how to render 'Peninsula': [('marble_land', 'landmass')], 'Plateau' : [('marble_land', 'landmass')], 'Pen/cape' : [('natural', 'cape')], #osm tag still in proposal stage, no clue as to how to render 
'Gorge': [('marble_land', 'landmass')], #no tag support in OSM, nothing in Marble, no clue as to how to render 'Coast': [('marble_land', 'landmass')], #no tag support in OSM, nothing in Marble, no clue as to how to render 'Continent': [('marble_land', 'landmass')], #no tag support in OSM, nothing in Marble, redundant data 'Isthmus': [('marble_land', 'landmass')], #no tag support in OSM, nothing in Marble, no clue as to how to render 'Valley': [('natural', 'valley')], #no tag support in marble 'waterfall' : [('waterway', 'waterfall')], #no tag support in marble 'cape': [('natural', 'cape')], #osm tag still in proposal stage, no clue as to how to render 'NONE' 'pole': [], 'plain': [], 'island group': [], 'island': [], 'mountain': [(mountain_map,None)], #this tag has to be mapped to Mountain feature of Marble 'spot elevation': [], 'plateau': [], 'depression': [], 'pass': [], 'sound': [('natural', 'water')], 'river': [('natural', 'water')], 'generic': [('natural', 'water')], #still have to confirm what generic means exactly in marine_polygons? 'lagoon': [('natural', 'water'), ('water', 'lagoon')], 'reef': [('natural', 'reef')], 'gulf': [('natural', 'water')], 'inlet': [('natural', 'water')], 'strait': [('natural', 'water')], 'bay': [('natural', 'water')], 'fjord': [('natural', 'water')], 'sea': [('natural', 'water')], 'ocean': [('natural', 'water')], 'channel': [('natural', 'water')], 'Playa': [('natural', 'water')], 'Antarctic Ice Shelf': [(' glacier:type','shelf'), ('natural', 'glacier')], #Marble does not support the glacier:type tag 'Antarctic Ice Shelf Edge': [('glacier:edge', 'calving_line')], #marble does not support this osm tag 'Glaciated areas': [('natural', 'glacier')], 'Admin-0 Tiny Countries': [], 'Admin-0 Tiny GeoUnit': [], 'Admin-0 Tiny GeoSubunit': [], 'Admin-0 Tiny Countries Pacific': [], 'Pacific Groupings': [], 'Admin-1 boundary': [('boundary', 'administrative'), ('admin_level', '4')], 'Map unit boundary':[], 'Marine Indicator Treaty':[], 'Marine Indicator Median':[], 'Boundary Treaty':[], 'Marine Indicator 200 mi nl':[], 'Marine Indicator Disputed':[], 'Claim boundary': [('boundary', 'administrative'), ('admin_level', '2')], 'Reference line': [('boundary', 'administrative'), ('admin_level', '2')], 'Breakaway': [('boundary', 'administrative'), ('admin_level', '2')], 'Elusive frontier': [('boundary', 'administrative'), ('admin_level', '2')], 'Country': [('marble_land', 'landmass')], '1st Order Admin Lines': [('boundary', 'administrative'), ('admin_level', '4')] #aeroway" v="aerodrome" } if 'featurecla' in data: if data['featurecla'] in feat_dict: feat_dict[data['featurecla']] += 1 else: feat_dict[data['featurecla']] = 1 if data['featurecla'] in keys: if len(keys[data['featurecla']]) == 0: return keys[data['featurecla']] if hasattr(keys[data['featurecla']][0][0], '__call__'): return keys[data['featurecla']][0][0](data) else: return keys[data['featurecla']] else: if data['featurecla'] in non_fcla_dict: non_fcla_dict[data['featurecla']] += 1 else: non_fcla_dict[data['featurecla']] = 1 def name_map(data): if 'name' in data: return [('name', data['name'])] def name_tags(data): """This function returns two things: a 'pretty' name to use, and may return a landuse of either 'cemetery' or 'forest' if the name contains those words; based on evaluation the dataset in question.""" tags = [] name = data.get('site_name', None) if not name: return name = name.title() if "cemetery" in name.lower(): tags.append(['landuse', 'cemetery']) elif "forest" in name.lower(): tags.append(['landuse', 
'forest']) tags.append(['name', name]) return tags def cal_date(data): """Return YYYY-MM-DD or YYYY formatted dates, based on (m)m/(d)d/yyyy dates""" date = data.get('cal_date_r', None) if not date: return try: m, d, y = map(int, date.split("/")) if m == 1 and d == 1: return [['start_date', '%4i' % y]] return [['start_date', '%04i-%02i-%02i' % (y, m, d)]] except: print("Invalid date: %s" % date) eflag = True return None # The most important part of the code: define a set of key/value pairs # to iterate over to generate keys. This is a list of two-tuples: first # is a 'key', which is only used if the second value is a string. In # that case, it is a map of lowercased fielnames to OSM tag names: so # fee_owner maps to 'owner' in the OSM output. # if the latter is callable (has a __call__; is a function), then that # method is called, passing in a dict of feature attributes with # lowercased key names. Those functions can then return a list of # two-tuples to be used as tags, or nothin' to skip the tags. tag_mapping = [ ('fee_owner', 'owner'), ('cal_date', cal_date), ('pub_access', access), ('lev_prot', protection), ('owner_type', owner_type), ('prim_purp', purpose), ('site_name', name_tags), ('featurecla', feature_class), ('name', name_map) ] # These tags are not exported, even with the source data; this should be # used for tags which are usually calculated in a GIS. AREA and LEN are # common. boring_tags = [ 'AREA', 'LEN', 'GIS_ACRES'] # Namespace is used to prefix existing data attributes. If 'None', or # '--no-source' is set, then source attributes are not exported, only # attributes in tag_mapping. namespace = "natural_earth" #namespace = None # Uncomment the "DONT_RUN = False" line to get started. #DONT_RUN = True DONT_RUN = False # =========== DO NOT CHANGE AFTER THIS LINE. =========================== # Below here is regular code, part of the file. This is not designed to # be modified by users. # ====================================================================== import sys try: try: from osgeo import ogr except ImportError: import ogr eflag = True except ImportError: __doc__ += gdal_install if DONT_RUN: print(__doc__) sys.exit(2) print("OGR Python Bindings not installed.\n%s" % gdal_install) sys.exit(1) eflag = True def close_file(): """ Internal. Close an open file.""" global open_file if not open_file.closed: open_file.write("") open_file.close() def start_new_file(): """ Internal. Open a new file, closing existing file if neccesary.""" global open_file, file_counter, node_dict file_counter += 1 if open_file: close_file() open_file = open("%s.%s.osm" % (file_name, file_counter), "w") print("" , end = '\n', file = open_file) print("" , end = '\n', file = open_file) node_dict = {} def clean_attr(val): """Internal. 
Hacky way to make attribute XML safe.""" val = str(val) val = val.replace("&", "&").replace("'", """).replace("<", "<").replace(">", ">").strip() return val def add_point(f): """Adds a point geometry to the OSM file""" global id_counter pt = f.GetGeometryRef() node_id = add_node(id_counter, pt.GetX(0), pt.GetY(0), 'POINT', f) if node_id == id_counter: id_counter += 1 def add_relation_multipolygon(geom, f): """ Writes the multipolygon relation to the OSM file, returns 0 if no relation is formed""" global id_counter, file_counter, counter, file_name, open_file, namespace rel_ways = [] rel_id = 0 way_id = add_way(geom.GetGeometryRef(0), f, True) if way_id == None: print('Error in writing relation') return None rel_ways.append(way_id) if geom.GetGeometryCount() > 1: for i in range(1, geom.GetGeometryCount()): way_id = add_way(geom.GetGeometryRef(i), f, False) if way_id == None: print('Error in writing relation') return None rel_ways.append(way_id) rel_id = id_counter if f['featurecla'] in non_polygons: return 0 #means no relation is there relations.append((rel_id, rel_ways)) id_counter += 1 return rel_id #if rel_id return 0, means no relations is there def write_relation_multipolygon(relation): global open_file print("" % relation[0] , end = '\n', file = open_file) print('' % relation[1][0] , end = '\n', file = open_file) for way in relation[1][1:]: print('' % way , end = '\n', file = open_file) print("" , end = '\n', file = open_file) def write_tags(f): """Writes the tags associated with a way or a relation""" global id_counter, file_counter, counter, file_name, open_file, namespace field_count = f.GetFieldCount() fields = {} for field in range(field_count): value = f.GetFieldAsString(field) name = f.GetFieldDefnRef(field).GetName() if namespace and name and value and name not in boring_tags: print(" " % (namespace, name, clean_attr(value)) , end = '\n', file = open_file) fields[name.lower()] = value tags={} for tag_name, map_value in tag_mapping: if hasattr(map_value, '__call__'): tag_values = map_value(fields) if tag_values: for tag in tag_values: tags[tag[0]] = tag[1] else: if tag_name in fields: tags[map_value] = fields[tag_name].title() for key, value in tags.items(): if key and value: print(" " % (key, clean_attr(value)) , end = '\n', file = open_file) for name, value in fixed_tags.items(): print(" " % (name, clean_attr(value)) , end = '\n', file = open_file) if f.GetGeometryRef().GetGeometryName() == 'POLYGON' or f.GetGeometryRef().GetGeometryName() == 'MULTIPOLYGON': if f['featurecla'] not in non_polygons: print(" " , end = '\n', file = open_file) def add_way(geom, f, tag_flag): """ Writes the way of a particular geometry to the OSM file""" global open_file, id_counter, ways ids = add_way_nodes(geom, f) if len(ids) == 0: print('Error in writing way') return None way_id = id_counter id_counter += 1 node_refs = ids if tag_flag: tags = f else: tags = None ways.append((way_id, node_refs, tags)) return way_id def write_way(way): global open_file print("" % way[0] , end = '\n', file = open_file) for i in way[1]: print(" " % i , end = '\n', file = open_file) if way[2]: write_tags(way[2]) print("", end = '\n', file = open_file) def add_way_nodes(geom, f): """Writes the nodes of a particular way""" global open_file, id_counter ids = [] geom_name = geom.GetGeometryName() pt_count = geom.GetPointCount() if geom_name == 'LINESTRING': range_count = range(geom.GetPointCount()) else: range_count = range(geom.GetPointCount() - 1) if range_count == 0 or pt_count == 0: print( "Degenerate ", geom_name , 
end = '\n', file = sys.stderr) return #if geom_name != 'LINESTRING': # pt_count -= 1 for count in range(pt_count): node_id = add_node(id_counter, geom.GetX(count), geom.GetY(count), geom_name, f) if node_id == id_counter: #means a new node is created, if not means node already exists id_counter += 1 ids.append(node_id) return ids def add_node(num_id, lon, lat, geom_name, f): """ Writes the node to the OSM file""" global open_file, node_dict key = (lon, lat) if geom_name == 'POINT': nodes.append((num_id, lon, lat, f)) node_dict[key] = num_id else: if key in node_dict: num_id = node_dict[key] else: nodes.append((num_id, lon, lat, None)) node_dict[key] = num_id return num_id def write_node(node): global open_file if node[3] == None: print("" % (node[0], node[1], node[2]), end = '\n', file = open_file) else: print("" % (node[0], node[1], node[2]), end = '\n', file = open_file) write_tags(node[3]) print("", end = '\n', file = open_file) def add_way_around_node(f): """ Writes a way around a single point""" global id_counter, open_file nid = id_counter - 1 print("" % id_counter, end = '\n', file = open_file) id_counter += 1 print(" " % nid, end = '\n', file = open_file) tag_dict = {} tag_dict['name'] = f['name'] if f['featurecla'] == 'Airport': tag_dict['aeroway'] = 'aerodrome' for key in tag_dict: print(" " % (key, clean_attr(tag_dict[key])), end = '\n', file = open_file) print("", end = '\n', file = open_file) open_file = None file_name = None id_counter = 1 file_counter = 0 counter = 0 geom_counter = {} class AppError(Exception): pass def run(filenames, slice_count=1, obj_count=5000000, output_location=None, no_source=False): """Run the converter. Requires open_file, file_name, id_counter, file_counter, counter to be defined in global space; not really a very good singleton.""" - global id_counter, file_counter, counter, file_name, open_file, namespace, non_geom, non_fcla_dict + global id_counter, file_counter, counter, file_name, open_file, namespace, non_geom, non_fcla_dict, nodes, ways, relations + open_file = None + + file_name = None + + id_counter = 1 + + file_counter = 0 + counter = 0 + + geom_counter = {} if output_location: file_name = output_location start_new_file() for filename in filenames: non_geom = 0 non_fcla_dict = {} if no_source: namespace=None ds = ogr.Open(filename) if not ds: raise AppError("OGR Could not open the file %s" % filename) eflag = True l = ds.GetLayer(0) max_objs_per_file = obj_count extent = l.GetExtent() #if extent[0] < -180 or extent[0] > 180 or extent[2] < -90 or extent[2] > 90: # raise AppError("Extent does not look like degrees; are you sure it is? 
\n(%s, %s, %s, %s)" % (extent[0], extent[2], extent[1], extent[3])) slice_width = (extent[1] - extent[0]) / slice_count seen = {} print("Running %s slices with %s base filename against shapefile %s" % ( slice_count, file_name, filename)) for i in range(slice_count): l.ResetReading() l.SetSpatialFilterRect(extent[0] + slice_width * i, extent[2], extent[0] + (slice_width * (i + 1)), extent[3]) #start_new_file() f = l.GetNextFeature() obj_counter = 0 last_obj_split = 0 while f: start_id_counter = id_counter if f.GetFID() in seen: f = l.GetNextFeature() continue seen[f.GetFID()] = True if (obj_counter - last_obj_split) > max_objs_per_file: print("Splitting file with %s objs" % (obj_counter - last_obj_split)) start_new_file() last_obj_split = obj_counter feat_dict = f.items() geom = f.GetGeometryRef() geom_name = geom.GetGeometryName() if geom_name in geom_counter: geom_counter[geom_name] += 1 else: geom_counter[geom_name] = 1 if geom_name == 'POLYGON': rel_id = add_relation_multipolygon(geom, f) if rel_id == None: f = l.GetNextFeature() continue elif geom_name == 'LINESTRING': way_id = add_way(geom, f, True) if way_id == None: f = l.GetNextFeature() continue elif geom_name == 'MULTILINESTRING': for i in range(geom.GetGeometryCount()): way_id = add_way(geom.GetGeometryRef(i), f, True) if way_id == None: f = l.GetNextFeature() continue elif geom_name == 'MULTIPOLYGON': for i in range(geom.GetGeometryCount()): rel_id = add_relation_multipolygon(geom.GetGeometryRef(i), f) if rel_id == None: f = l.GetNextFeature() continue elif geom_name == 'POINT': add_point(f) if f['featurecla'] == 'Airport': add_way_around_node(f) else: ids = [] non_geom += 1 counter += 1 f = l.GetNextFeature() obj_counter += (id_counter - start_id_counter) for node in nodes: write_node(node) for way in ways: write_way(way) for relation in relations: write_relation_multipolygon(relation) close_file() + nodes = [] #(id, lon, lat, tags) + ways = [] #(id, node_refs, tags) + relations = [] #(id, ways) if __name__ == "__main__": if DONT_RUN: print(__doc__) sys.exit(2) from optparse import OptionParser parse = OptionParser(usage="%prog [args] filename.shp", version=__version__) parse.add_option("-s", "--slice-count", dest="slice_count", help="Number of horizontal slices of data", default=1, action="store", type="int") parse.add_option("-o", "--obj-count", dest="obj_count", help="Target Maximum number of objects in a single .osm file", default=5000000, type="int") parse.add_option("-n", "--no-source", dest="no_source", help="Do not store source attributes as tags.", action="store_true", default=False) parse.add_option("-l", "--output-location", dest="output_location", help="base filepath for output files.", default="poly_output") (options, args) = parse.parse_args() if not len(args): print("No shapefile name given!") parse.print_help() sys.exit(3) kw = {} for key in ('slice_count', 'obj_count', 'output_location', 'no_source'): kw[key] = getattr(options, key) try: run(args, **kw) except AppError as E: print("An error occurred: \n%s" % E) eflag = True print() print('Geometry types present: ') for key in geom_counter: print(key, geom_counter[key]) print() print('Feature type present: ') for key in feat_dict: print(key, feat_dict[key]) print() if eflag: print('Conversion not Successful :') else: if len(non_fcla_dict) == 0 and non_geom == 0: print('Conversion Successful') else: print('Conversion not Successful :') if len(non_fcla_dict) != 0: print('Unknown features present in SHP file: ', len(non_fcla_dict)) print() for key in 
non_fcla_dict: print(key, non_fcla_dict[key]) if non_geom != 0: print('Unknown geometry present in SHP file: ', non_geom) \ No newline at end of file diff --git a/tools/vectortilecreator/vectortilecreator.py b/tools/vectortilecreator/vectortilecreator.py index b7980d9b9..a5436124b 100755 --- a/tools/vectortilecreator/vectortilecreator.py +++ b/tools/vectortilecreator/vectortilecreator.py @@ -1,122 +1,126 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- # # This file is part of the Marble Virtual Globe. # # This program is free software licensed under the GNU LGPL. You can # find a copy of this license in LICENSE.txt in the top directory of # the source code. # # Copyright 2015 Dennis Nienhüser # """ Creates vector tiles (small .o5m files with standardized path/filenames) from larger osm (pbf) files """ import sys import os import math import csv import time from subprocess import call import argparse import urllib3 def deg2num(lat_deg, lon_deg, zoom): lat_rad = math.radians(lat_deg) n = 2.0 ** zoom xtile = int((lon_deg + 180.0) / 360.0 * n) ytile = int((1.0 - math.log(math.tan(lat_rad) + (1 / math.cos(lat_rad))) / math.pi) / 2.0 * n) return (xtile, ytile) def num2deg(xtile, ytile, zoom): n = 2.0 ** zoom lon_deg = xtile / n * 360.0 - 180.0 lat_rad = math.atan(math.sinh(math.pi * (1 - 2 * ytile / n))) lat_deg = math.degrees(lat_rad) return (lat_deg, lon_deg) def download(url, directory, refresh): filename = url.split('/')[-1] path = os.path.join(directory, filename) if os.path.exists(path): if refresh >= 0: aDay = 60 * 60 * 24 age = (time.time() - os.path.getmtime(path)) / aDay if age < refresh: return filename # else download again else: return filename - http = urllib3.PoolManager() r = http.request('GET', url, preload_content=False) chunk_size = 8192 file_size_dl = 0 fileSize = int(r.getheader("content-length")) with open(os.path.join(directory, filename), 'wb') as out: while True: data = r.read(chunk_size) if data is None or len(data) == 0: break file_size_dl += len(data) out.write(data) print ("Downloading %s: %.1f/%.1f Mb (%3.1f%%)\r" % (filename, file_size_dl / 1024.0 / 1024.0, fileSize / 1024.0 / 1024.0, file_size_dl * 100. 
/ fileSize), end='') r.release_conn() out.close() print ("Done") return filename -if __name__ == "__main__": - parser = argparse.ArgumentParser(description='Create OSM Vector Tiles for Marble') - parser.add_argument('file', nargs='+', help='a file with semicolon separated lines in the form filename.osm.pbf;Area Name;west;south;east;north') - parser.add_argument('-o', '--overwrite', action='store_true', help='Create tiles even if they exist already') - parser.add_argument('-d', '--directory', help='directory to write tiles to', default='.') - parser.add_argument('-c', '--cache', help='directory to store intermediate files in', default='.') - parser.add_argument('-r', '--refresh', type=int, default=-1, help='Re-download cached OSM base file if it is older than REFRESH days (-1: do not re-download)') - parser.add_argument('-z', '--zoomLevels', type=int, nargs='+', help='zoom levels to generate', default=[13,15,17]) - args = parser.parse_args() - - for csvfilename in args.file: +def run(filenames, cache, refresh, directory, overwrite, zoomLevels): + for csvfilename in filenames: with open(csvfilename, 'r') as csvfile: reader = csv.reader(csvfile, delimiter=';', quotechar='|') for bounds in reader: - filename = download(bounds[0], args.cache, args.refresh) - for zoom in args.zoomLevels: + filename = download(bounds[0], cache, refresh) + for zoom in zoomLevels: bottomLeft = deg2num(float(bounds[3]), float(bounds[2]), zoom) topRight = deg2num(float(bounds[5]), float(bounds[4]), zoom) xDiff = topRight[0]-bottomLeft[0] yDiff = bottomLeft[1]-topRight[1] total = xDiff*yDiff count = 0 - cutted = "{}/{}.{}-{}-{}-{}.osm.o5m".format(args.cache, filename, bounds[2], bounds[3], bounds[4], bounds[5]) + cutted = "{}/{}.{}-{}-{}-{}.osm.o5m".format(cache, filename, bounds[2], bounds[3], bounds[4], bounds[5]) if not os.path.exists(cutted): print ("Creating cut out region {}".format(cutted)) - call(["osmconvert", "-t={}/osmconvert_tmp-".format(args.cache), "--complete-ways", "--complex-ways", "--drop-version", "-b={},{},{},{}".format(bounds[2], bounds[3], bounds[4], bounds[5]), "-o={}".format(cutted), os.path.join(args.cache, filename)]) + call(["osmconvert", "-t={}/osmconvert_tmp-".format(cache), "--complete-ways", "--complex-ways", "--drop-version", "-b={},{},{},{}".format(bounds[2], bounds[3], bounds[4], bounds[5]), "-o={}".format(cutted), os.path.join(cache, filename)]) for x in range(1+bottomLeft[0], topRight[0]+1): for y in range(1+topRight[1], bottomLeft[1]+1): count += 1 tl = num2deg(x-1, y-1, zoom) br = num2deg(x, y, zoom) - path = "{}/{}/{}".format(args.directory, zoom, x-1) + path = "{}/{}/{}".format(directory, zoom, x-1) target = "{}.o5m".format(y-1) filterTarget = "{}_tmp.o5m".format(y-1) - if not args.overwrite and os.path.exists(os.path.join(path, target)): + if not overwrite and os.path.exists(os.path.join(path, target)): print("Skipping existing file {}\r".format(os.path.join(path, target)), end='') else: call(["mkdir", "-p", path]) print ("{} level {}: {}/{} {}\r".format(bounds[1], zoom, count, total, os.path.join(path, target)), end='') filterLevel = "levels/{}.level".format(zoom) if os.path.exists(filterLevel): - call(["osmconvert", "-t={}/osmconvert_tmp-".format(args.cache), "--complete-ways", "--complex-ways", "--drop-version", "-b={},{},{},{}".format(tl[1],br[0],br[1],tl[0]), cutted, "-o={}".format(os.path.join(path, filterTarget))]) + call(["osmconvert", "-t={}/osmconvert_tmp-".format(cache), "--complete-ways", "--complex-ways", "--drop-version", 
"-b={},{},{},{}".format(tl[1],br[0],br[1],tl[0]), cutted, "-o={}".format(os.path.join(path, filterTarget))]) call(["osmfilter", "--parameter-file={}".format(filterLevel), os.path.join(path, filterTarget), "-o={}".format(os.path.join(path, target))]) os.remove(os.path.join(path, filterTarget)) else: - call(["osmconvert", "-t={}/osmconvert_tmp-".format(args.cache), "--complete-ways", "--complex-ways", "--drop-version", "-b={},{},{},{}".format(tl[1],br[0],br[1],tl[0]), cutted, "-o={}".format(os.path.join(path, target))]) + call(["osmconvert", "-t={}/osmconvert_tmp-".format(cache), "--complete-ways", "--complex-ways", "--drop-version", "-b={},{},{},{}".format(tl[1],br[0],br[1],tl[0]), cutted, "-o={}".format(os.path.join(path, target))]) call(["chmod", "644", os.path.join(path, target)]) + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description='Create OSM Vector Tiles for Marble') + parser.add_argument('file', nargs='+', help='a file with semicolon separated lines in the form filename.osm.pbf;Area Name;west;south;east;north') + parser.add_argument('-o', '--overwrite', action='store_true', help='Create tiles even if they exist already') + parser.add_argument('-d', '--directory', help='directory to write tiles to', default='.') + parser.add_argument('-c', '--cache', help='directory to store intermediate files in', default='.') + parser.add_argument('-r', '--refresh', type=int, default=-1, help='Re-download cached OSM base file if it is older than REFRESH days (-1: do not re-download)') + parser.add_argument('-z', '--zoomLevels', type=int, nargs='+', help='zoom levels to generate', default=[13,15,17]) + args = parser.parse_args() + run(args.file, args.cache, args.refresh, args.directory, args.overwrite) + + \ No newline at end of file