Mirror of https://github.com/protomaps/PMTiles.git, synced 2026-02-04 02:41:09 +00:00
implement pmtiles-to-mbtiles and pmtiles-to-dir conversions; don't delete redundant metadata keys (center, minzoom, etc)
@@ -4,7 +4,7 @@ import json
 import os
 import sqlite3
 from pmtiles.writer import write
-from pmtiles.reader import Reader, MmapSource
+from pmtiles.reader import Reader, MmapSource, all_tiles
 from .tile import zxy_to_tileid, tileid_to_zxy, TileType, Compression


@@ -12,23 +12,19 @@ def mbtiles_to_header_json(mbtiles_metadata):
     header = {}

     header["min_zoom"] = int(mbtiles_metadata["minzoom"])
-    del mbtiles_metadata["minzoom"]

     header["max_zoom"] = int(mbtiles_metadata["maxzoom"])
-    del mbtiles_metadata["maxzoom"]

     bounds = mbtiles_metadata["bounds"].split(",")
     header["min_lon_e7"] = int(float(bounds[0]) * 10000000)
     header["min_lat_e7"] = int(float(bounds[1]) * 10000000)
     header["max_lon_e7"] = int(float(bounds[2]) * 10000000)
     header["max_lat_e7"] = int(float(bounds[3]) * 10000000)
-    del mbtiles_metadata["bounds"]

     center = mbtiles_metadata["center"].split(",")
     header["center_lon_e7"] = int(float(center[0]) * 10000000)
     header["center_lat_e7"] = int(float(center[1]) * 10000000)
     header["center_zoom"] = int(center[2])
-    del mbtiles_metadata["center"]

     tile_format = mbtiles_metadata["format"]
     if tile_format == "pbf":
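Note: the `*_e7` header fields store coordinates as fixed-point integers (degrees scaled by 10,000,000); the minzoom/maxzoom/bounds/center keys are now kept in the JSON metadata as well rather than deleted. A quick sanity check of the e7 arithmetic, using a hypothetical bounds value:

    bounds = "-180.0,-85.0,180.0,85.0".split(",")  # hypothetical MBTiles "bounds" string
    min_lon_e7 = int(float(bounds[0]) * 10000000)  # -1800000000
    max_lat_e7 = int(float(bounds[3]) * 10000000)  # 850000000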
@@ -92,48 +88,72 @@ def mbtiles_to_pmtiles(input, output, maxzoom):


 def pmtiles_to_mbtiles(input, output):
-    pass
-    # conn = sqlite3.connect(output)
-    # cursor = conn.cursor()
-    # cursor.execute("CREATE TABLE metadata (name text, value text);")
-    # cursor.execute(
-    #     "CREATE TABLE tiles (zoom_level integer, tile_column integer, tile_row integer, tile_data blob);"
-    # )
+    conn = sqlite3.connect(output)
+    cursor = conn.cursor()
+    cursor.execute("CREATE TABLE metadata (name text, value text);")
+    cursor.execute(
+        "CREATE TABLE tiles (zoom_level integer, tile_column integer, tile_row integer, tile_data blob);"
+    )

-    # with open(input, "r+b") as f:
-    #     source = MmapSource(f)
-    #     reader = Reader(source)
-    #     metadata = reader.header().metadata
-    #     for k, v in metadata.items():
-    #         cursor.execute("INSERT INTO metadata VALUES(?,?)", (k, v))
-    #     for tile, data in reader.tiles():
-    #         flipped = (1 << tile[0]) - 1 - tile[2]
-    #         cursor.execute(
-    #             "INSERT INTO tiles VALUES(?,?,?,?)",
-    #             (tile[0], tile[1], flipped, force_compress(data, gzip)),
-    #         )
+    with open(input, "r+b") as f:
+        source = MmapSource(f)

-    # cursor.execute(
-    #     "CREATE UNIQUE INDEX tile_index on tiles (zoom_level, tile_column, tile_row);"
-    # )
-    # conn.commit()
-    # conn.close()
+        reader = Reader(source)
+        header = reader.header()
+        metadata = reader.metadata()
+
+        if "minzoom" not in metadata:
+            metadata["minzoom"] = header["min_zoom"]
+
+        if "maxzoom" not in metadata:
+            metadata["maxzoom"] = header["max_zoom"]
+
+        if "bounds" not in metadata:
+            min_lon = header["min_lon_e7"] / 10000000
+            min_lat = header["min_lat_e7"] / 10000000
+            max_lon = header["max_lon_e7"] / 10000000
+            max_lat = header["max_lat_e7"] / 10000000
+            metadata["bounds"] = f"{min_lon},{min_lat},{max_lon},{max_lat}"
+
+        if "center" not in metadata:
+            center_lon = header["center_lon_e7"] / 10000000
+            center_lat = header["center_lat_e7"] / 10000000
+            center_zoom = header["center_zoom"]
+            metadata["center"] = f"{center_lon},{center_lat},{center_zoom}"
+
+        if "format" not in metadata:
+            if header["tile_type"] == TileType.MVT:
+                metadata["format"] = "pbf"
+
+        for k, v in metadata.items():
+            cursor.execute("INSERT INTO metadata VALUES(?,?)", (k, v))
+
+        for zxy, tile_data in all_tiles(source):
+            flipped_y = (1 << zxy[0]) - 1 - zxy[2]
+            cursor.execute(
+                "INSERT INTO tiles VALUES(?,?,?,?)",
+                (zxy[0], zxy[1], flipped_y, tile_data),
+            )
+
+    cursor.execute(
+        "CREATE UNIQUE INDEX tile_index on tiles (zoom_level, tile_column, tile_row);"
+    )
+    conn.commit()
+    conn.close()


 def pmtiles_to_dir(input, output):
-    pass
-    # os.makedirs(output)
+    os.makedirs(output)
+    with open(input, "r+b") as f:
+        source = MmapSource(f)

-    # with open(input, "r+b") as f:
-    #     source = MmapSource(f)
-    #     reader = Reader(source)
-    #     metadata = reader.header().metadata
-    #     with open(os.path.join(output, "metadata.json"), "w") as f:
-    #         f.write(json.dumps(metadata))
+        reader = Reader(source)
+        with open(os.path.join(output, "metadata.json"), "w") as f:
+            f.write(json.dumps(reader.metadata()))

-    # for tile, data in reader.tiles():
-    #     directory = os.path.join(output, str(tile[0]), str(tile[1]))
-    #     path = os.path.join(directory, str(tile[2]) + "." + metadata["format"])
-    #     os.makedirs(directory, exist_ok=True)
-    #     with open(path, "wb") as f:
-    #         f.write(force_compress(data, gzip))
+        for zxy, tile_data in all_tiles(source):
+            directory = os.path.join(output, str(zxy[0]), str(zxy[1]))
+            path = os.path.join(directory, str(zxy[2]) + "." + "mvt")
+            os.makedirs(directory, exist_ok=True)
+            with open(path, "wb") as f:
+                f.write(tile_data)
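Note: MBTiles stores `tile_row` in the TMS (flipped-Y) scheme, hence `flipped_y = (1 << z) - 1 - y`; for example z=3, y=2 maps to row (1 << 3) - 1 - 2 = 5. A minimal usage sketch of the two new conversions (the file names are hypothetical, and the module is assumed to be importable as `pmtiles.convert`):

    from pmtiles.convert import pmtiles_to_mbtiles, pmtiles_to_dir

    # SQLite archive: metadata table plus TMS-ordered tiles table
    pmtiles_to_mbtiles("example.pmtiles", "example.mbtiles")

    # directory tree: metadata.json plus z/x/y.mvt files
    pmtiles_to_dir("example.pmtiles", "example_dir")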
@@ -4,6 +4,7 @@ from .tile import (
     deserialize_header,
+    deserialize_directory,
     zxy_to_tileid,
     tileid_to_zxy,
     find_tile,
     Compression,
 )
@@ -56,3 +57,25 @@ class Reader:
         return self.get_bytes(
             header["tile_data_offset"] + result.offset, result.length
         )
+
+
+def traverse(get_bytes, header, dir_offset, dir_length):
+    entries = deserialize_directory(get_bytes(dir_offset, dir_length))
+    for entry in entries:
+        if entry.run_length > 0:
+            yield tileid_to_zxy(entry.tile_id), get_bytes(
+                header["tile_data_offset"] + entry.offset, entry.length
+            )
+        else:
+            for t in traverse(
+                get_bytes,
+                header,
+                header["leaf_directory_offset"] + entry.offset,
+                entry.length,
+            ):
+                yield t
+
+
+def all_tiles(get_bytes):
+    header = deserialize_header(get_bytes(0, 127))
+    return traverse(get_bytes, header, header["root_offset"], header["root_length"])
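Note: `all_tiles` deserializes the 127-byte header, then `traverse` walks the root directory and recurses into leaf directories, yielding `((z, x, y), tile_bytes)` pairs. A minimal sketch of iterating an archive this way, mirroring how the convert code uses it (the file name is hypothetical):

    from pmtiles.reader import MmapSource, all_tiles

    with open("example.pmtiles", "r+b") as f:
        source = MmapSource(f)
        for zxy, tile_data in all_tiles(source):
            z, x, y = zxy
            print(z, x, y, len(tile_data))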
@@ -131,3 +131,4 @@ class Writer:
         self.f.write(leaves_bytes)
         self.tile_f.seek(0)
         shutil.copyfileobj(self.tile_f, self.f)
+        self.tile_f.close()
@@ -27,21 +27,34 @@ class TestConvert(unittest.TestCase):
         pass

     def test_roundtrip(self):
-        pass
-        # with open("test_tmp.pmtiles", "wb") as f:
-        # writer = Writer(f, 7)
-        # writer.write_tile(1, 0, 0, b"0")
-        # writer.write_tile(1, 0, 1, b"1")
-        # writer.write_tile(1, 1, 0, b"2")
-        # writer.write_tile(1, 1, 1, b"3")
-        # writer.write_tile(2, 0, 0, b"4")
-        # writer.write_tile(3, 0, 0, b"5")
-        # writer.write_tile(2, 0, 1, b"6")
-        # writer.write_tile(3, 0, 2, b"7")
-        # writer.finalize({"key": "value"})
+        with open("test_tmp.pmtiles", "wb") as f:
+            writer = Writer(f)
+            writer.write_tile(0, b"0")
+            writer.write_tile(1, b"1")
+            writer.write_tile(2, b"2")
+            writer.write_tile(3, b"3")
+            writer.write_tile(4, b"4")
+            writer.write_tile(5, b"5")
+            writer.write_tile(6, b"6")
+            writer.write_tile(7, b"7")
+            writer.finalize(
+                {
+                    "tile_type": TileType.MVT,
+                    "min_zoom": 0,
+                    "max_zoom": 2,
+                    "min_lon_e7": 0,
+                    "max_lon_e7": 0,
+                    "min_lat_e7": 0,
+                    "max_lat_e7": 0,
+                    "center_zoom": 0,
+                    "center_lon_e7": 0,
+                    "center_lat_e7": 0,
+                },
+                {"": "value"},
+            )

-        # pmtiles_to_mbtiles("test_tmp.pmtiles", "test_tmp.mbtiles", False)
-        # mbtiles_to_pmtiles("test_tmp.mbtiles", "test_tmp_2.pmtiles", 3, False)
+        pmtiles_to_mbtiles("test_tmp.pmtiles", "test_tmp.mbtiles")
+        mbtiles_to_pmtiles("test_tmp.mbtiles", "test_tmp_2.pmtiles", 3)

     def test_mbtiles_header(self):
         header, json_metadata = mbtiles_to_header_json(
@@ -73,6 +86,3 @@ class TestConvert(unittest.TestCase):
         self.assertTrue("name" in json_metadata)
         self.assertTrue("format" in json_metadata)
         self.assertTrue("compression" in json_metadata)
-        self.assertFalse("center" in json_metadata)
-        self.assertFalse("bounds" in json_metadata)
-        self.assertFalse("bounds" in json_metadata)