Merge pull request #206 from protomaps/python-writer-improvements

python writer improvements
Brandon Liu
2023-06-23 11:39:51 +08:00
committed by GitHub
5 changed files with 76 additions and 41 deletions

View File

@@ -274,12 +274,16 @@ def serialize_header(h):
write_uint8(h["tile_type"].value)
write_uint8(h["min_zoom"])
write_uint8(h["max_zoom"])
write_int32(h["min_lon_e7"])
write_int32(h["min_lat_e7"])
write_int32(h["max_lon_e7"])
write_int32(h["max_lat_e7"])
write_uint8(h["center_zoom"])
write_int32(h["center_lon_e7"])
write_int32(h["center_lat_e7"])
min_lon_e7 = h.get("min_lon_e7",-180)
write_int32(min_lon_e7)
min_lat_e7 = h.get("min_lat_e7",-90)
write_int32(min_lat_e7)
max_lon_e7 = h.get("max_lon_e7",180)
write_int32(max_lon_e7)
max_lat_e7 = h.get("max_lat_e7",90)
write_int32(max_lat_e7)
write_uint8(h.get("center_zoom",h["min_zoom"]))
write_int32(h.get("center_lon_e7", round((min_lon_e7 + max_lon_e7) / 2)))
write_int32(h.get("center_lat_e7", round((min_lat_e7 + max_lat_e7) / 2)))
return b_io.getvalue()
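
With these fallbacks, bounds, center, and center zoom become optional header fields: missing bounds default to the whole-world extent, center_zoom falls back to min_zoom, and the center point is derived as the midpoint of the bounds. A minimal standalone sketch of the same fallback logic, run against a hypothetical partial header dict rather than the library API:

    # Sketch only: mirrors the .get() defaults added above, outside the library.
    h = {"min_zoom": 0, "max_zoom": 2}  # hypothetical header with no bounds or center set
    min_lon_e7 = h.get("min_lon_e7", -180)
    max_lon_e7 = h.get("max_lon_e7", 180)
    min_lat_e7 = h.get("min_lat_e7", -90)
    max_lat_e7 = h.get("max_lat_e7", 90)
    center_zoom = h.get("center_zoom", h["min_zoom"])
    center_lon_e7 = h.get("center_lon_e7", round((min_lon_e7 + max_lon_e7) / 2))
    center_lat_e7 = h.get("center_lat_e7", round((min_lat_e7 + max_lat_e7) / 2))
    assert (center_zoom, center_lon_e7, center_lat_e7) == (0, 0, 0)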

View File

@@ -3,7 +3,13 @@ import tempfile
 import gzip
 import shutil
 from contextlib import contextmanager
-from .tile import Entry, serialize_directory, Compression, serialize_header
+from .tile import (
+    Entry,
+    serialize_directory,
+    Compression,
+    serialize_header,
+    tileid_to_zxy,
+)


 @contextmanager
@@ -55,9 +61,12 @@ class Writer:
         self.tile_f = tempfile.TemporaryFile()
         self.offset = 0
         self.addressed_tiles = 0
+        self.clustered = True

     # TODO enforce ordered writes
     def write_tile(self, tileid, data):
+        if len(self.tile_entries) > 0 and tileid < self.tile_entries[-1].tile_id:
+            self.clustered = False
         hsh = hash(data)
         if hsh in self.hash_to_offset:
             last = self.tile_entries[-1]
@@ -83,6 +92,11 @@ class Writer:
header["tile_entries_count"] = len(self.tile_entries)
header["tile_contents_count"] = len(self.hash_to_offset)
self.tile_entries = sorted(self.tile_entries, key=lambda e: e.tile_id)
header["min_zoom"] = tileid_to_zxy(self.tile_entries[0].tile_id)[0]
header["max_zoom"] = tileid_to_zxy(self.tile_entries[-1].tile_id)[0]
root_bytes, leaves_bytes, num_leaves = optimize_directories(
self.tile_entries, 16384 - 127
)
@@ -105,7 +119,7 @@ class Writer:
         )
         compressed_metadata = gzip.compress(json.dumps(metadata).encode())

-        header["clustered"] = True
+        header["clustered"] = self.clustered
         header["internal_compression"] = Compression.GZIP
         header["root_offset"] = 127
         header["root_length"] = len(root_bytes)

View File

@@ -1,30 +0,0 @@
-import unittest
-from io import BytesIO
-from pmtiles.writer import Writer
-from pmtiles.reader import Reader, MemorySource
-
-
-class TestReader(unittest.TestCase):
-    def test_roundtrip(self):
-        buf = BytesIO()
-        # writer = Writer(buf, 5)
-        # writer.write_tile(1, 0, 0, b"0")
-        # writer.write_tile(1, 0, 1, b"1")
-        # writer.write_tile(1, 1, 0, b"2")
-        # writer.write_tile(2, 0, 0, b"4")
-        # writer.write_tile(3, 0, 0, b"5")
-        # writer.write_tile(2, 0, 1, b"6")
-        # writer.write_tile(3, 0, 2, b"7")
-        # writer.finalize({"key": "value"})
-        # reader = Reader(MemorySource(buf.getvalue()))
-        # self.assertEqual(reader.header().version, 2)
-        # self.assertEqual(reader.header().metadata["key"], "value")
-        # self.assertEqual(reader.get(1, 0, 0), b"0")
-        # self.assertEqual(reader.get(1, 0, 1), b"1")
-        # self.assertEqual(reader.get(1, 1, 0), b"2")
-        # self.assertEqual(reader.get(2, 0, 0), b"4")
-        # self.assertEqual(reader.get(3, 0, 0), b"5")
-        # self.assertEqual(reader.get(2, 0, 1), b"6")
-        # self.assertEqual(reader.get(3, 0, 2), b"7")
-        # self.assertEqual(reader.get(1, 1, 1), None)

View File

@@ -0,0 +1,48 @@
+import unittest
+from io import BytesIO
+from pmtiles.writer import Writer
+from pmtiles.reader import Reader, MemorySource
+from pmtiles.tile import Compression, TileType, tileid_to_zxy, zxy_to_tileid
+
+
+class TestReaderWriter(unittest.TestCase):
+    def test_roundtrip(self):
+        buf = BytesIO()
+        writer = Writer(buf)
+        writer.write_tile(zxy_to_tileid(0, 0, 0), b"1")
+        writer.write_tile(zxy_to_tileid(1, 0, 0), b"2")
+        writer.write_tile(zxy_to_tileid(2, 0, 0), b"3")
+        writer.finalize(
+            {
+                "tile_compression": Compression.UNKNOWN,
+                "tile_type": TileType.UNKNOWN,
+            },
+            {"key": "value"},
+        )
+        reader = Reader(MemorySource(buf.getvalue()))
+        self.assertEqual(reader.header()["version"], 3)
+        self.assertEqual(reader.header()["min_zoom"], 0)
+        self.assertEqual(reader.header()["max_zoom"], 2)
+        self.assertEqual(reader.header()["clustered"], True)
+        self.assertEqual(reader.metadata()["key"], "value")
+        self.assertEqual(reader.get(0, 0, 0), b"1")
+        self.assertEqual(reader.get(1, 0, 0), b"2")
+        self.assertEqual(reader.get(2, 0, 0), b"3")
+        self.assertEqual(reader.get(3, 0, 0), None)
+
+    def test_roundtrip_unclustered(self):
+        buf = BytesIO()
+        writer = Writer(buf)
+        writer.write_tile(zxy_to_tileid(1, 0, 0), b"2")
+        writer.write_tile(zxy_to_tileid(0, 0, 0), b"1")
+        writer.finalize(
+            {
+                "tile_compression": Compression.UNKNOWN,
+                "tile_type": TileType.UNKNOWN,
+            },
+            {},
+        )
+        reader = Reader(MemorySource(buf.getvalue()))
+        self.assertEqual(reader.header()["clustered"], False)
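
Both round trips rely only on the standard library test runner, so the new cases can be exercised with unittest discovery, assuming the pmtiles package is importable and the test module lives somewhere discovery can find it (its path is not shown in this view):

    python -m unittest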

View File

@@ -1 +0,0 @@
-import unittest