mirror of
https://github.com/protomaps/PMTiles.git
synced 2026-02-04 10:51:07 +00:00
reformat with black
@@ -1,3 +1,3 @@
 from collections import namedtuple
 
-Entry = namedtuple('Entry',['z','x','y','offset','length','is_dir'])
+Entry = namedtuple("Entry", ["z", "x", "y", "offset", "length", "is_dir"])
@@ -1,4 +1,3 @@
-
 # pmtiles to files
 import gzip
 import json
@@ -10,41 +9,46 @@ from pmtiles.writer import write
 # if the tile is GZIP-encoded, it won't work with range queries
 # until transfer-encoding: gzip is well supported.
 def force_compress(data, compress):
-    if compress and data[0:2] != b'\x1f\x8b':
+    if compress and data[0:2] != b"\x1f\x8b":
         return gzip.compress(data)
-    if not compress and data[0:2] == b'\x1f\x8b':
+    if not compress and data[0:2] == b"\x1f\x8b":
         return gzip.decompress(data)
     return data
 
+
 def set_metadata_compression(metadata, gzip):
     if gzip:
-        metadata['compression'] = 'gzip'
+        metadata["compression"] = "gzip"
     else:
         try:
-            del metadata['compression']
+            del metadata["compression"]
         except:
             pass
     return metadata
 
+
 def mbtiles_to_pmtiles(input, output, maxzoom, gzip):
     conn = sqlite3.connect(input)
     cursor = conn.cursor()
 
     with write(output) as writer:
-        for row in cursor.execute('SELECT zoom_level,tile_column,tile_row,tile_data FROM tiles WHERE zoom_level <= ? ORDER BY zoom_level,tile_column,tile_row ASC',(maxzoom or 99,)):
+        for row in cursor.execute(
+            "SELECT zoom_level,tile_column,tile_row,tile_data FROM tiles WHERE zoom_level <= ? ORDER BY zoom_level,tile_column,tile_row ASC",
+            (maxzoom or 99,),
+        ):
             flipped = (1 << row[0]) - 1 - row[2]
             writer.write_tile(row[0], row[1], flipped, force_compress(row[3], gzip))
 
         metadata = {}
-        for row in cursor.execute('SELECT name,value FROM metadata'):
+        for row in cursor.execute("SELECT name,value FROM metadata"):
             metadata[row[0]] = row[1]
         if maxzoom:
-            metadata['maxzoom'] = str(maxzoom)
+            metadata["maxzoom"] = str(maxzoom)
         metadata = set_metadata_compression(metadata, gzip)
         result = writer.finalize(metadata)
-        print("Num tiles:",result['num_tiles'])
-        print("Num unique tiles:",result['num_unique_tiles'])
-        print("Num leaves:",result['num_leaves'])
+        print("Num tiles:", result["num_tiles"])
+        print("Num unique tiles:", result["num_unique_tiles"])
+        print("Num leaves:", result["num_leaves"])
 
     conn.close()
 
@@ -52,34 +56,42 @@ def mbtiles_to_pmtiles(input, output, maxzoom, gzip):
 def pmtiles_to_mbtiles(input, output, gzip):
     conn = sqlite3.connect(output)
     cursor = conn.cursor()
-    cursor.execute('CREATE TABLE metadata (name text, value text);')
-    cursor.execute('CREATE TABLE tiles (zoom_level integer, tile_column integer, tile_row integer, tile_data blob);')
+    cursor.execute("CREATE TABLE metadata (name text, value text);")
+    cursor.execute(
+        "CREATE TABLE tiles (zoom_level integer, tile_column integer, tile_row integer, tile_data blob);"
+    )
 
     with read(input) as reader:
         metadata = reader.metadata
         metadata = set_metadata_compression(metadata, gzip)
         for k, v in metadata.items():
-            cursor.execute('INSERT INTO metadata VALUES(?,?)',(k,v))
+            cursor.execute("INSERT INTO metadata VALUES(?,?)", (k, v))
         for tile, data in reader.tiles():
             flipped = (1 << tile[0]) - 1 - tile[2]
-            cursor.execute('INSERT INTO tiles VALUES(?,?,?,?)',(tile[0],tile[1],flipped,force_compress(data,gzip)))
+            cursor.execute(
+                "INSERT INTO tiles VALUES(?,?,?,?)",
+                (tile[0], tile[1], flipped, force_compress(data, gzip)),
+            )
 
-    cursor.execute('CREATE UNIQUE INDEX tile_index on tiles (zoom_level, tile_column, tile_row);')
+    cursor.execute(
+        "CREATE UNIQUE INDEX tile_index on tiles (zoom_level, tile_column, tile_row);"
+    )
     conn.commit()
     conn.close()
 
+
 def pmtiles_to_dir(input, output, gzip):
     os.makedirs(output)
 
     with read(input) as reader:
         metadata = reader.metadata
         metadata = set_metadata_compression(metadata, gzip)
-        with open(os.path.join(output,'metadata.json'),'w') as f:
+        with open(os.path.join(output, "metadata.json"), "w") as f:
            f.write(json.dumps(metadata))
 
        for tile, data in reader.tiles():
            directory = os.path.join(output, str(tile[0]), str(tile[1]))
-            path = os.path.join(directory,str(tile[2]) + '.' + metadata['format'])
+            path = os.path.join(directory, str(tile[2]) + "." + metadata["format"])
            os.makedirs(directory, exist_ok=True)
-            with open(path,'wb') as f:
+            with open(path, "wb") as f:
                f.write(force_compress(data, gzip))
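For orientation, a hedged usage sketch of the three converters touched above. Only the function signatures come from the diff; the pmtiles.convert module path and the file names are assumptions.

# Usage sketch (not part of this commit): assumes these helpers are importable
# from pmtiles.convert and keep the signatures shown above; file names are made up.
from pmtiles.convert import mbtiles_to_pmtiles, pmtiles_to_dir, pmtiles_to_mbtiles

mbtiles_to_pmtiles("osm.mbtiles", "osm.pmtiles", maxzoom=14, gzip=True)
pmtiles_to_mbtiles("osm.pmtiles", "roundtrip.mbtiles", gzip=True)
pmtiles_to_dir("osm.pmtiles", "tiles_dir", gzip=False)

# MBTiles rows are TMS-ordered, hence the flip in the code above: at z=2,
# TMS row 1 becomes XYZ row (1 << 2) - 1 - 1 == 2, and the same formula converts back.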
@@ -2,6 +2,7 @@ import json
 import mmap
 from contextlib import contextmanager
 
+
 @contextmanager
 def read(fname):
     r = Reader(fname)
@@ -10,24 +11,27 @@ def read(fname):
     finally:
         r.close()
 
+
 class Reader:
     def __init__(self, fname):
         self.f = open(fname, "r+b")
         self.mmap = mmap.mmap(self.f.fileno(), 0)
-        assert int.from_bytes(self.mmap[0:2],byteorder='little') == 0x4D50
+        assert int.from_bytes(self.mmap[0:2], byteorder="little") == 0x4D50
         first_entry_idx = 10 + self.metadata_len
-        self.root_dir, self.leaves = self.load_directory(first_entry_idx,self.root_entries)
+        self.root_dir, self.leaves = self.load_directory(
+            first_entry_idx, self.root_entries
+        )
 
     def load_directory(self, offset, num_entries):
         directory = {}
         leaves = {}
         for i in range(offset, offset + num_entries * 17, 17):
-            z = int.from_bytes(self.mmap[i:i+1],byteorder='little')
-            x = int.from_bytes(self.mmap[i+1:i+4],byteorder='little')
-            y = int.from_bytes(self.mmap[i+4:i+7],byteorder='little')
-            tile_off = int.from_bytes(self.mmap[i+7:i+13],byteorder='little')
-            tile_len = int.from_bytes(self.mmap[i+13:i+17],byteorder='little')
-            if (z & 0b10000000):
+            z = int.from_bytes(self.mmap[i : i + 1], byteorder="little")
+            x = int.from_bytes(self.mmap[i + 1 : i + 4], byteorder="little")
+            y = int.from_bytes(self.mmap[i + 4 : i + 7], byteorder="little")
+            tile_off = int.from_bytes(self.mmap[i + 7 : i + 13], byteorder="little")
+            tile_len = int.from_bytes(self.mmap[i + 13 : i + 17], byteorder="little")
+            if z & 0b10000000:
                 leaves[(z & 0b01111111, x, y)] = (tile_off, tile_len)
             else:
                 directory[(z, x, y)] = (tile_off, tile_len)
@@ -38,7 +42,7 @@ class Reader:
 
     @property
     def metadata_len(self):
-        return int.from_bytes(self.mmap[4:8],byteorder='little')
+        return int.from_bytes(self.mmap[4:8], byteorder="little")
 
     @property
     def metadata(self):
@@ -47,11 +51,11 @@ class Reader:
 
     @property
     def version(self):
-        return int.from_bytes(self.mmap[2:4],byteorder='little')
+        return int.from_bytes(self.mmap[2:4], byteorder="little")
 
     @property
     def root_entries(self):
-        return int.from_bytes(self.mmap[8:10],byteorder='little')
+        return int.from_bytes(self.mmap[8:10], byteorder="little")
 
     @property
     def leaf_level(self):
@@ -79,4 +83,3 @@ class Reader:
             leaf_dir, _ = self.load_directory(val[0], val[1] // 17)
             for k, v in leaf_dir.items():
                 yield (k, self.mmap[v[0] : v[0] + v[1]])
-
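The reader parses fixed 17-byte directory entries: z (1 byte, high bit marks a leaf-directory entry), x (3 bytes), y (3 bytes), offset (6 bytes), length (4 bytes), all little-endian. A self-contained sketch of that layout follows; pack_entry and unpack_entry are illustrative helpers, not library API.

# Sketch of the 17-byte entry layout used by Reader.load_directory and
# Writer.write_entry; helper names are hypothetical.
def pack_entry(z, x, y, offset, length, is_dir=False):
    z_byte = (0b10000000 | z) if is_dir else z
    return (
        z_byte.to_bytes(1, byteorder="little")
        + x.to_bytes(3, byteorder="little")
        + y.to_bytes(3, byteorder="little")
        + offset.to_bytes(6, byteorder="little")
        + length.to_bytes(4, byteorder="little")
    )

def unpack_entry(buf):
    z = int.from_bytes(buf[0:1], byteorder="little")
    x = int.from_bytes(buf[1:4], byteorder="little")
    y = int.from_bytes(buf[4:7], byteorder="little")
    offset = int.from_bytes(buf[7:13], byteorder="little")
    length = int.from_bytes(buf[13:17], byteorder="little")
    return (z & 0b01111111, x, y, offset, length, bool(z & 0b10000000))

# Round-trips a leaf-directory entry through the 17-byte encoding.
assert unpack_entry(pack_entry(3, 1, 2, 512000, 1024, True)) == (3, 1, 2, 512000, 1024, True)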
@@ -3,14 +3,17 @@ import json
 from contextlib import contextmanager
 from pmtiles import Entry
 
+
 def entrysort(t):
     return (t.z, t.x, t.y)
 
+
 # Find best base zoom to avoid extra indirection for as many tiles as we can
 # precondition: entries is sorted, only tile entries, len(entries) > max_dir_size
 def find_leaf_level(entries, max_dir_size):
     return entries[max_dir_size].z - 1
 
+
 def make_pyramid(tile_entries, start_leaf_offset, max_dir_size=21845):
     sorted_entries = sorted(tile_entries, key=entrysort)
     if len(sorted_entries) <= max_dir_size:
@@ -48,7 +51,11 @@ def make_pyramid(tile_entries,start_leaf_offset,max_dir_size=21845):
             # flush the current packed entries
 
             for p in packed_roots:
-                root_entries.append(Entry(p[0],p[1],p[2],current_offset,17 * len(packed_entries),True))
+                root_entries.append(
+                    Entry(
+                        p[0], p[1], p[2], current_offset, 17 * len(packed_entries), True
+                    )
+                )
             # re-sort the packed_entries by ZXY order
             packed_entries.sort(key=entrysort)
             leaf_dirs.append(packed_entries)
@@ -61,13 +68,16 @@ def make_pyramid(tile_entries,start_leaf_offset,max_dir_size=21845):
     if len(packed_entries):
 
         for p in packed_roots:
-            root_entries.append(Entry(p[0],p[1],p[2],current_offset,17 * len(packed_entries),True))
+            root_entries.append(
+                Entry(p[0], p[1], p[2], current_offset, 17 * len(packed_entries), True)
+            )
         # re-sort the packed_entries by ZXY order
         packed_entries.sort(key=entrysort)
         leaf_dirs.append(packed_entries)
 
     return (root_entries, leaf_dirs)
 
+
 @contextmanager
 def write(fname):
     w = Writer(fname)
@@ -76,18 +86,21 @@ def write(fname):
     finally:
         w.close()
 
+
 class Writer:
     def __init__(self, fname):
-        self.f = open(fname,'wb')
+        self.f = open(fname, "wb")
         self.offset = 512000
-        self.f.write(b'\0' * self.offset)
+        self.f.write(b"\0" * self.offset)
         self.tile_entries = []
         self.hash_to_offset = {}
 
     def write_tile(self, z, x, y, data):
         hsh = hash(data)
         if hsh in self.hash_to_offset:
-            self.tile_entries.append(Entry(z,x,y,self.hash_to_offset[hsh],len(data),False))
+            self.tile_entries.append(
+                Entry(z, x, y, self.hash_to_offset[hsh], len(data), False)
+            )
         else:
             self.f.write(data)
             self.tile_entries.append(Entry(z, x, y, self.offset, len(data), False))
@@ -99,21 +112,21 @@ class Writer:
             z_bytes = 0b10000000 | entry.z
         else:
             z_bytes = entry.z
-        self.f.write(z_bytes.to_bytes(1,byteorder='little'))
-        self.f.write(entry.x.to_bytes(3,byteorder='little'))
-        self.f.write(entry.y.to_bytes(3,byteorder='little'))
-        self.f.write(entry.offset.to_bytes(6,byteorder='little'))
-        self.f.write(entry.length.to_bytes(4,byteorder='little'))
+        self.f.write(z_bytes.to_bytes(1, byteorder="little"))
+        self.f.write(entry.x.to_bytes(3, byteorder="little"))
+        self.f.write(entry.y.to_bytes(3, byteorder="little"))
+        self.f.write(entry.offset.to_bytes(6, byteorder="little"))
+        self.f.write(entry.length.to_bytes(4, byteorder="little"))
 
     def write_header(self, metadata, root_entries_len):
-        self.f.write((0x4D50).to_bytes(2,byteorder='little'))
-        self.f.write((2).to_bytes(2,byteorder='little'))
+        self.f.write((0x4D50).to_bytes(2, byteorder="little"))
+        self.f.write((2).to_bytes(2, byteorder="little"))
         metadata_serialized = json.dumps(metadata)
         # 512000 - (17 * 21845) - 2 (magic) - 2 (version) - 4 (jsonlen) - 2 (dictentries) = 140625
         assert len(metadata_serialized) < 140625
-        self.f.write(len(metadata_serialized).to_bytes(4,byteorder='little'))
-        self.f.write(root_entries_len.to_bytes(2,byteorder='little'))
-        self.f.write(metadata_serialized.encode('utf-8'))
+        self.f.write(len(metadata_serialized).to_bytes(4, byteorder="little"))
+        self.f.write(root_entries_len.to_bytes(2, byteorder="little"))
+        self.f.write(metadata_serialized.encode("utf-8"))
 
     def finalize(self, metadata={}):
         root_dir, leaf_dirs = make_pyramid(self.tile_entries, self.offset)
@@ -129,7 +142,11 @@ class Writer:
         for entry in root_dir:
             self.write_entry(entry)
 
-        return {'num_tiles':len(self.tile_entries),'num_unique_tiles':len(self.hash_to_offset),'num_leaves':len(leaf_dirs)}
+        return {
+            "num_tiles": len(self.tile_entries),
+            "num_unique_tiles": len(self.hash_to_offset),
+            "num_leaves": len(leaf_dirs),
+        }
 
     def close(self):
         self.f.close()
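A quick arithmetic check of the 140625 bound asserted in write_header above: the writer reserves the first 512000 bytes for the header plus the root directory of at most 21845 entries of 17 bytes each. The constant names below are mine, not library identifiers.

# Worked check of the space left for serialized JSON metadata inside the
# reserved header region.
RESERVED = 512000
MAX_ROOT_ENTRIES = 21845
ENTRY_SIZE = 17
FIXED = 2 + 2 + 4 + 2  # magic, version, JSON length, root entry count

max_metadata_json = RESERVED - MAX_ROOT_ENTRIES * ENTRY_SIZE - FIXED
assert max_metadata_json == 140625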
@@ -19,6 +19,6 @@ setuptools.setup(
         "License :: OSI Approved :: BSD License",
         "Operating System :: OS Independent",
     ],
-    scripts=['bin/pmtiles-convert','bin/pmtiles-serve','bin/pmtiles-show'],
-    requires_python='>=3.0'
+    scripts=["bin/pmtiles-convert", "bin/pmtiles-serve", "bin/pmtiles-show"],
+    requires_python=">=3.0",
 )
@@ -2,6 +2,7 @@ import unittest
 from pmtiles import Entry
 from pmtiles.writer import find_leaf_level, make_pyramid
 
+
 class TestTilePyramid(unittest.TestCase):
     def test_root_sorted(self):
         entries = [
@@ -9,7 +10,7 @@ class TestTilePyramid(unittest.TestCase):
             Entry(1, 0, 1, 2, 1, False),
             Entry(1, 1, 0, 3, 1, False),
             Entry(1, 1, 1, 4, 1, False),
-            Entry(0,0,0,0,1,False)
+            Entry(0, 0, 0, 0, 1, False),
         ]
         root_entries, leaf_dirs = make_pyramid(entries, 0, 6)
         self.assertEqual(len(root_entries), 5)
@@ -27,7 +28,7 @@ class TestTilePyramid(unittest.TestCase):
             Entry(2, 0, 0, 5, 1, False),
             Entry(3, 0, 0, 6, 1, False),
             Entry(2, 0, 1, 7, 1, False),
-            Entry(3,0,2,8,1,False)
+            Entry(3, 0, 2, 8, 1, False),
         ]
         root_entries, leaf_dirs = make_pyramid(entries, 0, 7)
         self.assertEqual(len(root_entries), 7)
@@ -40,7 +41,6 @@ class TestTilePyramid(unittest.TestCase):
         self.assertEqual(leaf_dirs[0][2].z, 3)
         self.assertEqual(leaf_dirs[0][3].z, 3)
 
-
     def test_leafdir_overflow(self):
         entries = [
             Entry(0, 0, 0, 0, 1, False),
@@ -57,7 +57,7 @@ class TestTilePyramid(unittest.TestCase):
             Entry(3, 0, 2, 11, 1, False),
             Entry(3, 0, 3, 12, 1, False),
             Entry(3, 1, 2, 13, 1, False),
-            Entry(3,1,3,14,1,False)
+            Entry(3, 1, 3, 14, 1, False),
         ]
         root_entries, leaf_dirs = make_pyramid(entries, 0, 7)
         self.assertEqual(len(root_entries), 7)
@@ -74,7 +74,7 @@ class TestTilePyramid(unittest.TestCase):
             Entry(2, 0, 0, 5, 1, False),
             Entry(3, 0, 0, 6, 1, False),
             # Entry(2,0,1,7,1,False), make this entry missing
-            Entry(3,0,2,8,1,False)
+            Entry(3, 0, 2, 8, 1, False),
         ]
         root_entries, leaf_dirs = make_pyramid(entries, 0, 7)
         self.assertEqual(len(root_entries), 7)
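The tests above exercise find_leaf_level, whose rule (return entries[max_dir_size].z - 1) is easy to see in a tiny worked example. Only Entry and find_leaf_level come from the code under test; the entry values below are made up.

# Worked example of find_leaf_level with a directory cap of 6 entries.
from pmtiles import Entry
from pmtiles.writer import find_leaf_level

entries = sorted(
    [Entry(z, 0, 0, 0, 1, False) for z in (0, 1, 1, 1, 1, 2, 2, 3)],
    key=lambda t: (t.z, t.x, t.y),
)
# entries[6].z == 2, so the leaf level is 2 - 1 == 1.
assert find_leaf_level(entries, 6) == 1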