python: default to max dir depth 3, don't default compression in writer

This commit is contained in:
Brandon Liu
2022-10-16 12:33:39 +08:00
parent 991cdb302d
commit 819a0e6f2b
2 changed files with 1 additions and 4 deletions

View File

@@ -46,7 +46,7 @@ class Reader:
         header = deserialize_header(self.get_bytes(0, 127))
         dir_offset = header["root_offset"]
         dir_length = header["root_length"]
-        for depth in range(0, 3):  # max depth
+        for depth in range(0, 4):  # max depth
             directory = deserialize_directory(self.get_bytes(dir_offset, dir_length))
             result = find_tile(directory, tile_id)
             if result:

View File

@@ -107,9 +107,6 @@ class Writer:
         compressed_metadata = gzip.compress(json.dumps(metadata).encode())
         header["clustered"] = True
         header["internal_compression"] = Compression.GZIP
-        header[
-            "tile_compression"
-        ] = Compression.GZIP  # TODO: not necessarily true for non-vector
         header["root_offset"] = 127
         header["root_length"] = len(root_bytes)
         header["metadata_offset"] = header["root_offset"] + header["root_length"]