Fail gracefully when ETag mismatch happens more than once.

* Some HTTP servers return 206 Partial Content with an ETag derived from the returned byte range rather than from the entire resource, so successive range requests report conflicting ETags even though the archive has not changed.
* When this case is detected, the client now warns, disables ETag validation for that source, and continues instead of failing repeatedly (sketched below).
Author: Brandon Liu
Date: 2022-10-27 15:32:13 +08:00
parent 763c0e8099
commit afcd31b511
2 changed files with 103 additions and 37 deletions
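The mechanism, in short: directory and tile reads keep validating each range response's ETag against the one recorded in the cached header; on a mismatch they now throw EtagMismatch, the caller invalidates the cached header while passing along the conflicting ETag, and the header is re-fetched. If the re-fetched header still reports a different ETag, ETag checking is switched off for that source (its header.etag becomes undefined) instead of erroring forever. Below is a minimal, self-contained TypeScript sketch of that control flow; getTileWithRetry, FetchRange, and knownEtag are hypothetical names used only for illustration, while EtagMismatch and the retry-once-then-disable behavior mirror the actual change.

// Minimal sketch of the fallback; not the library's actual API surface.
// getTileWithRetry, FetchRange, and knownEtag are hypothetical names.

class EtagMismatch extends Error {}

interface RangeResponse {
  data: ArrayBuffer;
  etag?: string;
}

// Stand-in for a Source.getBytes-style range reader (one HTTP Range request).
type FetchRange = (offset: number, length: number) => Promise<RangeResponse>;

async function getTileWithRetry(
  fetchRange: FetchRange,
  knownEtag: string | undefined,
  offset: number,
  length: number
): Promise<ArrayBuffer> {
  const attempt = async (expectedEtag?: string): Promise<ArrayBuffer> => {
    const resp = await fetchRange(offset, length);
    // Validate only while we still trust ETags for this source.
    if (expectedEtag && resp.etag !== expectedEtag) {
      throw new EtagMismatch(resp.etag);
    }
    return resp.data;
  };

  try {
    return await attempt(knownEtag);
  } catch (e) {
    if (e instanceof EtagMismatch) {
      // Either the archive really changed, or the server derives ETags from
      // each 206 response body. Retry exactly once with validation disabled;
      // the real client does the equivalent by invalidating its cached header
      // and reloading it with the conflicting ETag.
      console.warn("ETag conflict detected; retrying without validation");
      return await attempt(undefined);
    }
    throw e;
  }
}

In the library itself, the disable step happens inside getHeaderAndRoot: when the caller passes the conflicting current_etag and the freshly fetched bytes still carry a different ETag, a warning is logged and the header is built with an undefined ETag, which the new "soft failure on etag weirdness" test asserts.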


@@ -385,10 +385,10 @@ function detectVersion(a: ArrayBuffer): number {
   return 3;
 }
-export class VersionMismatch extends Error {}
+export class EtagMismatch extends Error {}
 export interface Cache {
-  getHeader: (source: Source) => Promise<Header>;
+  getHeader: (source: Source, current_etag?: string) => Promise<Header>;
   getDirectory: (
     source: Source,
     offset: number,
@@ -401,12 +401,13 @@ export interface Cache {
     length: number,
     header: Header
   ) => Promise<ArrayBuffer>;
-  invalidate: (source: Source) => void;
+  invalidate: (source: Source, current_etag: string) => Promise<void>;
 }
 async function getHeaderAndRoot(
   source: Source,
-  prefetch: boolean
+  prefetch: boolean,
+  current_etag?: string
 ): Promise<[Header, [string, number, Entry[] | ArrayBuffer]?]> {
   const resp = await source.getBytes(0, 16384);
@@ -421,7 +422,17 @@ async function getHeaderAndRoot(
   }
   const headerData = resp.data.slice(0, HEADER_SIZE_BYTES);
-  const header = bytesToHeader(headerData, resp.etag);
+  let resp_etag = resp.etag;
+  if (current_etag && resp.etag != current_etag) {
+    console.warn(
+      "ETag conflict detected; your HTTP server might not support content-based ETag headers. ETags disabled for " +
+        source.getKey()
+    );
+    resp_etag = undefined;
+  }
+  const header = bytesToHeader(headerData, resp_etag);
   // optimistically set the root directory
   // TODO check root bounds
@@ -457,7 +468,7 @@ async function getDirectory(
   const resp = await source.getBytes(offset, length);
   if (header.etag && header.etag !== resp.etag) {
-    throw new VersionMismatch("ETag mismatch: " + header.etag);
+    throw new EtagMismatch(resp.etag);
   }
   const data = tryDecompress(resp.data, header.internalCompression);
@@ -490,7 +501,7 @@ export class ResolvedValueCache {
     this.prefetch = prefetch;
   }
-  async getHeader(source: Source): Promise<Header> {
+  async getHeader(source: Source, current_etag?: string): Promise<Header> {
     const cacheKey = source.getKey();
     if (this.cache.has(cacheKey)) {
       this.cache.get(cacheKey)!.lastUsed = this.counter++;
@@ -498,7 +509,7 @@ export class ResolvedValueCache {
       return data as Header;
     }
-    const res = await getHeaderAndRoot(source, this.prefetch);
+    const res = await getHeaderAndRoot(source, this.prefetch, current_etag);
     if (res[1]) {
       this.cache.set(res[1][0], {
         lastUsed: this.counter++,
@@ -559,7 +570,7 @@ export class ResolvedValueCache {
     const resp = await source.getBytes(offset, length);
     if (header.etag && header.etag !== resp.etag) {
-      throw new VersionMismatch("ETag mismatch: " + header.etag);
+      throw new EtagMismatch(header.etag);
     }
     this.cache.set(cacheKey, {
       lastUsed: this.counter++,
@@ -588,8 +599,9 @@ export class ResolvedValueCache {
     }
   }
-  invalidate(source: Source) {
+  async invalidate(source: Source, current_etag: string) {
     this.cache.delete(source.getKey());
+    await this.getHeader(source, current_etag);
   }
 }
@@ -618,7 +630,7 @@ export class SharedPromiseCache {
     this.prefetch = prefetch;
   }
-  async getHeader(source: Source): Promise<Header> {
+  async getHeader(source: Source, current_etag?: string): Promise<Header> {
     const cacheKey = source.getKey();
     if (this.cache.has(cacheKey)) {
       this.cache.get(cacheKey)!.lastUsed = this.counter++;
@@ -627,7 +639,7 @@ export class SharedPromiseCache {
     }
     const p = new Promise<Header>((resolve, reject) => {
-      getHeaderAndRoot(source, this.prefetch)
+      getHeaderAndRoot(source, this.prefetch, current_etag)
         .then((res) => {
           if (this.cache.has(cacheKey)) {
             this.cache.get(cacheKey)!.size = HEADER_SIZE_BYTES;
@@ -704,7 +716,7 @@ export class SharedPromiseCache {
         .getBytes(offset, length)
         .then((resp) => {
           if (header.etag && header.etag !== resp.etag) {
-            throw new VersionMismatch("ETag mismatch: " + header.etag);
+            throw new EtagMismatch(resp.etag);
           }
           resolve(resp.data);
           if (this.cache.has(cacheKey)) {
@@ -740,8 +752,9 @@ export class SharedPromiseCache {
     }
   }
-  invalidate(source: Source) {
+  async invalidate(source: Source, current_etag: string) {
     this.cache.delete(source.getKey());
+    await this.getHeader(source, current_etag);
   }
 }
@@ -817,7 +830,7 @@ export class PMTiles {
       signal
     );
     if (header.etag && header.etag !== resp.etag) {
-      throw new VersionMismatch("ETag mismatch: " + header.etag);
+      throw new EtagMismatch(resp.etag);
     }
     return {
       data: tryDecompress(resp.data, header.tileCompression),
@@ -844,8 +857,8 @@ export class PMTiles {
     try {
       return await this.getZxyAttempt(z, x, y, signal);
     } catch (e) {
-      if (e instanceof VersionMismatch) {
-        this.cache.invalidate(this.source);
+      if (e instanceof EtagMismatch) {
+        this.cache.invalidate(this.source, e.name);
         return await this.getZxyAttempt(z, x, y, signal);
       } else {
         throw e;
@@ -861,7 +874,7 @@ export class PMTiles {
       header.jsonMetadataLength
     );
     if (header.etag && header.etag !== resp.etag) {
-      throw new VersionMismatch("Etag mismatch: " + header.etag);
+      throw new EtagMismatch(resp.etag);
     }
     const decompressed = tryDecompress(resp.data, header.internalCompression);
     const dec = new TextDecoder("utf-8");
@@ -872,8 +885,8 @@ export class PMTiles {
     try {
       return await this.getMetadataAttempt();
     } catch (e) {
-      if (e instanceof VersionMismatch) {
-        this.cache.invalidate(this.source);
+      if (e instanceof EtagMismatch) {
+        this.cache.invalidate(this.source, e.name);
         return await this.getMetadataAttempt();
       } else {
         throw e;


@@ -15,7 +15,7 @@ import {
   BufferPosition,
   Source,
   RangeResponse,
-  VersionMismatch,
+  EtagMismatch,
   PMTiles,
 } from "../index";
@@ -72,7 +72,9 @@ test("tile search for missing entry", (assertion) => {
 });
 test("tile search for first entry == id", (assertion) => {
-  const entries: Entry[] = [{ tileId: 100, offset: 1, length: 1, runLength: 1 }];
+  const entries: Entry[] = [
+    { tileId: 100, offset: 1, length: 1, runLength: 1 },
+  ];
   const entry = findTile(entries, 100)!;
   assertion.eq(entry.offset, 1);
   assertion.eq(entry.length, 1);
@@ -104,7 +106,9 @@ test("tile search with multiple tile entries", (assertion) => {
 });
 test("leaf search", (assertion) => {
-  const entries: Entry[] = [{ tileId: 100, offset: 1, length: 1, runLength: 0 }];
+  const entries: Entry[] = [
+    { tileId: 100, offset: 1, length: 1, runLength: 0 },
+  ];
   const entry = findTile(entries, 150);
   assertion.eq(entry!.offset, 1);
   assertion.eq(entry!.length, 1);
@@ -132,19 +136,19 @@ class TestNodeFileSource implements Source {
     this.buffer = fs.readFileSync(path);
   }
-  async getBytes(
-    offset: number,
-    length: number
-  ): Promise<RangeResponse> {
+  async getBytes(offset: number, length: number): Promise<RangeResponse> {
     const slice = new Uint8Array(this.buffer.slice(offset, offset + length))
       .buffer;
-    return {data:slice, etag:this.etag};
+    return { data: slice, etag: this.etag };
   }
 }
 // echo '{"type":"Polygon","coordinates":[[[0,0],[0,1],[1,1],[1,0],[0,0]]]}' | ./tippecanoe -zg -o test_fixture_1.pmtiles
 test("cache getHeader", async (assertion) => {
-  const source = new TestNodeFileSource("test/data/test_fixture_1.pmtiles", "1");
+  const source = new TestNodeFileSource(
+    "test/data/test_fixture_1.pmtiles",
+    "1"
+  );
   const cache = new SharedPromiseCache();
   const header = await cache.getHeader(source);
   assertion.eq(header.rootDirectoryOffset, 127);
@@ -194,7 +198,10 @@ test("cache check magic number", async (assertion) => {
 });
 test("cache getDirectory", async (assertion) => {
-  const source = new TestNodeFileSource("test/data/test_fixture_1.pmtiles", "1");
+  const source = new TestNodeFileSource(
+    "test/data/test_fixture_1.pmtiles",
+    "1"
+  );
   let cache = new SharedPromiseCache(6400, false);
   let header = await cache.getHeader(source);
@@ -226,8 +233,14 @@ test("cache getDirectory", async (assertion) => {
test("multiple sources in a single cache", async (assertion) => { test("multiple sources in a single cache", async (assertion) => {
const cache = new SharedPromiseCache(); const cache = new SharedPromiseCache();
const source1 = new TestNodeFileSource("test/data/test_fixture_1.pmtiles", "1"); const source1 = new TestNodeFileSource(
const source2 = new TestNodeFileSource("test/data/test_fixture_1.pmtiles", "2"); "test/data/test_fixture_1.pmtiles",
"1"
);
const source2 = new TestNodeFileSource(
"test/data/test_fixture_1.pmtiles",
"2"
);
await cache.getHeader(source1); await cache.getHeader(source1);
assertion.eq(cache.cache.size, 2); assertion.eq(cache.cache.size, 2);
await cache.getHeader(source2); await cache.getHeader(source2);
@@ -236,7 +249,10 @@ test("multiple sources in a single cache", async (assertion) => {
test("etags are part of key", async (assertion) => { test("etags are part of key", async (assertion) => {
const cache = new SharedPromiseCache(6400, false); const cache = new SharedPromiseCache(6400, false);
const source = new TestNodeFileSource("test/data/test_fixture_1.pmtiles", "1"); const source = new TestNodeFileSource(
"test/data/test_fixture_1.pmtiles",
"1"
);
source.etag = "etag_1"; source.etag = "etag_1";
let header = await cache.getHeader(source); let header = await cache.getHeader(source);
assertion.eq(header.etag, "etag_1"); assertion.eq(header.etag, "etag_1");
@@ -252,9 +268,9 @@ test("etags are part of key", async (assertion) => {
     );
     assertion.fail("Should have thrown");
   } catch (e) {
-    assertion.ok(e instanceof VersionMismatch);
+    assertion.ok(e instanceof EtagMismatch);
   }
-  cache.invalidate(source);
+  cache.invalidate(source, "etag_2");
   header = await cache.getHeader(source);
   assertion.ok(
     await cache.getDirectory(
@@ -266,6 +282,37 @@ test("etags are part of key", async (assertion) => {
   );
 });
+test("soft failure on etag weirdness", async (assertion) => {
+  const cache = new SharedPromiseCache(6400, false);
+  const source = new TestNodeFileSource(
+    "test/data/test_fixture_1.pmtiles",
+    "1"
+  );
+  source.etag = "etag_1";
+  let header = await cache.getHeader(source);
+  assertion.eq(header.etag, "etag_1");
+  source.etag = "etag_2";
+  try {
+    await cache.getDirectory(
+      source,
+      header.rootDirectoryOffset,
+      header.rootDirectoryLength,
+      header
+    );
+    assertion.fail("Should have thrown");
+  } catch (e) {
+    assertion.ok(e instanceof EtagMismatch);
+  }
+  source.etag = "etag_1";
+  cache.invalidate(source, "etag_2");
+  header = await cache.getHeader(source);
+  assertion.eq(header.etag, undefined);
+});
 test("cache pruning by byte size", async (assertion) => {
   const cache = new SharedPromiseCache(1000, false);
   cache.cache.set("0", { lastUsed: 0, data: Promise.resolve([]), size: 400 });
@@ -278,7 +325,10 @@ test("cache pruning by byte size", async (assertion) => {
 });
 test("pmtiles get metadata", async (assertion) => {
-  const source = new TestNodeFileSource("test/data/test_fixture_1.pmtiles", "1");
+  const source = new TestNodeFileSource(
+    "test/data/test_fixture_1.pmtiles",
+    "1"
+  );
   const p = new PMTiles(source);
   const metadata = await p.getMetadata();
   assertion.ok(metadata.name);
@@ -286,7 +336,10 @@ test("pmtiles get metadata", async (assertion) => {
 // echo '{"type":"Polygon","coordinates":[[[0,0],[0,1],[1,0],[0,0]]]}' | ./tippecanoe -zg -o test_fixture_2.pmtiles
 test("pmtiles handle retries", async (assertion) => {
-  const source = new TestNodeFileSource("test/data/test_fixture_1.pmtiles", "1");
+  const source = new TestNodeFileSource(
+    "test/data/test_fixture_1.pmtiles",
+    "1"
+  );
   source.etag = "1";
   const p = new PMTiles(source);
   const metadata = await p.getMetadata();