Mirror of https://github.com/protomaps/PMTiles.git (synced 2026-02-04 10:51:07 +00:00)
migrate Cloudflare Workers implementation to v3 [#80]
serverless/cloudflare/.gitignore (vendored, 1 line)

@@ -1 +0,0 @@
dist
serverless/cloudflare/cloudflare.d.ts (vendored, 148 lines)

@@ -1,148 +0,0 @@
// copied from https://github.com/cloudflare/workers-types/blob/master/index.d.ts
// see https://github.com/cloudflare/workers-types/issues/164

/**
 * An instance of the R2 bucket binding.
 */
interface R2Bucket {
  head(key: string): Promise<R2Object | null>;
  get(key: string): Promise<R2ObjectBody | null>;
  /**
   * Returns R2Object on a failure of the conditional specified in onlyIf.
   */
  get(
    key: string,
    options: R2GetOptions
  ): Promise<R2ObjectBody | R2Object | null>;
  get(
    key: string,
    options?: R2GetOptions
  ): Promise<R2ObjectBody | R2Object | null>;
  put(
    key: string,
    value:
      | ReadableStream
      | ArrayBuffer
      | ArrayBufferView
      | string
      | null
      | Blob,
    options?: R2PutOptions
  ): Promise<R2Object>;
  delete(key: string): Promise<void>;
  list(options?: R2ListOptions): Promise<R2Objects>;
}

/**
 * Perform the operation conditionally based on meeting the defined criteria.
 */
interface R2Conditional {
  etagMatches?: string;
  etagDoesNotMatch?: string;
  uploadedBefore?: Date;
  uploadedAfter?: Date;
}

/**
 * Options for retrieving the object metadata and payload.
 */
interface R2GetOptions {
  onlyIf?: R2Conditional | Headers;
  range?: R2Range;
}

/**
 * Metadata that's automatically rendered into R2 HTTP API endpoints.
 * ```
 * * contentType -> content-type
 * * contentLanguage -> content-language
 * etc...
 * ```
 * This data is echoed back on GET responses based on what was originally
 * assigned to the object (and can typically also be overridden when issuing
 * the GET request).
 */
interface R2HTTPMetadata {
  contentType?: string;
  contentLanguage?: string;
  contentDisposition?: string;
  contentEncoding?: string;
  cacheControl?: string;
  cacheExpiry?: Date;
}

interface R2ListOptions {
  limit?: number;
  prefix?: string;
  cursor?: string;
  delimiter?: string;
  /**
   * If you populate this array, then items returned will include this metadata.
   * A tradeoff is that fewer results may be returned depending on how big this
   * data is. For now the caps are TBD but expect the total memory usage for a list
   * operation may need to be <1MB or even <128kb depending on how many list operations
   * you are sending into one bucket. Make sure to look at `truncated` for the result
   * rather than having logic like
   * ```
   * while (listed.length < limit) {
   *   listed = myBucket.list({ limit, include: ['customMetadata'] })
   * }
   * ```
   */
  include?: ("httpMetadata" | "customMetadata")[];
}

/**
 * The metadata for the object.
 */
declare abstract class R2Object {
  readonly key: string;
  readonly version: string;
  readonly size: number;
  readonly etag: string;
  readonly httpEtag: string;
  readonly uploaded: Date;
  readonly httpMetadata: R2HTTPMetadata;
  readonly customMetadata: Record<string, string>;
  writeHttpMetadata(headers: Headers): void;
}

/**
 * The metadata for the object and the body of the payload.
 */
interface R2ObjectBody extends R2Object {
  readonly body: ReadableStream;
  readonly bodyUsed: boolean;
  arrayBuffer(): Promise<ArrayBuffer>;
  text(): Promise<string>;
  json<T>(): Promise<T>;
  blob(): Promise<Blob>;
}

interface R2Objects {
  objects: R2Object[];
  truncated: boolean;
  cursor?: string;
  delimitedPrefixes: string[];
}

interface R2PutOptions {
  httpMetadata?: R2HTTPMetadata | Headers;
  customMetadata?: Record<string, string>;
  md5?: ArrayBuffer | string;
}

declare type R2Range =
  | { offset: number; length?: number }
  | { offset?: number; length: number }
  | { suffix: number };

interface ReadResult {
  value?: any;
  done: boolean;
}

interface ExecutionContext {
  waitUntil(promise: Promise<any>): void;
  passThroughOnException(): void;
}
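For reference, this is the shape of the ranged reads the worker performs against R2. A minimal sketch of such a read using the declarations above (the bucket variable and the "tiles.pmtiles" key are placeholders for illustration, not part of the commit):

// Illustrative only: a ranged read against the R2Bucket interface declared above.
// `bucket` is assumed to be an R2 binding; "tiles.pmtiles" is a placeholder key.
async function readRange(bucket: R2Bucket): Promise<ArrayBuffer | null> {
  const resp = await bucket.get("tiles.pmtiles", {
    range: { offset: 0, length: 16384 }, // first 16 KiB of the object
  });
  if (!resp) {
    return null; // key does not exist
  }
  // A plain ranged get (no onlyIf condition) returns the body with the metadata.
  return (resp as R2ObjectBody).arrayBuffer();
}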
serverless/cloudflare/package-lock.json (generated, 2464 lines)

File diff suppressed because it is too large.

serverless/cloudflare/package.json

@@ -1,13 +1,20 @@
 {
   "name": "pmtiles-cloudflare",
   "version": "0.0.0",
   "devDependencies": {
-    "esbuild": "^0.14.42",
-    "esbuild-runner": "^2.2.1",
-    "typescript": "^4.7.2",
-    "zora": "^5.0.2"
+    "@cloudflare/workers-types": "^3.17.0",
+    "typescript": "^4.8.4",
+    "wrangler": "2.1.12"
   },
   "private": true,
   "scripts": {
-    "build": "esbuild worker.ts --target=es2020 --outfile=dist/worker.js --format=esm --bundle --banner:js=//$(git describe --always)",
-    "test": "node -r esbuild-runner/register worker.test.ts",
-    "tsc": "tsc --noEmit --watch"
+    "start": "wrangler dev",
+    "deploy": "wrangler publish",
+    "test": "node -r esbuild-runner/register src/index.test.ts",
+    "tsc": "tsc --watch"
   },
+  "dependencies": {
+    "esbuild-runner": "^2.2.2",
+    "zora": "^5.1.0"
+  }
 }
serverless/cloudflare/worker.test.ts → serverless/cloudflare/src/index.test.ts

@@ -1,5 +1,5 @@
 import { test } from "zora";
-import { pmtiles_path } from "./worker";
+import { pmtiles_path } from "./index";

 test("pmtiles path", (assertion) => {
   let result = pmtiles_path(undefined, "foo");
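For context, the substitution that pmtiles_path performs (its implementation appears in src/index.ts below); the template and archive name here are illustrative values, not taken from the test file:

// Illustrative values only:
pmtiles_path(undefined, "foo");               // "foo.pmtiles"
pmtiles_path("folder/{name}.pmtiles", "foo"); // "folder/foo.pmtiles"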
serverless/cloudflare/src/index.ts (new file, 111 lines)

@@ -0,0 +1,111 @@
/**
 * - Run `wrangler dev src/index.ts` in your terminal to start a development server
 * - Open a browser tab at http://localhost:8787/ to see your worker in action
 * - Run `wrangler publish src/index.ts --name my-worker` to publish your worker
 */

import {
  PMTiles,
  Source,
  RangeResponse,
  ResolvedValueCache,
} from "../../../js";

export interface Env {
  BUCKET: R2Bucket;
  PMTILES_PATH?: string;
}

class KeyNotFoundError extends Error {
  constructor(message: string) {
    super(message);
  }
}

const TILE = new RegExp(
  /^\/([0-9a-zA-Z\/!\-_\.\*\'\(\)]+)\/(\d+)\/(\d+)\/(\d+).([a-z]+)$/
);

export const pmtiles_path = (p: string | undefined, name: string): string => {
  if (p) {
    return p.replace("{name}", name);
  }
  return name + ".pmtiles";
};

const CACHE = new ResolvedValueCache();

export class R2Source implements Source {
  env: Env;
  archive_name: string;

  constructor(env: Env, archive_name: string) {
    this.env = env;
    this.archive_name = archive_name;
  }

  getKey() {
    return "";
  }

  async getBytes(offset: number, length: number): Promise<RangeResponse> {
    const resp = await this.env.BUCKET.get(
      pmtiles_path(this.env.PMTILES_PATH, this.archive_name),
      {
        range: { offset: offset, length: length },
      }
    );
    if (!resp) {
      throw new KeyNotFoundError("Archive not found");
    }
    const o = resp as R2ObjectBody;
    const a = await o.arrayBuffer();
    return { data: a, etag: o.etag };
  }
}

export default {
  async fetch(
    request: Request,
    env: Env,
    ctx: ExecutionContext
  ): Promise<Response> {
    const url = new URL(request.url);
    const match = url.pathname.match(TILE)!;

    if (match) {
      const archive_name = match[1];
      const z = +match[2];
      const x = +match[3];
      const y = +match[4];
      const ext = match[5];
      const source = new R2Source(env, archive_name);
      const p = new PMTiles(source, CACHE);

      // TODO: optimize by checking header min/maxzoom
      // TODO: enforce extensions and MIME type using header information
      try {
        const tile = await p.getZxy(z, x, y);
        const headers = new Headers();
        headers.set("Access-Control-Allow-Origin", "*"); // TODO: make configurable
        headers.set("Content-Type", "application/protobuf");

        // TODO: optimize by making decompression optional
        if (tile) {
          return new Response(tile.data, { headers: headers, status: 200 });
        } else {
          return new Response(undefined, { headers: headers, status: 204 });
        }
      } catch (e) {
        if (e instanceof KeyNotFoundError) {
          return new Response("Archive not found", { status: 404 });
        } else {
          throw e;
        }
      }
    }

    // TODO: metadata responses
    return new Response("Invalid URL", { status: 400 });
  },
};
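The TILE pattern above drives the routing: the archive name (which may itself contain slashes), the tile coordinates, and the extension are all taken from the URL path, and the module-scoped CACHE lets lookups resolved through ResolvedValueCache be reused across requests handled by the same worker isolate. A sketch of the match, with an assumed request path that is not part of the commit:

// Sketch with an assumed path; the worker does the equivalent inside fetch().
const m = "/my-archive/4/8/5.mvt".match(TILE);
// m[1] === "my-archive"  -> archive name, resolved to an R2 key via pmtiles_path()
// m[2], m[3], m[4]       -> z = 4, x = 8, y = 5
// m[5] === "mvt"         -> extension (not yet validated; see the TODOs above)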
serverless/cloudflare/tsconfig.json

@@ -1,12 +1,23 @@
 {
-  "compilerOptions": {
-    "target": "es6",
-    "lib": ["es2020", "dom"],
-    "strict": true,
-    "moduleResolution": "node",
-    "paths": {
-    },
-    "types": []
-  },
-  "include": ["worker.ts","cloudflare.d.ts"]
+  "compilerOptions": {
+    "target": "es2021",
+    "lib": [
+      "es2021"
+    ],
+    "jsx": "react",
+    "module": "es2022",
+    "moduleResolution": "node",
+    "types": [
+      "@cloudflare/workers-types"
+    ],
+    "resolveJsonModule": true,
+    "allowJs": true,
+    "checkJs": false,
+    "noEmit": true,
+    "isolatedModules": true,
+    "allowSyntheticDefaultImports": true,
+    "forceConsistentCasingInFileNames": true,
+    "strict": true,
+    "skipLibCheck": true
+  }
 }
serverless/cloudflare/worker.ts (deleted, 163 lines)

@@ -1,163 +0,0 @@
import { PMTiles, Source } from "../../js";

interface Env {
  BUCKET: R2Bucket;
  PMTILES_PATH: string | undefined;
}

interface CacheEntry {
  lastUsed: number;
  buffer: DataView;
}

class KeyNotFoundError extends Error {
  constructor(message: string) {
    super(message);
  }
}

export class LRUCache {
  entries: Map<string, CacheEntry>;
  counter: number;

  constructor() {
    this.entries = new Map<string, CacheEntry>();
    this.counter = 0;
  }

  async get(
    bucket: R2Bucket,
    key: string,
    offset: number,
    length: number
  ): Promise<[boolean, DataView]> {
    let cacheKey = key + ":" + offset + "-" + length;
    let val = this.entries.get(cacheKey);
    if (val) {
      val.lastUsed = this.counter++;
      return [true, val.buffer];
    }

    let resp = await bucket.get(key, {
      range: { offset: offset, length: length },
    });
    if (!resp) {
      throw new KeyNotFoundError("Key not found");
    }
    let a = await (resp as R2ObjectBody).arrayBuffer();
    let d = new DataView(a);

    this.entries.set(cacheKey, {
      lastUsed: this.counter++,
      buffer: d,
    });
    if (this.entries.size > 128) {
      let minUsed = Infinity;
      let minKey = undefined;
      this.entries.forEach((val, key) => {
        if (val.lastUsed < minUsed) {
          minUsed = val.lastUsed;
          minKey = key;
        }
      });
      if (minKey) this.entries.delete(minKey);
    }

    return [false, d];
  }
}

let worker_cache = new LRUCache();

export const pmtiles_path = (p: string | undefined, name: string): string => {
  if (p) {
    return p.replace("{name}", name);
  }
  return name + ".pmtiles";
};

const TILE = new RegExp(
  /^\/([0-9a-zA-Z\/!\-_\.\*\'\(\)]+)\/(\d+)\/(\d+)\/(\d+).pbf$/
);

export default {
  async fetch(
    request: Request,
    env: Env,
    context: ExecutionContext
  ): Promise<Response> {
    let url = new URL(request.url);

    let match = url.pathname.match(TILE)!;

    let subrequests = 1;

    if (match) {
      let name = match[1];
      let z = +match[2];
      let x = +match[3];
      let y = +match[4];
      class TempSource {
        getKey() {
          return "";
        }

        async getBytes(offset: number, length: number) {
          let result = await worker_cache.get(
            env.BUCKET,
            pmtiles_path(env.PMTILES_PATH, name),
            offset,
            length
          );

          if (!result[0]) subrequests++;

          return result[1];
        }
      }

      let source = new TempSource();

      let p = new PMTiles(source);
      try {
        let metadata = await p.metadata();

        if (z < metadata.minzoom || z > metadata.maxzoom) {
          return new Response("Tile not found", { status: 404 });
        }
        let entry = await p.getZxy(z, x, y);
        if (entry) {
          let tile = await env.BUCKET.get(
            pmtiles_path(env.PMTILES_PATH, name),
            {
              range: { offset: entry.offset, length: entry.length },
            }
          );

          let headers = new Headers();
          headers.set("Access-Control-Allow-Origin", "*");
          headers.set("Content-Type", "application/x-protobuf");
          headers.set("X-Pmap-Subrequests", subrequests.toString());

          if (metadata.compression === "gzip") {
            headers.set("Content-Encoding", "gzip");
          }

          return new Response((tile as R2ObjectBody).body, {
            headers: headers,
            encodeBody: "manual",
          } as any);
        } else {
          return new Response(undefined, { status: 204 });
        }
      } catch (e) {
        if (e instanceof KeyNotFoundError) {
          return new Response("Archive not found", { status: 404 });
        } else {
          throw e;
        }
      }
    }
    return new Response("Invalid tile URL", { status: 400 });
  },
};
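The hand-rolled LRUCache removed here (capped at roughly 128 cached ranges per isolate) is the piece that the new src/index.ts replaces with the library's ResolvedValueCache plus an R2Source. Its call shape, sketched with assumed arguments inside an async handler:

// Sketch with assumed arguments; `env.BUCKET` is the R2 binding from Env.
const cache = new LRUCache();
const [hit, view] = await cache.get(env.BUCKET, "foo.pmtiles", 0, 16384);
// `hit` is true when the range was served from memory (no R2 subrequest);
// `view` is a DataView over the requested byte range.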