mirror of https://github.com/protomaps/PMTiles.git
synced 2026-02-04 02:41:09 +00:00
serverless js: make formatting consistent
@@ -1,16 +1,16 @@
import { Readable } from "stream";
import {
  Context,
  APIGatewayProxyResult,
  APIGatewayProxyEventV2,
} from "aws-lambda";
import {
  PMTiles,
  ResolvedValueCache,
  RangeResponse,
  Source,
  Compression,
  TileType,
} from "../../../js/index";

import https from "https";
@@ -21,23 +21,23 @@ import { NodeHttpHandler } from "@aws-sdk/node-http-handler";

// the region should default to the same one as the function
const s3client = new S3Client({
  requestHandler: new NodeHttpHandler({
    connectionTimeout: 500,
    socketTimeout: 500,
  }),
});

async function nativeDecompress(
  buf: ArrayBuffer,
  compression: Compression
): Promise<ArrayBuffer> {
  if (compression === Compression.None || compression === Compression.Unknown) {
    return buf;
  } else if (compression === Compression.Gzip) {
    return zlib.gunzipSync(buf);
  } else {
    throw Error("Compression method not supported");
  }
}

// Lambda needs to run with 512MB, empty function takes about 70
@@ -45,219 +45,219 @@ const CACHE = new ResolvedValueCache(undefined, undefined, nativeDecompress);

// duplicated code below
export const pmtiles_path = (name: string, setting?: string): string => {
  if (setting) {
    return setting.replaceAll("{name}", name);
  }
  return name + ".pmtiles";
};
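
A quick sketch (editor's illustration, not part of this commit) of how pmtiles_path resolves archive names into bucket keys; the "folder/{name}.pmtiles" value is a hypothetical PMTILES_PATH setting:

pmtiles_path("foo"); // "foo.pmtiles"
pmtiles_path("foo", "folder/{name}.pmtiles"); // "folder/foo.pmtiles"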

const TILE =
  /^\/(?<NAME>[0-9a-zA-Z\/!\-_\.\*\'\(\)]+)\/(?<Z>\d+)\/(?<X>\d+)\/(?<Y>\d+).(?<EXT>[a-z]+)$/;

export const tile_path = (
  path: string,
  setting?: string
): {
  ok: boolean;
  name: string;
  tile: [number, number, number];
  ext: string;
} => {
  let pattern = TILE;
  if (setting) {
    // escape regex
    setting = setting.replace(/[.*+?^$()|[\]\\]/g, "\\$&");
    setting = setting.replace("{name}", "(?<NAME>[0-9a-zA-Z/!-_.*'()]+)");
    setting = setting.replace("{z}", "(?<Z>\\d+)");
    setting = setting.replace("{x}", "(?<X>\\d+)");
    setting = setting.replace("{y}", "(?<Y>\\d+)");
    setting = setting.replace("{ext}", "(?<EXT>[a-z]+)");
    pattern = new RegExp(setting);
  }

  let match = path.match(pattern);

  if (match) {
    const g = match.groups!;
    return { ok: true, name: g.NAME, tile: [+g.Z, +g.X, +g.Y], ext: g.EXT };
  }
  return { ok: false, name: "", tile: [0, 0, 0], ext: "" };
};
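
A sketch (not part of this commit) of tile_path in action; with no setting the default TILE regex applies, and the "/tiles/{name}/{z}/{x}/{y}.{ext}" template below is a hypothetical TILE_PATH value:

tile_path("/foo/4/8/5.mvt");
// => { ok: true, name: "foo", tile: [4, 8, 5], ext: "mvt" }
tile_path("/tiles/foo/4/8/5.mvt", "/tiles/{name}/{z}/{x}/{y}.{ext}");
// => { ok: true, name: "foo", tile: [4, 8, 5], ext: "mvt" }
tile_path("/not-a-tile");
// => { ok: false, name: "", tile: [0, 0, 0], ext: "" }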

class S3Source implements Source {
  archive_name: string;

  constructor(archive_name: string) {
    this.archive_name = archive_name;
  }

  getKey() {
    return this.archive_name;
  }

  async getBytes(offset: number, length: number): Promise<RangeResponse> {
    const resp = await s3client.send(
      new GetObjectCommand({
        Bucket: process.env.BUCKET!,
        Key: pmtiles_path(this.archive_name, process.env.PMTILES_PATH),
        Range: "bytes=" + offset + "-" + (offset + length - 1),
      })
    );

    const arr = await resp.Body!.transformToByteArray();

    return {
      data: arr.buffer,
      etag: resp.ETag,
      expires: resp.Expires?.toISOString(),
      cacheControl: resp.CacheControl,
    };
  }
}
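
A sketch (not part of this commit) of how the PMTiles client drives Source.getBytes: each call becomes one S3 ranged GET, and the first read typically covers the archive header and root directory. "foo" is a hypothetical archive name, and BUCKET must be set in the environment:

const src = new S3Source("foo");
const { data, etag } = await src.getBytes(0, 16384); // HTTP Range: bytes=0-16383
console.log(data.byteLength, etag);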

interface Headers {
  [key: string]: string;
}

const apiResp = (
  statusCode: number,
  body: string,
  isBase64Encoded = false,
  headers: Headers = {}
): APIGatewayProxyResult => {
  return {
    statusCode: statusCode,
    body: body,
    headers: headers,
    isBase64Encoded: isBase64Encoded,
  };
};

// Assumes event is an API Gateway V2 or Lambda Function URL formatted dict
// and returns API Gateway V2 / Lambda Function dict responses
// Does not work with CloudFront events/Lambda@Edge; see README
export const handlerRaw = async (
  event: APIGatewayProxyEventV2,
  context: Context,
  tilePostprocess?: (a: ArrayBuffer, t: TileType) => ArrayBuffer
): Promise<APIGatewayProxyResult> => {
  var path;
  var is_api_gateway;
  if (event.pathParameters) {
    is_api_gateway = true;
    if (event.pathParameters.proxy) {
      path = "/" + event.pathParameters.proxy;
    } else {
      return apiResp(500, "Proxy integration missing tile_path parameter");
    }
  } else {
    path = event.rawPath;
  }

  if (!path) {
    return apiResp(500, "Invalid event configuration");
  }

  var headers: Headers = {};
  // TODO: metadata and TileJSON

  if (process.env.CORS) {
    headers["Access-Control-Allow-Origin"] = process.env.CORS;
  }

  const { ok, name, tile, ext } = tile_path(path, process.env.TILE_PATH);

  if (!ok) {
    return apiResp(400, "Invalid tile URL", false, headers);
  }

  const source = new S3Source(name);
  const p = new PMTiles(source, CACHE, nativeDecompress);
  try {
    const header = await p.getHeader();
    if (tile[0] < header.minZoom || tile[0] > header.maxZoom) {
      return apiResp(404, "", false, headers);
    }

    for (const pair of [
      [TileType.Mvt, "mvt"],
      [TileType.Png, "png"],
      [TileType.Jpeg, "jpg"],
      [TileType.Webp, "webp"],
    ]) {
      if (header.tileType === pair[0] && ext !== pair[1]) {
        if (header.tileType == TileType.Mvt && ext === "pbf") {
          // allow this for now. Eventually we will delete this in favor of .mvt
          continue;
        }
        return apiResp(
          400,
          "Bad request: archive has type ." + pair[1],
          false,
          headers
        );
      }
    }

    const tile_result = await p.getZxy(tile[0], tile[1], tile[2]);
    if (tile_result) {
      switch (header.tileType) {
        case TileType.Mvt:
          // part of the list of CloudFront compressible types.
          headers["Content-Type"] = "application/vnd.mapbox-vector-tile";
          break;
        case TileType.Png:
          headers["Content-Type"] = "image/png";
          break;
        case TileType.Jpeg:
          headers["Content-Type"] = "image/jpeg";
          break;
        case TileType.Webp:
          headers["Content-Type"] = "image/webp";
          break;
      }

      let data = tile_result.data;

      if (tilePostprocess) {
        data = tilePostprocess(data, header.tileType);
      }

      if (is_api_gateway) {
        // this is wasted work, but we need to force API Gateway to interpret the Lambda response as binary
        // without depending on clients sending matching Accept: headers in the request.
        const recompressed_data = zlib.gzipSync(data);
        headers["Content-Encoding"] = "gzip";
        return apiResp(
          200,
          Buffer.from(recompressed_data).toString("base64"),
          true,
          headers
        );
      } else {
        // returns uncompressed response
        return apiResp(
          200,
          Buffer.from(data).toString("base64"),
          true,
          headers
        );
      }
    } else {
      return apiResp(204, "", false, headers);
    }
  } catch (e) {
    if ((e as Error).name === "AccessDenied") {
      return apiResp(403, "Bucket access unauthorized", false, headers);
    }
    throw e;
  }
  return apiResp(404, "Invalid URL", false, headers);
};

export const handler = async (
  event: APIGatewayProxyEventV2,
  context: Context
): Promise<APIGatewayProxyResult> => {
  return handlerRaw(event, context);
};
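
A sketch (not part of this commit) of why handlerRaw exposes the optional tilePostprocess hook: a deployment can wrap it to rewrite each tile before it is returned. The identity function below is a hypothetical placeholder:

export const handlerWithPostprocess = async (
  event: APIGatewayProxyEventV2,
  context: Context
): Promise<APIGatewayProxyResult> => {
  return handlerRaw(event, context, (data: ArrayBuffer, t: TileType) => {
    // e.g. filter MVT layers or re-encode rasters here
    return data;
  });
};
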
@@ -5,240 +5,240 @@
*/

import {
  PMTiles,
  Source,
  RangeResponse,
  ResolvedValueCache,
  TileType,
  Compression,
} from "../../../js/index";

interface Env {
  BUCKET: R2Bucket;
  ALLOWED_ORIGINS?: string;
  PMTILES_PATH?: string;
  TILE_PATH?: string;
  CACHE_MAX_AGE?: number;
}

class KeyNotFoundError extends Error {
  constructor(message: string) {
    super(message);
  }
}

export const pmtiles_path = (name: string, setting?: string): string => {
  if (setting) {
    return setting.replaceAll("{name}", name);
  }
  return name + ".pmtiles";
};

const TILE =
  /^\/(?<NAME>[0-9a-zA-Z\/!\-_\.\*\'\(\)]+)\/(?<Z>\d+)\/(?<X>\d+)\/(?<Y>\d+).(?<EXT>[a-z]+)$/;

export const tile_path = (
  path: string,
  setting?: string
): {
  ok: boolean;
  name: string;
  tile: [number, number, number];
  ext: string;
} => {
  let pattern = TILE;
  if (setting) {
    // escape regex
    setting = setting.replace(/[.*+?^$()|[\]\\]/g, "\\$&");
    setting = setting.replace("{name}", "(?<NAME>[0-9a-zA-Z/!-_.*'()]+)");
    setting = setting.replace("{z}", "(?<Z>\\d+)");
    setting = setting.replace("{x}", "(?<X>\\d+)");
    setting = setting.replace("{y}", "(?<Y>\\d+)");
    setting = setting.replace("{ext}", "(?<EXT>[a-z]+)");
    pattern = new RegExp(setting);
  }

  let match = path.match(pattern);

  if (match) {
    const g = match.groups!;
    return { ok: true, name: g.NAME, tile: [+g.Z, +g.X, +g.Y], ext: g.EXT };
  }
  return { ok: false, name: "", tile: [0, 0, 0], ext: "" };
};

async function nativeDecompress(
  buf: ArrayBuffer,
  compression: Compression
): Promise<ArrayBuffer> {
  if (compression === Compression.None || compression === Compression.Unknown) {
    return buf;
  } else if (compression === Compression.Gzip) {
    let stream = new Response(buf).body!;
    let result = stream.pipeThrough(new DecompressionStream("gzip"));
    return new Response(result).arrayBuffer();
  } else {
    throw Error("Compression method not supported");
  }
}
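
A sketch (not part of this commit): DecompressionStream is the Workers-native counterpart to the Node zlib call in the Lambda handler, and CompressionStream runs the same pipeline in reverse, so a round trip exercises the Gzip branch above:

const gzipped = await new Response(
  new Response("hello").body!.pipeThrough(new CompressionStream("gzip"))
).arrayBuffer();
const restored = await nativeDecompress(gzipped, Compression.Gzip);
console.log(new TextDecoder().decode(restored)); // "hello"
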
const CACHE = new ResolvedValueCache(25, undefined, nativeDecompress);

class R2Source implements Source {
  env: Env;
  archive_name: string;

  constructor(env: Env, archive_name: string) {
    this.env = env;
    this.archive_name = archive_name;
  }

  getKey() {
    return this.archive_name;
  }

  async getBytes(offset: number, length: number): Promise<RangeResponse> {
    const resp = await this.env.BUCKET.get(
      pmtiles_path(this.archive_name, this.env.PMTILES_PATH),
      {
        range: { offset: offset, length: length },
      }
    );
    if (!resp) {
      throw new KeyNotFoundError("Archive not found");
    }
    const o = resp as R2ObjectBody;
    const a = await o.arrayBuffer();
    return {
      data: a,
      etag: o.etag,
      cacheControl: o.httpMetadata?.cacheControl,
      expires: o.httpMetadata?.cacheExpiry?.toISOString(),
    };
  }
}
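
A sketch (not part of this commit) of how a getBytes call maps onto an R2 ranged get; the archive name "foo" is hypothetical, and the function assumes a bound BUCKET:

async function headerProbe(env: Env): Promise<number> {
  const src = new R2Source(env, "foo");
  // the PMTiles client's first read typically covers the header and root directory
  const { data } = await src.getBytes(0, 16384);
  return data.byteLength;
}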

export default {
  async fetch(
    request: Request,
    env: Env,
    ctx: ExecutionContext
  ): Promise<Response> {
    if (request.method.toUpperCase() === "POST")
      return new Response(undefined, { status: 405 });

    const url = new URL(request.url);
    const { ok, name, tile, ext } = tile_path(url.pathname, env.TILE_PATH);

    const cache = caches.default;

    if (ok) {
      let allowed_origin = "";
      if (typeof env.ALLOWED_ORIGINS !== "undefined") {
        for (let o of env.ALLOWED_ORIGINS.split(",")) {
          if (o === request.headers.get("Origin") || o === "*") {
            allowed_origin = o;
          }
        }
      }

      let cached = await cache.match(request.url);
      if (cached) {
        let resp_headers = new Headers(cached.headers);
        if (allowed_origin)
          resp_headers.set("Access-Control-Allow-Origin", allowed_origin);
        resp_headers.set("Vary", "Origin");

        return new Response(cached.body, {
          headers: resp_headers,
          status: cached.status,
        });
      }

      const cacheableResponse = (
        body: ArrayBuffer | string | undefined,
        cacheable_headers: Headers,
        status: number
      ) => {
        cacheable_headers.set(
          "Cache-Control",
          // logical || (not bitwise |) so a configured CACHE_MAX_AGE is honored
          "max-age=" + (env.CACHE_MAX_AGE || 86400)
        );
        let cacheable = new Response(body, {
          headers: cacheable_headers,
          status: status,
        });

        // normalize HEAD requests
        ctx.waitUntil(cache.put(request.url, cacheable));

        let resp_headers = new Headers(cacheable_headers);
        if (allowed_origin)
          resp_headers.set("Access-Control-Allow-Origin", allowed_origin);
        resp_headers.set("Vary", "Origin");
        return new Response(body, { headers: resp_headers, status: status });
      };

      const cacheable_headers = new Headers();
      const source = new R2Source(env, name);
      const p = new PMTiles(source, CACHE, nativeDecompress);
      try {
        const p_header = await p.getHeader();
        if (tile[0] < p_header.minZoom || tile[0] > p_header.maxZoom) {
          return cacheableResponse(undefined, cacheable_headers, 404);
        }

        for (const pair of [
          [TileType.Mvt, "mvt"],
          [TileType.Png, "png"],
          [TileType.Jpeg, "jpg"],
          [TileType.Webp, "webp"],
        ]) {
          if (p_header.tileType === pair[0] && ext !== pair[1]) {
            if (p_header.tileType == TileType.Mvt && ext === "pbf") {
              // allow this for now. Eventually we will delete this in favor of .mvt
              continue;
            }
            return cacheableResponse(
              "Bad request: archive has type ." + pair[1],
              cacheable_headers,
              400
            );
          }
        }

        const tiledata = await p.getZxy(tile[0], tile[1], tile[2]);

        switch (p_header.tileType) {
          case TileType.Mvt:
            cacheable_headers.set("Content-Type", "application/x-protobuf");
            break;
          case TileType.Png:
            cacheable_headers.set("Content-Type", "image/png");
            break;
          case TileType.Jpeg:
            cacheable_headers.set("Content-Type", "image/jpeg");
            break;
          case TileType.Webp:
            cacheable_headers.set("Content-Type", "image/webp");
            break;
        }

        if (tiledata) {
          return cacheableResponse(tiledata.data, cacheable_headers, 200);
        } else {
          return cacheableResponse(undefined, cacheable_headers, 204);
        }
      } catch (e) {
        if (e instanceof KeyNotFoundError) {
          return cacheableResponse("Archive not found", cacheable_headers, 404);
        } else {
          throw e;
        }
      }
    }

    // TODO: metadata responses, tileJSON
    return new Response("Invalid URL", { status: 404 });
  },
};
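
A sketch (not part of this commit) of exercising the Worker under the Workers runtime, e.g. wrangler dev or miniflare (caches.default does not exist in plain Node). The module path, URL, and bucket stub are hypothetical; a get() returning null drives the KeyNotFoundError path to a 404:

import worker from "./index"; // hypothetical module path

const env = {
  BUCKET: { async get() { return null; } } as unknown as R2Bucket,
  ALLOWED_ORIGINS: "*",
};

const ctx = {
  waitUntil() {},
  passThroughOnException() {},
} as unknown as ExecutionContext;

const resp = await worker.fetch(
  new Request("https://tiles.example.com/foo/0/0/0.mvt"),
  env,
  ctx
);
console.log(resp.status); // 404, "Archive not found"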