Initial commit
This commit is contained in:
211
node_modules/maplibre-gl/src/source/canvas_source.test.ts
generated
vendored
Normal file
211
node_modules/maplibre-gl/src/source/canvas_source.test.ts
generated
vendored
Normal file
@@ -0,0 +1,211 @@
|
||||
import {describe, beforeEach, test, expect, vi, afterEach} from 'vitest';
|
||||
import {CanvasSource, type CanvasSourceSpecification} from '../source/canvas_source';
|
||||
import {Event, Evented} from '../util/evented';
|
||||
import {extend} from '../util/util';
|
||||
import {Tile} from '../tile/tile';
|
||||
import {OverscaledTileID} from '../tile/tile_id';
|
||||
import {MercatorTransform} from '../geo/projection/mercator_transform';
|
||||
import {waitForEvent} from '../util/test/util';
|
||||
import type {IReadonlyTransform} from '../geo/transform_interface';
|
||||
import type {Dispatcher} from '../util/dispatcher';
|
||||
import type {MapSourceDataEvent} from '../ui/events';
|
||||
|
||||
function createSource(options?: { canvas?: any; eventedParent?: any} & Partial<CanvasSourceSpecification>) {
|
||||
const c = options && options.canvas || window.document.createElement('canvas');
|
||||
c.width = 20;
|
||||
c.height = 20;
|
||||
|
||||
options = extend({
|
||||
canvas: 'id',
|
||||
coordinates: [[0, 0], [1, 0], [1, 1], [0, 1]],
|
||||
}, options);
|
||||
|
||||
const source = new CanvasSource('id', options as CanvasSourceSpecification, {} as Dispatcher, options.eventedParent);
|
||||
|
||||
source.canvas = c;
|
||||
|
||||
return source;
|
||||
}
|
||||
|
||||
class StubMap extends Evented {
|
||||
transform: IReadonlyTransform;
|
||||
style: any;
|
||||
painter: any;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
this.transform = new MercatorTransform();
|
||||
this.style = {};
|
||||
this.painter = {
|
||||
context: {
|
||||
gl: {}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
triggerRepaint() {
|
||||
this.fire(new Event('rerender'));
|
||||
}
|
||||
}
|
||||
|
||||
describe('CanvasSource', () => {
|
||||
let map;
|
||||
beforeEach(() => {
|
||||
map = new StubMap();
|
||||
});
|
||||
|
||||
test('constructor', async () => {
|
||||
const source = createSource();
|
||||
|
||||
expect(source.minzoom).toBe(0);
|
||||
expect(source.maxzoom).toBe(22);
|
||||
expect(source.tileSize).toBe(512);
|
||||
expect(source.animate).toBe(true);
|
||||
|
||||
const promise = waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.dataType === 'source' && e.sourceDataType === 'metadata');
|
||||
|
||||
source.onAdd(map);
|
||||
await promise;
|
||||
|
||||
expect(typeof source.play).toBe('function');
|
||||
});
|
||||
|
||||
describe('Validations', () => {
|
||||
const errorSpy = vi.fn();
|
||||
let eventedParent: Evented;
|
||||
beforeEach(() => {
|
||||
eventedParent = new Evented();
|
||||
eventedParent.on('error', errorSpy);
|
||||
});
|
||||
afterEach(() => {
|
||||
errorSpy.mockClear();
|
||||
});
|
||||
test('self-validates coordinates array length', () => {
|
||||
createSource({coordinates: [], eventedParent} as any);
|
||||
expect(errorSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('self-validates coordinates as string', () => {
|
||||
createSource({coordinates: 'asdf', eventedParent} as any);
|
||||
expect(errorSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('self-validates animate as number', () => {
|
||||
createSource({animate: 8, eventedParent} as any);
|
||||
expect(errorSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('self-validates canvas as empty opbject', () => {
|
||||
createSource({canvas: {}, eventedParent} as any);
|
||||
expect(errorSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('self-validates passes on valid canvas object', () => {
|
||||
const canvasEl = document.createElement('canvas');
|
||||
createSource({canvas: canvasEl, eventedParent});
|
||||
expect(errorSpy).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
|
||||
test('can be initialized with HTML element', async () => {
|
||||
const el = document.createElement('canvas');
|
||||
const source = createSource({
|
||||
canvas: el
|
||||
});
|
||||
|
||||
const prmoise = waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.dataType === 'source' && e.sourceDataType === 'metadata');
|
||||
|
||||
source.onAdd(map);
|
||||
|
||||
await prmoise;
|
||||
expect(source.canvas).toBe(el);
|
||||
});
|
||||
|
||||
test('rerenders if animated', async () => {
|
||||
const source = createSource();
|
||||
|
||||
const promise = waitForEvent(map, 'rerender', () => true);
|
||||
|
||||
source.onAdd(map);
|
||||
|
||||
await expect(promise).resolves.toBeDefined();
|
||||
});
|
||||
|
||||
test('can be static', async () => {
|
||||
const source = createSource({
|
||||
animate: false
|
||||
});
|
||||
|
||||
const spy = vi.fn();
|
||||
map.on('rerender', spy);
|
||||
|
||||
const promise = waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.dataType === 'source' && e.sourceDataType === 'metadata');
|
||||
|
||||
source.onAdd(map);
|
||||
|
||||
await expect(promise).resolves.toBeDefined();
|
||||
expect(spy).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('onRemove stops animation', () => {
|
||||
const source = createSource();
|
||||
|
||||
source.onAdd(map);
|
||||
|
||||
expect(source.hasTransition()).toBe(true);
|
||||
|
||||
source.onRemove();
|
||||
|
||||
expect(source.hasTransition()).toBe(false);
|
||||
|
||||
source.onAdd(map);
|
||||
|
||||
expect(source.hasTransition()).toBe(true);
|
||||
|
||||
});
|
||||
|
||||
test('play and pause animation', () => {
|
||||
const source = createSource();
|
||||
|
||||
source.onAdd(map);
|
||||
|
||||
expect(source.hasTransition()).toBe(true);
|
||||
|
||||
source.pause();
|
||||
|
||||
expect(source.hasTransition()).toBe(false);
|
||||
|
||||
source.play();
|
||||
|
||||
expect(source.hasTransition()).toBe(true);
|
||||
|
||||
});
|
||||
|
||||
test('fires idle event on prepare call when there is at least one not loaded tile', async () => {
|
||||
const source = createSource();
|
||||
const tile = new Tile(new OverscaledTileID(1, 0, 1, 0, 0), 512);
|
||||
|
||||
const promise = waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.dataType === 'source' && e.sourceDataType === 'idle');
|
||||
source.onAdd(map);
|
||||
|
||||
source.tiles[String(tile.tileID.wrap)] = tile;
|
||||
// assign dummies directly so we don't need to stub the gl things
|
||||
source.texture = {
|
||||
update: () => {}
|
||||
} as any;
|
||||
source.prepare();
|
||||
|
||||
await promise;
|
||||
expect(tile.state).toBe('loaded');
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
test('CanvasSource.serialize', () => {
|
||||
const source = createSource();
|
||||
|
||||
const serialized = source.serialize();
|
||||
expect(serialized.type).toBe('canvas');
|
||||
expect(serialized.coordinates).toEqual([[0, 0], [1, 0], [1, 1], [0, 1]]);
|
||||
|
||||
});
|
||||
220
node_modules/maplibre-gl/src/source/canvas_source.ts
generated
vendored
Normal file
220
node_modules/maplibre-gl/src/source/canvas_source.ts
generated
vendored
Normal file
@@ -0,0 +1,220 @@
|
||||
import {ImageSource} from './image_source';
|
||||
|
||||
import {Texture} from '../render/texture';
|
||||
import {Event, ErrorEvent} from '../util/evented';
|
||||
import {ValidationError} from '@maplibre/maplibre-gl-style-spec';
|
||||
|
||||
import type {Map} from '../ui/map';
|
||||
import type {Dispatcher} from '../util/dispatcher';
|
||||
import type {Evented} from '../util/evented';
|
||||
|
||||
/**
 * Options to add a canvas source type to the map.
 */
export type CanvasSourceSpecification = {
    /**
     * Source type. Must be `"canvas"`.
     */
    type: 'canvas';
    /**
     * Four geographical coordinates denoting where to place the corners of the canvas, specified in `[longitude, latitude]` pairs.
     * Exactly 4 pairs of 2 numbers are required; the CanvasSource constructor fires a validation error otherwise.
     */
    coordinates: [[number, number], [number, number], [number, number], [number, number]];
    /**
     * Whether the canvas source is animated. If the canvas is static (i.e. pixels do not need to be re-read on every frame), `animate` should be set to `false` to improve performance.
     * @defaultValue true
     */
    animate?: boolean;
    /**
     * Canvas source from which to read pixels. Can be a string representing the ID of the canvas element, or the `HTMLCanvasElement` itself.
     * Although optional in this type, the CanvasSource constructor fires a validation error when it is missing.
     */
    canvas?: string | HTMLCanvasElement;
};
|
||||
|
||||
/**
|
||||
* A data source containing the contents of an HTML canvas. See {@link CanvasSourceSpecification} for detailed documentation of options.
|
||||
*
|
||||
* @group Sources
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // add to map
|
||||
* map.addSource('some id', {
|
||||
* type: 'canvas',
|
||||
* canvas: 'idOfMyHTMLCanvas',
|
||||
* animate: true,
|
||||
* coordinates: [
|
||||
* [-76.54, 39.18],
|
||||
* [-76.52, 39.18],
|
||||
* [-76.52, 39.17],
|
||||
* [-76.54, 39.17]
|
||||
* ]
|
||||
* });
|
||||
*
|
||||
* // update
|
||||
* let mySource = map.getSource('some id');
|
||||
* mySource.setCoordinates([
|
||||
* [-76.54335737228394, 39.18579907229748],
|
||||
* [-76.52803659439087, 39.1838364847587],
|
||||
* [-76.5295386314392, 39.17683392507606],
|
||||
* [-76.54520273208618, 39.17876344106642]
|
||||
* ]);
|
||||
*
|
||||
* map.removeSource('some id'); // remove
|
||||
* ```
|
||||
*/
|
||||
export class CanvasSource extends ImageSource {
|
||||
options: CanvasSourceSpecification;
|
||||
animate: boolean;
|
||||
canvas: HTMLCanvasElement;
|
||||
width: number;
|
||||
height: number;
|
||||
/**
|
||||
* Enables animation. The image will be copied from the canvas to the map on each frame.
|
||||
*/
|
||||
play: () => void;
|
||||
/**
|
||||
* Disables animation. The map will display a static copy of the canvas image.
|
||||
*/
|
||||
pause: () => void;
|
||||
_playing: boolean;
|
||||
|
||||
/** @internal */
|
||||
constructor(id: string, options: CanvasSourceSpecification, dispatcher: Dispatcher, eventedParent: Evented) {
|
||||
super(id, options, dispatcher, eventedParent);
|
||||
|
||||
// We build in some validation here, since canvas sources aren't included in the style spec:
|
||||
if (!options.coordinates) {
|
||||
this.fire(new ErrorEvent(new ValidationError(`sources.${id}`, null, 'missing required property "coordinates"')));
|
||||
} else if (!Array.isArray(options.coordinates) || options.coordinates.length !== 4 ||
|
||||
options.coordinates.some(c => !Array.isArray(c) || c.length !== 2 || c.some(l => typeof l !== 'number'))) {
|
||||
this.fire(new ErrorEvent(new ValidationError(`sources.${id}`, null, '"coordinates" property must be an array of 4 longitude/latitude array pairs')));
|
||||
}
|
||||
|
||||
if (options.animate && typeof options.animate !== 'boolean') {
|
||||
this.fire(new ErrorEvent(new ValidationError(`sources.${id}`, null, 'optional "animate" property must be a boolean value')));
|
||||
}
|
||||
|
||||
if (!options.canvas) {
|
||||
this.fire(new ErrorEvent(new ValidationError(`sources.${id}`, null, 'missing required property "canvas"')));
|
||||
} else if (typeof options.canvas !== 'string' && !(options.canvas instanceof HTMLCanvasElement)) {
|
||||
this.fire(new ErrorEvent(new ValidationError(`sources.${id}`, null, '"canvas" must be either a string representing the ID of the canvas element from which to read, or an HTMLCanvasElement instance')));
|
||||
}
|
||||
|
||||
this.options = options;
|
||||
this.animate = options.animate !== undefined ? options.animate : true;
|
||||
}
|
||||
|
||||
async load() {
|
||||
this._loaded = true;
|
||||
if (!this.canvas) {
|
||||
this.canvas = (this.options.canvas instanceof HTMLCanvasElement) ?
|
||||
this.options.canvas :
|
||||
document.getElementById(this.options.canvas) as HTMLCanvasElement;
|
||||
// cast to HTMLCanvasElement in else of ternary
|
||||
// should we do a safety check and throw if it's not actually HTMLCanvasElement?
|
||||
}
|
||||
this.width = this.canvas.width;
|
||||
this.height = this.canvas.height;
|
||||
|
||||
if (this._hasInvalidDimensions()) {
|
||||
this.fire(new ErrorEvent(new Error('Canvas dimensions cannot be less than or equal to zero.')));
|
||||
return;
|
||||
}
|
||||
|
||||
this.play = function() {
|
||||
this._playing = true;
|
||||
this.map.triggerRepaint();
|
||||
};
|
||||
|
||||
this.pause = function() {
|
||||
if (this._playing) {
|
||||
this.prepare();
|
||||
this._playing = false;
|
||||
}
|
||||
};
|
||||
|
||||
this._finishLoading();
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the HTML `canvas` element.
|
||||
*
|
||||
* @returns The HTML `canvas` element.
|
||||
*/
|
||||
getCanvas(): HTMLCanvasElement {
|
||||
return this.canvas;
|
||||
}
|
||||
|
||||
onAdd(map: Map) {
|
||||
this.map = map;
|
||||
this.load();
|
||||
if (this.canvas) {
|
||||
if (this.animate) this.play();
|
||||
}
|
||||
}
|
||||
|
||||
onRemove() {
|
||||
this.pause();
|
||||
}
|
||||
|
||||
prepare() {
|
||||
let resize = false;
|
||||
if (this.canvas.width !== this.width) {
|
||||
this.width = this.canvas.width;
|
||||
resize = true;
|
||||
}
|
||||
if (this.canvas.height !== this.height) {
|
||||
this.height = this.canvas.height;
|
||||
resize = true;
|
||||
}
|
||||
|
||||
if (this._hasInvalidDimensions()) return;
|
||||
|
||||
if (Object.keys(this.tiles).length === 0) return; // not enough data for current position
|
||||
|
||||
const context = this.map.painter.context;
|
||||
const gl = context.gl;
|
||||
|
||||
if (!this.texture) {
|
||||
this.texture = new Texture(context, this.canvas, gl.RGBA, {premultiply: true});
|
||||
this.texture.bind(gl.LINEAR, gl.CLAMP_TO_EDGE);
|
||||
} else if (resize || this._playing) {
|
||||
this.texture.update(this.canvas, {premultiply: true});
|
||||
}
|
||||
|
||||
let newTilesLoaded = false;
|
||||
for (const w in this.tiles) {
|
||||
const tile = this.tiles[w];
|
||||
if (tile.state !== 'loaded') {
|
||||
tile.state = 'loaded';
|
||||
tile.texture = this.texture;
|
||||
newTilesLoaded = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (newTilesLoaded) {
|
||||
this.fire(new Event('data', {dataType: 'source', sourceDataType: 'idle', sourceId: this.id}));
|
||||
}
|
||||
}
|
||||
|
||||
serialize(): CanvasSourceSpecification {
|
||||
return {
|
||||
type: 'canvas',
|
||||
animate: this.animate,
|
||||
canvas: this.options.canvas,
|
||||
coordinates: this.coordinates
|
||||
};
|
||||
}
|
||||
|
||||
hasTransition() {
|
||||
return this._playing;
|
||||
}
|
||||
|
||||
_hasInvalidDimensions() {
|
||||
for (const x of [this.canvas.width, this.canvas.height]) {
|
||||
if (isNaN(x) || x <= 0) return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
1233
node_modules/maplibre-gl/src/source/geojson_source.test.ts
generated
vendored
Normal file
1233
node_modules/maplibre-gl/src/source/geojson_source.test.ts
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
647
node_modules/maplibre-gl/src/source/geojson_source.ts
generated
vendored
Normal file
647
node_modules/maplibre-gl/src/source/geojson_source.ts
generated
vendored
Normal file
@@ -0,0 +1,647 @@
|
||||
import {Event, ErrorEvent, Evented} from '../util/evented';
|
||||
import {extend, warnOnce, type ExactlyOne} from '../util/util';
|
||||
import {EXTENT} from '../data/extent';
|
||||
import {ResourceType} from '../util/request_manager';
|
||||
import {browser} from '../util/browser';
|
||||
import {applySourceDiff, mergeSourceDiffs, toUpdateable} from './geojson_source_diff';
|
||||
import {getGeoJSONBounds} from '../util/geojson_bounds';
|
||||
import {MessageType} from '../util/actor_messages';
|
||||
import {tileIdToLngLatBounds} from '../tile/tile_id_to_lng_lat_bounds';
|
||||
|
||||
import type {LngLatBounds} from '../geo/lng_lat_bounds';
|
||||
import type {Source} from './source';
|
||||
import type {Map} from '../ui/map';
|
||||
import type {Dispatcher} from '../util/dispatcher';
|
||||
import type {Tile} from '../tile/tile';
|
||||
import type {Actor} from '../util/actor';
|
||||
import type {GeoJSONWorkerSourceLoadDataResult} from '../util/actor_messages';
|
||||
import type {GeoJSONSourceSpecification, PromoteIdSpecification} from '@maplibre/maplibre-gl-style-spec';
|
||||
import type {GeoJSONFeatureId, GeoJSONSourceDiff} from './geojson_source_diff';
|
||||
import type {GeoJSONWorkerOptions, LoadGeoJSONParameters} from './geojson_worker_source';
|
||||
import type {WorkerTileParameters} from './worker_source';
|
||||
|
||||
/**
 * Options object for GeoJSONSource.
 */
export type GeoJSONSourceOptions = GeoJSONSourceSpecification & {
    /**
     * Extra options merged into the payload sent to the worker; the constructor
     * extends its own `workerOptions` with these so third-party sources can
     * reuse GeoJSONSource with custom worker behavior.
     */
    workerOptions?: GeoJSONWorkerOptions;
    /**
     * When true, resource timing is collected for remote data requests
     * (copied onto the request before it is dispatched to the worker).
     */
    collectResourceTiming?: boolean;
    /**
     * Inline GeoJSON, or a URL string to fetch the GeoJSON from.
     */
    data: GeoJSON.GeoJSON | string;
};
|
||||
|
||||
/**
 * The subset of GeoJSON source options retained on the source instance
 * (stored as `_options`) — TODO confirm this is the intended shape versus
 * the full specification, since the constructor copies all options.
 */
export type GeoJSONSourceInternalOptions = {
    /** Inline GeoJSON, a URL string, or undefined when no data has been set. */
    data?: GeoJSON.GeoJSON | string | undefined;
    /** Whether point features are clustered. */
    cluster?: boolean;
    /** Max zoom at which to cluster points. */
    clusterMaxZoom?: number;
    /** Cluster radius (in pixels; converted to tile units before use). */
    clusterRadius?: number;
    /** Minimum number of points required to form a cluster. */
    clusterMinPoints?: number;
    /** Whether feature ids are generated automatically. */
    generateId?: boolean;
};
|
||||
|
||||
/**
 * Describes which tiles should be reloaded after a differential data update.
 * @internal
 */
export type GeoJSONSourceShouldReloadTileOptions = {
    /**
     * Refresh all tiles that WILL contain these bounds.
     */
    affectedBounds: LngLatBounds[];
};
|
||||
|
||||
/**
 * The cluster options to set
 */
export type SetClusterOptions = {
    /**
     * Whether or not to cluster
     */
    cluster?: boolean;
    /**
     * The cluster's max zoom.
     * Non-integer values are rounded to the closest integer due to supercluster integer value requirements.
     */
    clusterMaxZoom?: number;
    /**
     * The cluster's radius, in pixels (converted to tile units internally via
     * `_pixelsToTileUnits` before being passed to the worker).
     */
    clusterRadius?: number;
};
|
||||
|
||||
/**
|
||||
* A source containing GeoJSON.
|
||||
* (See the [Style Specification](https://maplibre.org/maplibre-style-spec/#sources-geojson) for detailed documentation of options.)
|
||||
*
|
||||
* @group Sources
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* map.addSource('some id', {
|
||||
* type: 'geojson',
|
||||
* data: 'https://d2ad6b4ur7yvpq.cloudfront.net/naturalearth-3.3.0/ne_10m_ports.geojson'
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* map.addSource('some id', {
|
||||
* type: 'geojson',
|
||||
* data: {
|
||||
* "type": "FeatureCollection",
|
||||
* "features": [{
|
||||
* "type": "Feature",
|
||||
* "properties": {},
|
||||
* "geometry": {
|
||||
* "type": "Point",
|
||||
* "coordinates": [
|
||||
* -76.53063297271729,
|
||||
* 39.18174077994108
|
||||
* ]
|
||||
* }
|
||||
* }]
|
||||
* }
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* map.getSource('some id').setData({
|
||||
* "type": "FeatureCollection",
|
||||
* "features": [{
|
||||
* "type": "Feature",
|
||||
* "properties": { "name": "Null Island" },
|
||||
* "geometry": {
|
||||
* "type": "Point",
|
||||
* "coordinates": [ 0, 0 ]
|
||||
* }
|
||||
* }]
|
||||
* });
|
||||
* ```
|
||||
* @see [Draw GeoJSON points](https://maplibre.org/maplibre-gl-js/docs/examples/draw-geojson-points/)
|
||||
* @see [Add a GeoJSON line](https://maplibre.org/maplibre-gl-js/docs/examples/add-a-geojson-line/)
|
||||
* @see [Create a heatmap from points](https://maplibre.org/maplibre-gl-js/docs/examples/create-a-heatmap-layer/)
|
||||
* @see [Create and style clusters](https://maplibre.org/maplibre-gl-js/docs/examples/create-and-style-clusters/)
|
||||
*/
|
||||
export class GeoJSONSource extends Evented implements Source {
|
||||
type: 'geojson';
|
||||
id: string;
|
||||
minzoom: number;
|
||||
maxzoom: number;
|
||||
tileSize: number;
|
||||
attribution: string;
|
||||
promoteId: PromoteIdSpecification;
|
||||
|
||||
isTileClipped: boolean;
|
||||
reparseOverscaled: boolean;
|
||||
_data: ExactlyOne<{
|
||||
url: string;
|
||||
geojson: GeoJSON.GeoJSON;
|
||||
updateable: globalThis.Map<GeoJSONFeatureId, GeoJSON.Feature>;
|
||||
}>;
|
||||
_options: GeoJSONSourceInternalOptions;
|
||||
workerOptions: GeoJSONWorkerOptions;
|
||||
map: Map;
|
||||
actor: Actor;
|
||||
_isUpdatingWorker: boolean;
|
||||
_pendingWorkerUpdate: {
|
||||
data?: GeoJSON.GeoJSON | string;
|
||||
diff?: GeoJSONSourceDiff;
|
||||
updateCluster?: boolean;
|
||||
};
|
||||
_collectResourceTiming: boolean;
|
||||
_removed: boolean;
|
||||
|
||||
/** @internal */
|
||||
constructor(id: string, options: GeoJSONSourceOptions, dispatcher: Dispatcher, eventedParent: Evented) {
|
||||
super();
|
||||
|
||||
this.id = id;
|
||||
|
||||
// `type` is a property rather than a constant to make it easy for 3rd
|
||||
// parties to use GeoJSONSource to build their own source types.
|
||||
this.type = 'geojson';
|
||||
|
||||
this.minzoom = 0;
|
||||
this.maxzoom = 18;
|
||||
this.tileSize = 512;
|
||||
this.isTileClipped = true;
|
||||
this.reparseOverscaled = true;
|
||||
this._removed = false;
|
||||
this._isUpdatingWorker = false;
|
||||
this._pendingWorkerUpdate = {data: options.data};
|
||||
|
||||
this.actor = dispatcher.getActor();
|
||||
this.setEventedParent(eventedParent);
|
||||
|
||||
this._data = typeof options.data === 'string' ? {url: options.data} : {geojson: options.data};
|
||||
this._options = extend({}, options);
|
||||
|
||||
this._collectResourceTiming = options.collectResourceTiming;
|
||||
|
||||
if (options.maxzoom !== undefined) this.maxzoom = options.maxzoom;
|
||||
if (options.type) this.type = options.type;
|
||||
if (options.attribution) this.attribution = options.attribution;
|
||||
this.promoteId = options.promoteId;
|
||||
|
||||
if (options.clusterMaxZoom !== undefined && this.maxzoom <= options.clusterMaxZoom) {
|
||||
warnOnce(`The maxzoom value "${this.maxzoom}" is expected to be greater than the clusterMaxZoom value "${options.clusterMaxZoom}".`);
|
||||
}
|
||||
|
||||
// sent to the worker, along with `url: ...` or `data: literal geojson`,
|
||||
// so that it can load/parse/index the geojson data
|
||||
// extending with `options.workerOptions` helps to make it easy for
|
||||
// third-party sources to hack/reuse GeoJSONSource.
|
||||
this.workerOptions = extend({
|
||||
source: this.id,
|
||||
geojsonVtOptions: {
|
||||
buffer: this._pixelsToTileUnits(options.buffer !== undefined ? options.buffer : 128),
|
||||
tolerance: this._pixelsToTileUnits(options.tolerance !== undefined ? options.tolerance : 0.375),
|
||||
extent: EXTENT,
|
||||
maxZoom: this.maxzoom,
|
||||
lineMetrics: options.lineMetrics || false,
|
||||
generateId: options.generateId || false,
|
||||
promoteId: typeof options.promoteId === 'string' ? options.promoteId : undefined,
|
||||
cluster: options.cluster || false,
|
||||
clusterOptions: {
|
||||
maxZoom: this._getClusterMaxZoom(options.clusterMaxZoom),
|
||||
minPoints: Math.max(2, options.clusterMinPoints || 2),
|
||||
extent: EXTENT,
|
||||
radius: this._pixelsToTileUnits(options.clusterRadius || 50),
|
||||
log: false,
|
||||
generateId: options.generateId || false
|
||||
},
|
||||
},
|
||||
clusterProperties: options.clusterProperties,
|
||||
filter: options.filter
|
||||
}, options.workerOptions);
|
||||
}
|
||||
|
||||
private _hasPendingWorkerUpdate(): boolean {
|
||||
return this._pendingWorkerUpdate.data !== undefined || this._pendingWorkerUpdate.diff !== undefined || this._pendingWorkerUpdate.updateCluster;
|
||||
}
|
||||
|
||||
private _pixelsToTileUnits(pixelValue: number): number {
|
||||
return pixelValue * (EXTENT / this.tileSize);
|
||||
}
|
||||
|
||||
private _getClusterMaxZoom(clusterMaxZoom: number): number {
|
||||
const effectiveClusterMaxZoom = clusterMaxZoom ? Math.round(clusterMaxZoom) : this.maxzoom - 1;
|
||||
if (!(Number.isInteger(clusterMaxZoom) || clusterMaxZoom === undefined)) {
|
||||
warnOnce(`Integer expected for option 'clusterMaxZoom': provided value "${clusterMaxZoom}" rounded to "${effectiveClusterMaxZoom}"`);
|
||||
}
|
||||
return effectiveClusterMaxZoom;
|
||||
}
|
||||
|
||||
async load() {
|
||||
await this._updateWorkerData();
|
||||
}
|
||||
|
||||
onAdd(map: Map) {
|
||||
this.map = map;
|
||||
this.load();
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the GeoJSON data and re-renders the map.
|
||||
*
|
||||
* @param data - A GeoJSON data object or a URL to one. The latter is preferable in the case of large GeoJSON files.
|
||||
* @param waitForCompletion - If true, the method will return a promise that resolves when set data is complete.
|
||||
*/
|
||||
setData(data: GeoJSON.GeoJSON | string, waitForCompletion: true): Promise<void>;
|
||||
setData(data: GeoJSON.GeoJSON | string, waitForCompletion?: false): this;
|
||||
setData(data: GeoJSON.GeoJSON | string, waitForCompletion?: boolean): this | Promise<void> {
|
||||
this._data = typeof data === 'string' ? {url: data} : {geojson: data};
|
||||
this._pendingWorkerUpdate = {data};
|
||||
const updatePromise = this._updateWorkerData();
|
||||
if (waitForCompletion) return updatePromise;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the source's GeoJSON, and re-renders the map.
|
||||
*
|
||||
* For sources with lots of features, this method can be used to make updates more quickly.
|
||||
*
|
||||
* This approach requires unique IDs for every feature in the source. The IDs can either be specified on the feature,
|
||||
* or by using the promoteId option to specify which property should be used as the ID.
|
||||
*
|
||||
* It is an error to call updateData on a source that did not have unique IDs for each of its features already.
|
||||
*
|
||||
* Updates are applied on a best-effort basis, updating an ID that does not exist will not result in an error.
|
||||
*
|
||||
* @param diff - The changes that need to be applied.
|
||||
* @param waitForCompletion - If true, the method will return a promise that resolves when the update is complete.
|
||||
*/
|
||||
updateData(diff: GeoJSONSourceDiff, waitForCompletion: true): Promise<void>;
|
||||
updateData(diff: GeoJSONSourceDiff, waitForCompletion?: false): this;
|
||||
updateData(diff: GeoJSONSourceDiff, waitForCompletion?: boolean): this | Promise<void> {
|
||||
this._pendingWorkerUpdate.diff = mergeSourceDiffs(this._pendingWorkerUpdate.diff, diff);
|
||||
const updatePromise = this._updateWorkerData();
|
||||
if (waitForCompletion) return updatePromise;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows to get the source's actual GeoJSON data.
|
||||
*
|
||||
* @returns a promise which resolves to the source's actual GeoJSON data
|
||||
*/
|
||||
async getData(): Promise<GeoJSON.GeoJSON> {
|
||||
if (this._data.url) {
|
||||
await this.once('data'); // wait for loading to complete
|
||||
}
|
||||
if (this._data.geojson) {
|
||||
return this._data.geojson;
|
||||
}
|
||||
return {
|
||||
type: 'FeatureCollection',
|
||||
features: Array.from(this._data.updateable.values())
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Allows getting the source's boundaries.
|
||||
* If there's a problem with the source's data, it will return an empty {@link LngLatBounds}.
|
||||
* @returns a promise which resolves to the source's boundaries
|
||||
*/
|
||||
async getBounds(): Promise<LngLatBounds> {
|
||||
return getGeoJSONBounds(await this.getData());
|
||||
}
|
||||
|
||||
/**
|
||||
* To disable/enable clustering on the source options
|
||||
* @param options - The options to set
|
||||
* @example
|
||||
* ```ts
|
||||
* map.getSource('some id').setClusterOptions({cluster: false});
|
||||
* map.getSource('some id').setClusterOptions({cluster: false, clusterRadius: 50, clusterMaxZoom: 14});
|
||||
* ```
|
||||
*/
|
||||
setClusterOptions(options: SetClusterOptions): this {
|
||||
this.workerOptions.geojsonVtOptions.cluster = options.cluster;
|
||||
if (options.clusterRadius !== undefined) {
|
||||
this.workerOptions.geojsonVtOptions.clusterOptions.radius = this._pixelsToTileUnits(options.clusterRadius);
|
||||
}
|
||||
if (options.clusterMaxZoom !== undefined) {
|
||||
this.workerOptions.geojsonVtOptions.clusterOptions.maxZoom = this._getClusterMaxZoom(options.clusterMaxZoom);
|
||||
}
|
||||
this._pendingWorkerUpdate.updateCluster = true;
|
||||
this._updateWorkerData();
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* For clustered sources, fetches the zoom at which the given cluster expands.
|
||||
*
|
||||
* @param clusterId - The value of the cluster's `cluster_id` property.
|
||||
* @returns a promise that is resolved with the zoom number
|
||||
*/
|
||||
getClusterExpansionZoom(clusterId: number): Promise<number> {
|
||||
return this.actor.sendAsync({type: MessageType.getClusterExpansionZoom, data: {type: this.type, clusterId, source: this.id}});
|
||||
}
|
||||
|
||||
/**
|
||||
* For clustered sources, fetches the children of the given cluster on the next zoom level (as an array of GeoJSON features).
|
||||
*
|
||||
* @param clusterId - The value of the cluster's `cluster_id` property.
|
||||
* @returns a promise that is resolved when the features are retrieved
|
||||
*/
|
||||
getClusterChildren(clusterId: number): Promise<Array<GeoJSON.Feature>> {
|
||||
return this.actor.sendAsync({type: MessageType.getClusterChildren, data: {type: this.type, clusterId, source: this.id}});
|
||||
}
|
||||
|
||||
/**
|
||||
* For clustered sources, fetches the original points that belong to the cluster (as an array of GeoJSON features).
|
||||
*
|
||||
* @param clusterId - The value of the cluster's `cluster_id` property.
|
||||
* @param limit - The maximum number of features to return.
|
||||
* @param offset - The number of features to skip (e.g. for pagination).
|
||||
* @returns a promise that is resolved when the features are retrieved
|
||||
* @example
|
||||
* Retrieve cluster leaves on click
|
||||
* ```ts
|
||||
* map.on('click', 'clusters', (e) => {
|
||||
* let features = map.queryRenderedFeatures(e.point, {
|
||||
* layers: ['clusters']
|
||||
* });
|
||||
*
|
||||
* let clusterId = features[0].properties.cluster_id;
|
||||
* let pointCount = features[0].properties.point_count;
|
||||
* let clusterSource = map.getSource('clusters');
|
||||
*
|
||||
* const features = await clusterSource.getClusterLeaves(clusterId, pointCount);
|
||||
* // Print cluster leaves in the console
|
||||
* console.log('Cluster leaves:', features);
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
getClusterLeaves(clusterId: number, limit: number, offset: number): Promise<Array<GeoJSON.Feature>> {
|
||||
return this.actor.sendAsync({type: MessageType.getClusterLeaves, data: {
|
||||
type: this.type,
|
||||
source: this.id,
|
||||
clusterId,
|
||||
limit,
|
||||
offset
|
||||
}});
|
||||
}
|
||||
|
||||
/**
|
||||
* Responsible for invoking WorkerSource's geojson.loadData target, which
|
||||
* handles loading the geojson data and preparing to serve it up as tiles,
|
||||
* using geojson-vt or supercluster as appropriate.
|
||||
*/
|
||||
async _updateWorkerData(): Promise<void> {
|
||||
if (this._isUpdatingWorker) return;
|
||||
|
||||
if (!this._hasPendingWorkerUpdate()) {
|
||||
warnOnce(`No pending worker updates for GeoJSONSource ${this.id}.`);
|
||||
return;
|
||||
}
|
||||
|
||||
const {data, diff, updateCluster} = this._pendingWorkerUpdate;
|
||||
// delay awaiting params until _isUpdatingWorker is set, otherwise, a race condition could happen
|
||||
const params = this._getLoadGeoJSONParameters(data, diff, updateCluster);
|
||||
|
||||
if (data !== undefined) {
|
||||
this._pendingWorkerUpdate.data = undefined;
|
||||
} else if (diff) {
|
||||
this._pendingWorkerUpdate.diff = undefined;
|
||||
} else if (updateCluster) {
|
||||
this._pendingWorkerUpdate.updateCluster = undefined;
|
||||
}
|
||||
|
||||
await this._dispatchWorkerUpdate(params);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the parameters object that will be sent to the worker and used to load GeoJSON.
|
||||
*/
|
||||
private async _getLoadGeoJSONParameters(data: string | GeoJSON.GeoJSON<GeoJSON.Geometry>, diff: GeoJSONSourceDiff, updateCluster: boolean): Promise<LoadGeoJSONParameters | undefined> {
|
||||
const params: LoadGeoJSONParameters = extend({type: this.type}, this.workerOptions);
|
||||
|
||||
// Data comes from a remote url
|
||||
if (typeof data === 'string') {
|
||||
params.request = await this.map._requestManager.transformRequest(browser.resolveURL(data as string), ResourceType.Source);
|
||||
params.request.collectResourceTiming = this._collectResourceTiming;
|
||||
return params;
|
||||
}
|
||||
|
||||
// Data is a geojson object
|
||||
if (data !== undefined) {
|
||||
params.data = data;
|
||||
return params;
|
||||
}
|
||||
|
||||
// Data is a differential update
|
||||
if (diff) {
|
||||
params.dataDiff = diff;
|
||||
return params;
|
||||
}
|
||||
|
||||
// Update supercluster with the latest worker cluster options
|
||||
if (updateCluster) {
|
||||
params.updateCluster = true;
|
||||
return params;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Send the worker update data from the main thread to the worker.
 *
 * Fires 'dataloading' before the round-trip and, on success, a 'metadata'
 * then a 'content' data event; fires 'dataabort' when the source was
 * removed or the worker abandoned the load, and an ErrorEvent on failure.
 * Regardless of outcome, re-runs _updateWorkerData when more work is queued.
 */
private async _dispatchWorkerUpdate(optionsPromise: Promise<LoadGeoJSONParameters>) {
    // Must be set before awaiting anything — _updateWorkerData uses this flag
    // to guarantee only one update is in flight at a time.
    this._isUpdatingWorker = true;
    this.fire(new Event('dataloading', {dataType: 'source'}));

    try {
        const options = await optionsPromise;
        const result = await this.actor.sendAsync({type: MessageType.loadData, data: options});
        this._isUpdatingWorker = false;

        // Source removed while loading, or the worker superseded this load.
        if (this._removed || result.abandoned) {
            this.fire(new Event('dataabort', {dataType: 'source'}));
            return;
        }

        // Update the copy of the data in this source with the worker result. (only sent for url based geojson data)
        if (result.data) {
            this._data = {geojson: result.data};
        }

        // Mirror a diff (if any) into the main-thread copy and compute which
        // tile regions it touched, so unaffected tiles can skip reloading.
        const affectedGeometries = this._applyDiffToSource(options.dataDiff);
        const shouldReloadTileOptions = this._getShouldReloadTileOptions(affectedGeometries);

        const eventData = {dataType: 'source'};
        this._applyResourceTiming(eventData, result);

        // Fire the metadata event to let the TileManager know it's ok to start requesting tiles.
        this.fire(new Event('data', {...eventData, sourceDataType: 'metadata'}));
        this.fire(new Event('data', {...eventData, sourceDataType: 'content', shouldReloadTileOptions}));
    } catch (err) {
        this._isUpdatingWorker = false;

        // A removed source reports abort rather than error.
        if (this._removed) {
            this.fire(new Event('dataabort', {dataType: 'source'}));
            return;
        }

        this.fire(new ErrorEvent(err));
    } finally {
        // If there is more pending data, update the worker again.
        if (this._hasPendingWorkerUpdate()) {
            this._updateWorkerData();
        }
    }
}
|
||||
|
||||
/**
|
||||
* Apply resource timing data to the event object.
|
||||
*/
|
||||
private _applyResourceTiming(eventData: {dataType: string}, result: GeoJSONWorkerSourceLoadDataResult) {
|
||||
if (!this._collectResourceTiming) return;
|
||||
|
||||
const timingData = result.resourceTiming?.[this.id];
|
||||
if (!timingData) return;
|
||||
|
||||
const resourceTiming = timingData.slice(0);
|
||||
if (!resourceTiming?.length) return;
|
||||
|
||||
extend(eventData, {resourceTiming});
|
||||
}
|
||||
|
||||
/**
 * Apply a diff to this source's data and return the affected feature geometries.
 * @param diff - The {@link GeoJSONSourceDiff} to apply.
 * @returns The affected geometries, or undefined if the diff is not applicable or all geometries are affected.
 */
private _applyDiffToSource(diff: GeoJSONSourceDiff): GeoJSON.Geometry[] | undefined {
    if (!diff) {
        return undefined;
    }

    // Only string promoteIds are supported for diff bookkeeping.
    const promoteId = typeof this.promoteId === 'string' ? this.promoteId : undefined;

    // Lazily convert `this._data` to updateable if it's not already
    if (!this._data.url && !this._data.updateable) {
        const updateable = toUpdateable(this._data.geojson, promoteId);
        if (!updateable) throw new Error(`GeoJSONSource "${this.id}": GeoJSON data is not compatible with updateData`);
        this._data = {updateable};
    }

    // URL-backed data has no local feature map to diff against.
    if (!this._data.updateable) {
        return undefined;
    }
    // Note: the diff is applied unconditionally; the early-outs below only
    // suppress the affected-geometry result, not the data mutation.
    const affectedGeometries = applySourceDiff(this._data.updateable, diff, promoteId);

    // removeAll touches everything, and clustering can shift any cluster —
    // in both cases callers must treat all geometries as affected.
    if (diff.removeAll || this._options.cluster) {
        return undefined;
    }

    return affectedGeometries;
}
|
||||
|
||||
/**
|
||||
* Get options for use in determining whether to reload a tile based on the modified features.
|
||||
* @param affectedGeometries - The feature geometries affected by the update.
|
||||
* @returns A {@link GeoJSONSourceShouldReloadTileOptions} object which contains an array of affected bounds caused by the update.
|
||||
*/
|
||||
private _getShouldReloadTileOptions(affectedGeometries: GeoJSON.Geometry[]): GeoJSONSourceShouldReloadTileOptions | undefined {
|
||||
if (!affectedGeometries) return undefined;
|
||||
|
||||
const affectedBounds = affectedGeometries
|
||||
.filter(Boolean)
|
||||
.map(g => getGeoJSONBounds(g));
|
||||
|
||||
return {affectedBounds};
|
||||
}
|
||||
|
||||
/**
 * Determine whether a tile should be reloaded based on a set of options associated with a {@link MapSourceDataChangedEvent}.
 * A tile reloads only when its buffer-expanded footprint intersects at
 * least one updated feature's bounds.
 * @internal
 */
shouldReloadTile(tile: Tile, {affectedBounds}: GeoJSONSourceShouldReloadTileOptions): boolean {
    // Tiles still loading always restart; fully unloaded tiles never reload.
    if (tile.state === 'loading') return true;
    if (tile.state === 'unloaded') return false;

    // Expand the tile by its geojson-vt buffer (expressed as a fraction of
    // the tile extent) so features just outside the tile edge still count.
    const vtOptions = this.workerOptions.geojsonVtOptions;
    const bufferRatio = vtOptions.buffer / vtOptions.extent;
    const tileBounds = tileIdToLngLatBounds(tile.tileID.canonical, bufferRatio);

    return affectedBounds.some((bounds) => tileBounds.intersects(bounds));
}
|
||||
|
||||
loaded(): boolean {
    // The source counts as loaded once no worker update is running or queued.
    const busy = this._isUpdatingWorker || this._hasPendingWorkerUpdate();
    return !busy;
}
|
||||
|
||||
/**
 * Ask the worker to build (or rebuild) vector data for a tile, then commit
 * the result to the tile unless the request was aborted in the meantime.
 */
async loadTile(tile: Tile): Promise<void> {
    // A tile that already has an actor has been loaded before — reload it.
    const message = !tile.actor ? MessageType.loadTile : MessageType.reloadTile;
    tile.actor = this.actor;
    const params: WorkerTileParameters = {
        type: this.type,
        uid: tile.uid,
        tileID: tile.tileID,
        zoom: tile.tileID.overscaledZ,
        maxZoom: this.maxzoom,
        tileSize: this.tileSize,
        source: this.id,
        pixelRatio: this.map.getPixelRatio(),
        showCollisionBoxes: this.map.showCollisionBoxes,
        promoteId: this.promoteId,
        subdivisionGranularity: this.map.style.projection.subdivisionGranularity
    };

    // The controller lets abortTile cancel this round-trip; it is removed
    // as soon as the request settles so a later abort is a plain no-op.
    tile.abortController = new AbortController();
    const data = await this.actor.sendAsync({type: message, data: params}, tile.abortController);
    delete tile.abortController;
    // Drop stale vector data before (possibly) installing the new result.
    tile.unloadVectorData();

    // abortTile may have flagged the tile while we awaited the worker.
    if (!tile.aborted) {
        tile.loadVectorData(data, this.map.painter, message === MessageType.reloadTile);
    }
}
|
||||
|
||||
async abortTile(tile: Tile) {
    // Cancel any in-flight worker request, then flag the tile so loadTile
    // skips committing data when its pending promise settles.
    const controller = tile.abortController;
    if (controller) {
        controller.abort();
        delete tile.abortController;
    }
    tile.aborted = true;
}
|
||||
|
||||
async unloadTile(tile: Tile) {
    // Release the tile's local vector data first, then tell the worker to
    // drop its copy as well.
    tile.unloadVectorData();
    const data = {uid: tile.uid, type: this.type, source: this.id};
    await this.actor.sendAsync({type: MessageType.removeTile, data});
}
|
||||
|
||||
onRemove() {
    // Mark the source removed so in-flight worker updates abort instead of
    // firing data events, then tell the worker to discard its state.
    this._removed = true;
    const data = {type: this.type, source: this.id};
    this.actor.sendAsync({type: MessageType.removeSource, data});
}
|
||||
|
||||
serialize(): GeoJSONSourceSpecification {
    // When diffs have been applied, reconstitute a FeatureCollection from
    // the updateable feature map; otherwise echo back the original url or
    // inline geojson payload.
    let data;
    if (this._data.updateable) {
        data = {
            type: 'FeatureCollection',
            features: Array.from(this._data.updateable.values())
        };
    } else {
        data = this._data.url || this._data.geojson;
    }
    return extend({}, this._options, {type: this.type, data});
}
|
||||
|
||||
hasTransition() {
    // GeoJSON sources never animate between frames.
    return false;
}
|
||||
}
|
||||
717
node_modules/maplibre-gl/src/source/geojson_source_diff.test.ts
generated
vendored
Normal file
717
node_modules/maplibre-gl/src/source/geojson_source_diff.test.ts
generated
vendored
Normal file
@@ -0,0 +1,717 @@
|
||||
import {describe, beforeEach, test, expect} from 'vitest';
|
||||
import {setPerformance} from '../util/test/util';
|
||||
import {type GeoJSONFeatureId, type GeoJSONSourceDiff, toUpdateable, applySourceDiff, mergeSourceDiffs} from './geojson_source_diff';
|
||||
|
||||
// Stub the performance timing API before each test; the source code under
// test reads it for resource-timing bookkeeping.
beforeEach(() => {
    setPerformance();
});
|
||||
|
||||
// toUpdateable converts raw GeoJSON input into a Map keyed by feature id
// (or by a promoteId property), returning undefined when ids are missing,
// duplicated, or the input is a bare geometry.
describe('toUpdateable', () => {
    test('feature without id is not updateable', () => {
        // no feature id -> false
        expect(toUpdateable({
            type: 'Feature',
            geometry: {
                type: 'Point',
                coordinates: [0, 0]
            },
            properties: {},
        })).toBeUndefined();
    });

    test('feature with id is updateable', () => {
        // has a feature id -> true
        expect(toUpdateable({
            type: 'Feature',
            id: 'feature_id',
            geometry: {
                type: 'Point',
                coordinates: [0, 0]
            },
            properties: {},
        })).toBeDefined();
    });

    test('promoteId missing is not updateable', () => {
        // when a promoteId is given, a plain feature id does not count
        expect(toUpdateable({
            type: 'Feature',
            id: 'feature_id',
            geometry: {
                type: 'Point',
                coordinates: [0, 0]
            },
            properties: {},
        }, 'propId')).toBeUndefined();
    });

    test('promoteId present is updateable', () => {
        expect(toUpdateable({
            type: 'Feature',
            geometry: {
                type: 'Point',
                coordinates: [0, 0]
            },
            properties: {
                propId: 'feature_id',
            },
        }, 'propId')).toBeDefined();
    });

    test('feature collection with unique ids is updateable', () => {
        expect(toUpdateable({
            type: 'FeatureCollection',
            features: [{
                type: 'Feature',
                id: 'feature_id',
                geometry: {
                    type: 'Point',
                    coordinates: [0, 0]
                },
                properties: {},
            }, {
                type: 'Feature',
                id: 'feature_id_2',
                geometry: {
                    type: 'Point',
                    coordinates: [0, 0]
                },
                properties: {},
            }]
        })).toBeDefined();
    });

    test('feature collection with unique promoteIds is updateable', () => {
        expect(toUpdateable({
            type: 'FeatureCollection',
            features: [{
                type: 'Feature',
                geometry: {
                    type: 'Point',
                    coordinates: [0, 0]
                },
                properties: {
                    propId: 'feature_id',
                },
            }, {
                type: 'Feature',
                geometry: {
                    type: 'Point',
                    coordinates: [0, 0]
                },
                properties: {
                    propId: 'feature_id_2',
                },
            }]
        }, 'propId')).toBeDefined();
    });

    test('feature collection without unique ids is not updateable', () => {
        expect(toUpdateable({
            type: 'FeatureCollection',
            features: [{
                type: 'Feature',
                geometry: {
                    type: 'Point',
                    coordinates: [0, 0]
                },
                properties: {},
            }]
        })).toBeUndefined();
    });

    test('feature collection with duplicate feature ids is not updateable', () => {
        expect(toUpdateable({
            type: 'FeatureCollection',
            features: [{
                type: 'Feature',
                id: 'feature_id',
                geometry: {
                    type: 'Point',
                    coordinates: [0, 0]
                },
                properties: {},
            }, {
                type: 'Feature',
                id: 'feature_id',
                geometry: {
                    type: 'Point',
                    coordinates: [0, 0]
                },
                properties: {},
            }]
        })).toBeUndefined();
    });

    test('geometries are not updateable', () => {
        expect(toUpdateable({type: 'Point', coordinates: [0, 0]})).toBeUndefined();
    });

    test('works with a single feature - feature id', () => {
        const updateable = toUpdateable({
            type: 'Feature',
            id: 'point',
            geometry: {
                type: 'Point',
                coordinates: [0, 0],
            }, properties: {}});
        expect(updateable.size).toBe(1);
        expect(updateable.has('point')).toBeTruthy();
    });

    test('works with a single feature - promoteId', () => {
        const updateable2 = toUpdateable({
            type: 'Feature',
            geometry: {
                type: 'Point',
                coordinates: [0, 0],
            }, properties: {
                promoteId: 'point',
            }}, 'promoteId');
        expect(updateable2.size).toBe(1);
        expect(updateable2.has('point')).toBeTruthy();
    });

    test('works with a FeatureCollection - feature id', () => {
        const updateable = toUpdateable({
            type: 'FeatureCollection',
            features: [
                {
                    type: 'Feature',
                    id: 'point',
                    geometry: {
                        type: 'Point',
                        coordinates: [0, 0],
                    }, properties: {}},
                {
                    type: 'Feature',
                    id: 'point2',
                    geometry: {
                        type: 'Point',
                        coordinates: [0, 0],
                    }, properties: {}}
            ]
        });
        expect(updateable.size).toBe(2);
        expect(updateable.has('point')).toBeTruthy();
        expect(updateable.has('point2')).toBeTruthy();
    });

    test('works with a FeatureCollection - promoteId', () => {
        const updateable2 = toUpdateable({
            type: 'FeatureCollection',
            features: [
                {
                    type: 'Feature',
                    geometry: {
                        type: 'Point',
                        coordinates: [0, 0],
                    }, properties: {
                        promoteId: 'point'
                    }},
                {
                    type: 'Feature',
                    geometry: {
                        type: 'Point',
                        coordinates: [0, 0],
                    }, properties: {
                        promoteId: 'point2'
                    }}
            ]
        }, 'promoteId');
        expect(updateable2.size).toBe(2);
        expect(updateable2.has('point')).toBeTruthy();
        expect(updateable2.has('point2')).toBeTruthy();
    });
});
|
||||
|
||||
// applySourceDiff mutates an updateable feature Map in place (remove, then
// add, then update) and returns the list of geometries touched by the diff.
// The shared fixtures below are frozen to prove the function never mutates
// its inputs.
describe('applySourceDiff', () => {
    const point: GeoJSON.Feature = {
        type: 'Feature',
        id: 'point',
        geometry: {
            type: 'Point',
            coordinates: [0, 0]
        },
        properties: {},
    };

    const point2: GeoJSON.Feature = {
        type: 'Feature',
        geometry: {
            type: 'Point',
            coordinates: [1, 1],
        },
        properties: {
            promoteId: 'point2'
        },
    };

    // freeze our input data to guarantee that applySourceDiff works immutably
    Object.freeze(point);
    Object.freeze(point.geometry);
    Object.freeze((point.geometry as GeoJSON.Point).coordinates);
    Object.freeze(point.properties);
    Object.freeze(point2);
    Object.freeze(point2.geometry);
    Object.freeze((point2.geometry as GeoJSON.Point).coordinates);
    Object.freeze(point2.properties);

    test('adds a feature using the feature id', () => {
        const updateable = new Map<GeoJSONFeatureId, GeoJSON.Feature>();

        const affectedGeometries = applySourceDiff(updateable, {
            add: [point]
        });
        expect(updateable.size).toBe(1);
        expect(updateable.has('point')).toBeTruthy();
        expect(affectedGeometries).toStrictEqual([point.geometry]);
    });

    test('adds a feature using the promoteId', () => {
        const updateable = new Map<GeoJSONFeatureId, GeoJSON.Feature>();
        applySourceDiff(updateable, {
            add: [point2]
        }, 'promoteId');
        expect(updateable.size).toBe(1);
        expect(updateable.has('point2')).toBeTruthy();
    });

    test('adds a feature that exist and change its geometry', () => {
        const updateable = new Map([[point.id, point]]);
        const updatedPoint1: GeoJSON.Feature = {
            type: 'Feature',
            id: point.id,
            geometry: {
                type: 'Point',
                coordinates: [1, 1],
            },
            properties: {}
        };
        const affectedGeometries = applySourceDiff(updateable, {
            add: [updatedPoint1]
        });
        expect(updateable.size).toBe(1);
        expect(updateable.has(point.id)).toBeTruthy();
        expect(updateable.get(point.id).geometry).toStrictEqual(updatedPoint1.geometry);
        // both the old and the new geometry count as affected
        expect(affectedGeometries).toStrictEqual([point.geometry, updatedPoint1.geometry]);
    });

    test('removes a feature by its id', () => {
        const updateable = new Map([[point.id, point], ['point2', point2]]);
        const affectedGeometries = applySourceDiff(updateable, {
            remove: ['point2'],
        });
        expect(updateable.size).toBe(1);
        expect(updateable.has('point2')).toBeFalsy();
        expect(affectedGeometries).toStrictEqual([point2.geometry]);
    });

    test('removes a feature by its id and dont return undefined geometries', () => {
        // 'point3' does not exist; its (missing) geometry must not appear in the result
        const updateable = new Map([[point.id, point], ['point2', point2]]);
        const affectedGeometries = applySourceDiff(updateable, {
            remove: ['point2', 'point3'],
        });
        expect(updateable.size).toBe(1);
        expect(updateable.has('point2')).toBeFalsy();
        expect(affectedGeometries).toStrictEqual([point2.geometry]);
    });

    test('updates a feature geometry', () => {
        const updateable = new Map([[point.id, point]]);
        const newGeometry: GeoJSON.Point = {
            type: 'Point',
            coordinates: [1, 0]
        };
        const affectedGeometries = applySourceDiff(updateable, {
            update: [{
                id: point.id,
                newGeometry: newGeometry,
            }]
        });
        expect(updateable.size).toBe(1);
        expect((updateable.get(point.id)?.geometry as GeoJSON.Point).coordinates[0]).toBe(1);
        expect(affectedGeometries).toStrictEqual([point.geometry, newGeometry]);
    });

    test('adds properties', () => {
        const updateable = new Map([[point.id, point]]);
        const affectedGeometries = applySourceDiff(updateable, {
            update: [{
                id: point.id,
                addOrUpdateProperties: [
                    {key: 'prop', value: 'value'},
                    {key: 'prop2', value: 'value2'}
                ]
            }]
        });
        expect(updateable.size).toBe(1);
        const properties = updateable.get(point.id)?.properties!;
        expect(Object.keys(properties)).toHaveLength(2);
        expect(properties.prop).toBe('value');
        expect(properties.prop2).toBe('value2');
        expect(affectedGeometries).toStrictEqual([point.geometry]);
    });

    test('updates properties', () => {
        const updateable = new Map([[point.id, {...point, properties: {prop: 'value', prop2: 'value2'}}]]);
        const affectedGeometries = applySourceDiff(updateable, {
            update: [{
                id: point.id,
                addOrUpdateProperties: [
                    {key: 'prop2', value: 'value3'}
                ]
            }]
        });
        expect(updateable.size).toBe(1);
        const properties2 = updateable.get(point.id)?.properties!;
        expect(Object.keys(properties2)).toHaveLength(2);
        expect(properties2.prop).toBe('value');
        expect(properties2.prop2).toBe('value3');
        expect(affectedGeometries).toStrictEqual([point.geometry]);
    });

    test('removes properties', () => {
        const updateable = new Map([[point.id, {...point, properties: {prop: 'value', prop2: 'value2'}}]]);
        const affectedGeometries = applySourceDiff(updateable, {
            update: [{
                id: point.id,
                removeProperties: ['prop2']
            }]
        });
        expect(updateable.size).toBe(1);
        const properties3 = updateable.get(point.id)?.properties!;
        expect(Object.keys(properties3)).toHaveLength(1);
        expect(properties3.prop).toBe('value');
        expect(affectedGeometries).toStrictEqual([point.geometry]);
    });

    test('removes all properties', () => {
        const updateable = new Map([[point.id, {...point, properties: {prop: 'value', prop2: 'value2'}}]]);
        const affectedGeometries = applySourceDiff(updateable, {
            update: [{
                id: point.id,
                removeAllProperties: true,
            }]
        });
        expect(updateable.size).toBe(1);
        expect(Object.keys(updateable.get(point.id)?.properties!)).toHaveLength(0);
        expect(affectedGeometries).toStrictEqual([point.geometry]);
    });

    test('adds a feature with properties, removes the feature, then adds it back with different geometry and properties', () => {
        // verifies no state (properties/geometry) leaks across a remove/re-add cycle
        const updateable = new Map<GeoJSONFeatureId, GeoJSON.Feature>();

        const add1: GeoJSON.Feature = {
            type: 'Feature',
            id: 'feature1',
            geometry: {
                type: 'LineString',
                coordinates: [[0, 0], [1, 1]]
            },
            properties: {test1: 'test1'}
        };
        const add2: GeoJSON.Feature = {
            type: 'Feature',
            id: 'feature1',
            geometry: {
                type: 'Point',
                coordinates: [1, 1]
            },
            properties: {test2: 'test2'}
        };

        applySourceDiff(updateable, {
            add: [add1]
        });
        applySourceDiff(updateable, {
            remove: ['feature1']
        });
        applySourceDiff(updateable, {
            add: [add2]
        });

        expect(updateable.size).toBe(1);
        expect(updateable.has('feature1')).toBeTruthy();

        const feature = updateable.get('feature1');
        expect(feature.geometry).toEqual({type: 'Point', coordinates: [1, 1]});
        expect(feature.properties.test1).toBeUndefined();
        expect(feature.properties.test2).toBe('test2');
    });
});
|
||||
|
||||
// mergeSourceDiffs combines two sequential diffs into one equivalent diff.
// Key invariants covered below: a later add replaces an earlier add of the
// same id; removeAll supersedes earlier operations; removeAllProperties
// supersedes earlier per-property operations; removes are retained even when
// they follow an add, since the source may already contain that id; and
// undefined/empty diffs merge to the non-empty counterpart.
describe('mergeSourceDiffs', () => {
    test('merges two diffs with different features ids', () => {
        const diff1 = {
            add: [{type: 'Feature', id: 'feature1', geometry: {type: 'Point', coordinates: [0, 0]}, properties: {}}],
            remove: ['feature2'],
            update: [{id: 'feature3', newGeometry: {type: 'Point', coordinates: [1, 1]}}],
        } satisfies GeoJSONSourceDiff;

        const diff2 = {
            add: [{type: 'Feature', id: 'feature4', geometry: {type: 'Point', coordinates: [2, 2]}, properties: {}}],
            remove: ['feature5'],
            update: [{id: 'feature6', addOrUpdateProperties: [{key: 'prop', value: 'value'}]}],
        } satisfies GeoJSONSourceDiff;

        const merged = mergeSourceDiffs(diff1, diff2);
        expect(merged.add).toHaveLength(2);
        expect(merged.remove).toHaveLength(2);
        expect(merged.update).toHaveLength(2);
    });

    test('merges two diffs with equivalent feature ids', () => {
        const diff1 = {
            add: [{type: 'Feature', id: 'feature1', geometry: {type: 'Point', coordinates: [0, 0]}, properties: {param: 1}}],
            remove: ['feature2'],
            update: [{id: 'feature3', newGeometry: {type: 'Point', coordinates: [1, 1]}, addOrUpdateProperties: [{key: 'prop1', value: 'value'}]}],
        } satisfies GeoJSONSourceDiff;

        const diff2 = {
            add: [{type: 'Feature', id: 'feature1', geometry: {type: 'Point', coordinates: [2, 2]}, properties: {param: 2}}],
            remove: ['feature2', 'feature4'],
            update: [{id: 'feature3', addOrUpdateProperties: [{key: 'prop2', value: 'value'}], removeProperties: ['prop3'], removeAllProperties: true}],
        } satisfies GeoJSONSourceDiff;

        const merged = mergeSourceDiffs(diff1, diff2);
        // the later add wins wholesale
        expect(merged.add).toHaveLength(1);
        expect(merged.add[0].geometry).toEqual({type: 'Point', coordinates: [2, 2]});
        expect(merged.add[0].properties).toEqual({param: 2});
        expect(merged.remove).toHaveLength(2);
        expect(merged.update).toHaveLength(1);
        expect(merged.update[0].newGeometry).toBeDefined();
        expect(merged.update[0].addOrUpdateProperties).toHaveLength(1);
        // removeAllProperties subsumes the per-property removals
        expect(merged.update[0].removeProperties).toBeUndefined();
        expect(merged.update[0].removeAllProperties).toBe(true);
    });

    test('merges two diffs add then removeAll', () => {
        const diff1 = {
            add: [{type: 'Feature', id: 'feature1', geometry: {type: 'Point', coordinates: [1, 1]}, properties: {}}],
        } satisfies GeoJSONSourceDiff;

        const diff2 = {
            removeAll: true,
        } satisfies GeoJSONSourceDiff;

        const merged = mergeSourceDiffs(diff1, diff2);
        expect(merged.add).toHaveLength(0);
        expect(merged.removeAll).toBe(true);
    });

    test('merges two diffs removeAll then add', () => {
        const diff1 = {
            removeAll: true,
        } satisfies GeoJSONSourceDiff;

        const diff2 = {
            add: [{type: 'Feature', id: 'feature1', geometry: {type: 'Point', coordinates: [1, 1]}, properties: {}}],
        } satisfies GeoJSONSourceDiff;

        const merged = mergeSourceDiffs(diff1, diff2);
        // the add comes after the removeAll, so both survive
        expect(merged.add).toHaveLength(1);
        expect(merged.removeAll).toBe(true);
    });

    test('merges two diffs with removeAll and add in both', () => {
        const diff1 = {
            removeAll: true,
            add: [{type: 'Feature', id: 'feature1', geometry: {type: 'Point', coordinates: [1, 1]}, properties: {}}],
        } satisfies GeoJSONSourceDiff;

        const diff2 = {
            removeAll: true,
            add: [{type: 'Feature', id: 'feature2', geometry: {type: 'Point', coordinates: [1, 1]}, properties: {}}],
        } satisfies GeoJSONSourceDiff;

        const merged = mergeSourceDiffs(diff1, diff2);
        expect(merged.removeAll).toBe(true);
        // the second removeAll wipes the first diff's add
        expect(merged.add).toHaveLength(1);
        expect(merged.add[0].id).toBe('feature2');
    });

    test('removeAll in new diff clears explicit remove lists', () => {
        const diff1 = {
            remove: ['a']
        } satisfies GeoJSONSourceDiff;
        const diff2 = {
            removeAll: true,
            remove: ['b']
        } satisfies GeoJSONSourceDiff;

        const merged = mergeSourceDiffs(diff1, diff2);
        expect(merged.removeAll).toBe(true);
        expect(merged.remove).toHaveLength(0);
    });

    test('removeAllProperties wipes earlier feature property operations (add/update/remove)', () => {
        const diff1 = {
            update: [{
                id: 'f1',
                addOrUpdateProperties: [{key: 'new', value: 1}],
                removeProperties: ['old']
            }]
        } satisfies GeoJSONSourceDiff;

        const diff2 = {
            update: [{
                id: 'f1',
                removeAllProperties: true,
                addOrUpdateProperties: [{key: 'fresh', value: 2}]
            }]
        } satisfies GeoJSONSourceDiff;

        const merged = mergeSourceDiffs(diff1, diff2);
        expect(merged.update.length).toBe(1);
        expect(merged.update[0].removeAllProperties).toBe(true);
        expect(merged.update[0].removeProperties).toBeUndefined();
        expect(merged.update[0].addOrUpdateProperties).toEqual([{key: 'fresh', value: 2}]);
    });

    test('remove and add same feature using promote id', () => {
        const diff1 = {
            remove: ['pid']
        } satisfies GeoJSONSourceDiff;
        const diff2 = {
            add: [{
                type: 'Feature',
                geometry: {type: 'Point', coordinates: [0, 0]},
                properties: {promoted: 'pid'}
            }],
        } satisfies GeoJSONSourceDiff;

        const merged = mergeSourceDiffs(diff1, diff2, 'promoted');
        expect(merged.add).toHaveLength(1);
        expect(merged.remove).toHaveLength(0);
    });

    test('add two separate features using promote id', () => {
        const diff1 = {
            add: [{
                type: 'Feature',
                geometry: {type: 'Point', coordinates: [0, 0]},
                properties: {promoted: 'pid1'}
            }],
        } satisfies GeoJSONSourceDiff;

        const diff2 = {
            add: [{
                type: 'Feature',
                geometry: {type: 'Point', coordinates: [1, 1]},
                properties: {promoted: 'pid2'}
            }],
        } satisfies GeoJSONSourceDiff;

        const merged = mergeSourceDiffs(diff1, diff2, 'promoted');
        expect(merged.add).toBeDefined();
        expect(merged.add.length).toBe(2);
    });

    test('merges two diffs update feature then remove', () => {
        const diff1 = {
            update: [{id: 'feature1', newGeometry: {type: 'Point', coordinates: [1, 1]}, addOrUpdateProperties: [{key: 'prop1', value: 'value'}]}],
        } satisfies GeoJSONSourceDiff;

        const diff2 = {
            remove: ['feature1']
        } satisfies GeoJSONSourceDiff;

        const merged = mergeSourceDiffs(diff1, diff2);
        expect(merged.update).toHaveLength(0);
        expect(merged.remove).toHaveLength(1);
    });

    test('merges two diffs update feature properties then remove feature properties - and retains the remove', () => {
        const diff1 = {
            update: [{id: 'feature1', addOrUpdateProperties: [{key: 'prop1', value: 'value'}]}]
        } satisfies GeoJSONSourceDiff;

        const diff2 = {
            update: [{id: 'feature1', removeProperties: ['prop1']}]
        } satisfies GeoJSONSourceDiff;

        const merged = mergeSourceDiffs(diff1, diff2);
        expect(merged.update[0].addOrUpdateProperties).toHaveLength(0);
        // Since a feature with the same id could have been added to the source previously, retain the remove.
        expect(merged.update[0].removeProperties).toHaveLength(1);
    });

    test('merges two diffs remove feature properties then update feature properties - retains both operations', () => {
        const diff1 = {
            update: [{id: 'feature1', removeProperties: ['prop1']}]
        } satisfies GeoJSONSourceDiff;

        const diff2 = {
            update: [{id: 'feature1', addOrUpdateProperties: [{key: 'prop1', value: 'value'}]}]
        } satisfies GeoJSONSourceDiff;

        const merged = mergeSourceDiffs(diff1, diff2);
        expect(merged.update[0].removeProperties).toHaveLength(1);
        expect(merged.update[0].addOrUpdateProperties).toHaveLength(1);
    });

    test('merges two diffs add feature then remove - and retains the remove', () => {
        const diff1 = {
            add: [{type: 'Feature', id: 'feature1', geometry: {type: 'Point', coordinates: [1, 1]}, properties: {}}],
        } satisfies GeoJSONSourceDiff;

        const diff2 = {
            remove: ['feature1']
        } satisfies GeoJSONSourceDiff;

        const merged = mergeSourceDiffs(diff1, diff2);
        expect(merged.add).toHaveLength(0);
        // Since a feature with the same id could have been added to the source previously, retain the remove.
        expect(merged.remove).toHaveLength(1);
    });

    test('merges two diffs remove feature then add', () => {
        const diff1 = {
            remove: ['feature1']
        } satisfies GeoJSONSourceDiff;

        const diff2 = {
            add: [{type: 'Feature', id: 'feature1', geometry: {type: 'Point', coordinates: [1, 1]}, properties: {}}],
        } satisfies GeoJSONSourceDiff;

        const diff3 = {
            remove: ['feature1']
        } satisfies GeoJSONSourceDiff;

        const merged1 = mergeSourceDiffs(diff1, diff2);
        expect(merged1.add).toHaveLength(1);
        expect(merged1.remove).toHaveLength(0);

        const merged2 = mergeSourceDiffs(merged1, diff3);
        expect(merged2.add).toHaveLength(0);
        // Since a feature with the same id could have been added to the source previously, retain the remove.
        expect(merged2.remove).toHaveLength(1);
    });

    test('merges diff with empty', () => {
        const diff1 = {} satisfies GeoJSONSourceDiff;

        const diff2 = {
            add: [{type: 'Feature', id: 'feature1', geometry: {type: 'Point', coordinates: [0, 0]}, properties: {}}],
            remove: ['feature2'],
            update: [{id: 'feature3', newGeometry: {type: 'Point', coordinates: [1, 1]}, addOrUpdateProperties: [{key: 'prop1', value: 'value'}]}],
        } satisfies GeoJSONSourceDiff;

        const merged = mergeSourceDiffs(diff1, diff2);
        expect(merged).toEqual(diff2);
    });

    test('merges diff with undefined', () => {
        const diff1 = {
            add: [{type: 'Feature', id: 'feature1', geometry: {type: 'Point', coordinates: [0, 0]}, properties: {}}],
        } satisfies GeoJSONSourceDiff;

        const merged = mergeSourceDiffs(diff1, undefined);
        expect(merged).toEqual(diff1);
    });

    test('merges two undefined diffs', () => {
        const merged = mergeSourceDiffs(undefined, undefined);
        expect(merged).toEqual({});
    });
});
|
||||
393
node_modules/maplibre-gl/src/source/geojson_source_diff.ts
generated
vendored
Normal file
393
node_modules/maplibre-gl/src/source/geojson_source_diff.ts
generated
vendored
Normal file
@@ -0,0 +1,393 @@
|
||||
/**
|
||||
* A way to identify a feature, either by string or by number
|
||||
*/
|
||||
export type GeoJSONFeatureId = number | string;
|
||||
|
||||
/**
|
||||
* The geojson source diff object - processed in the following order: remove, add, update. Provides an efficient
|
||||
* way to update GeoJSON data in a map source without having to replace the entire dataset.
|
||||
*/
|
||||
export type GeoJSONSourceDiff = {
|
||||
/**
|
||||
* When set to `true` it will remove all features
|
||||
*/
|
||||
removeAll?: boolean;
|
||||
/**
|
||||
* An array of features IDs to remove
|
||||
*/
|
||||
remove?: Array<GeoJSONFeatureId>;
|
||||
/**
|
||||
* An array of features to add
|
||||
*/
|
||||
add?: Array<GeoJSON.Feature>;
|
||||
/**
|
||||
* An array of update objects
|
||||
*/
|
||||
update?: Array<GeoJSONFeatureDiff>;
|
||||
};
|
||||
|
||||
/**
|
||||
* A geojson feature diff object - processed in the following order: new geometry, remove properties, add/update properties.
|
||||
* Provides an efficient way to update GeoJSON features in a map source without replacing the entire feature.
|
||||
*/
|
||||
export type GeoJSONFeatureDiff = {
|
||||
/**
|
||||
* The feature ID
|
||||
*/
|
||||
id: GeoJSONFeatureId;
|
||||
/**
|
||||
* If it's a new geometry, place it here
|
||||
*/
|
||||
newGeometry?: GeoJSON.Geometry;
|
||||
/**
|
||||
* Setting to `true` will remove all preperties
|
||||
*/
|
||||
removeAllProperties?: boolean;
|
||||
/**
|
||||
* The properties keys to remove
|
||||
*/
|
||||
removeProperties?: Array<string>;
|
||||
/**
|
||||
* The properties to add or update along side their values
|
||||
*/
|
||||
addOrUpdateProperties?: Array<{key: string; value: any}>;
|
||||
};
|
||||
|
||||
export type UpdateableGeoJSON = GeoJSON.Feature | GeoJSON.FeatureCollection | undefined;
|
||||
|
||||
function getFeatureId(feature: GeoJSON.Feature, promoteId?: string): GeoJSONFeatureId | undefined {
|
||||
return promoteId ? feature.properties[promoteId] : feature.id;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a GeoJSON object into a map of feature IDs to GeoJSON features.
|
||||
* @param data - The GeoJSON object to convert.
|
||||
* @param promoteId - If set, the feature id will be set to the promoteId property value.
|
||||
* @returns A map of feature IDs to GeoJSON features, or `undefined` if the GeoJSON object is not a valid updateable object.
|
||||
*
|
||||
* Features must have unique identifiers to be updateable. IDs can come from:
|
||||
* - The feature's `id` property (standard GeoJSON)
|
||||
* - A promoted property specified by `promoteId` (e.g., a "name" property)
|
||||
*/
|
||||
export function toUpdateable(data: GeoJSON.GeoJSON | undefined, promoteId?: string): Map<GeoJSONFeatureId, GeoJSON.Feature> | undefined {
|
||||
const updateable = new Map<GeoJSONFeatureId, GeoJSON.Feature>();
|
||||
|
||||
// null can be updated - empty updateable
|
||||
if (data == null) {
|
||||
return updateable;
|
||||
}
|
||||
|
||||
// {} can be updated - empty updateable
|
||||
if (data.type == null) {
|
||||
return updateable;
|
||||
}
|
||||
|
||||
// a single feature with an id can be updated, need to explicitly check against null because 0 is a valid feature id that is falsy
|
||||
if (data.type === 'Feature') {
|
||||
const id = getFeatureId(data, promoteId);
|
||||
if (id == null) return undefined;
|
||||
|
||||
updateable.set(id, data);
|
||||
return updateable;
|
||||
}
|
||||
|
||||
// a feature collection can be updated if every feature has a unique id, which prevents the silent dropping of features
|
||||
if (data.type === 'FeatureCollection') {
|
||||
const seenIds = new Set<GeoJSONFeatureId>();
|
||||
|
||||
for (const feature of data.features) {
|
||||
const id = getFeatureId(feature, promoteId);
|
||||
if (id == null) return undefined;
|
||||
|
||||
if (seenIds.has(id)) return undefined;
|
||||
seenIds.add(id);
|
||||
|
||||
updateable.set(id, feature);
|
||||
}
|
||||
|
||||
return updateable;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mutates updateable and applies a {@link GeoJSONSourceDiff}. Operations are processed in a specific order to ensure predictable behavior:
|
||||
* 1. Remove operations (removeAll, remove)
|
||||
* 2. Add operations (add)
|
||||
* 3. Update operations (update)
|
||||
* @returns an array of geometries that were affected by the diff - with the exception of removeAll which does not track any affected geometries.
|
||||
*/
|
||||
export function applySourceDiff(updateable: Map<GeoJSONFeatureId, GeoJSON.Feature>, diff: GeoJSONSourceDiff, promoteId?: string): GeoJSON.Geometry[] {
|
||||
const affectedGeometries: GeoJSON.Geometry[] = [];
|
||||
|
||||
if (diff.removeAll) {
|
||||
updateable.clear();
|
||||
}
|
||||
else if (diff.remove) {
|
||||
for (const id of diff.remove) {
|
||||
const existing = updateable.get(id);
|
||||
if (!existing) continue;
|
||||
|
||||
affectedGeometries.push(existing.geometry);
|
||||
updateable.delete(id);
|
||||
}
|
||||
}
|
||||
|
||||
if (diff.add) {
|
||||
for (const feature of diff.add) {
|
||||
const id = getFeatureId(feature, promoteId);
|
||||
if (id == null) continue;
|
||||
|
||||
const existing = updateable.get(id);
|
||||
if (existing) affectedGeometries.push(existing.geometry);
|
||||
|
||||
affectedGeometries.push(feature.geometry);
|
||||
updateable.set(id, feature);
|
||||
}
|
||||
}
|
||||
|
||||
if (diff.update) {
|
||||
for (const update of diff.update) {
|
||||
const existing = updateable.get(update.id);
|
||||
if (!existing) continue;
|
||||
|
||||
const changeGeometry = !!update.newGeometry;
|
||||
|
||||
const changeProps =
|
||||
update.removeAllProperties ||
|
||||
update.removeProperties?.length > 0 ||
|
||||
update.addOrUpdateProperties?.length > 0;
|
||||
|
||||
// nothing to do
|
||||
if (!changeGeometry && !changeProps) continue;
|
||||
|
||||
// clone once since we'll mutate
|
||||
affectedGeometries.push(existing.geometry);
|
||||
const feature = {...existing};
|
||||
updateable.set(update.id, feature);
|
||||
|
||||
if (changeGeometry) {
|
||||
affectedGeometries.push(update.newGeometry);
|
||||
feature.geometry = update.newGeometry;
|
||||
}
|
||||
|
||||
if (changeProps) {
|
||||
if (update.removeAllProperties) {
|
||||
feature.properties = {};
|
||||
} else {
|
||||
feature.properties = {...feature.properties || {}};
|
||||
}
|
||||
|
||||
if (update.removeProperties) {
|
||||
for (const key of update.removeProperties) {
|
||||
delete feature.properties[key];
|
||||
}
|
||||
}
|
||||
|
||||
if (update.addOrUpdateProperties) {
|
||||
for (const {key, value} of update.addOrUpdateProperties) {
|
||||
feature.properties[key] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return affectedGeometries;
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge two GeoJSONSourceDiffs, considering the order of operations as specified above (remove, add, update).
|
||||
*
|
||||
* For `add` features that use promoteId, the feature id will be set to the promoteId value temporarily so that
|
||||
* the merge can be completed, then reverted to the original promoteId state after the merge.
|
||||
*/
|
||||
export function mergeSourceDiffs(
|
||||
prevDiff: GeoJSONSourceDiff | undefined,
|
||||
nextDiff: GeoJSONSourceDiff | undefined,
|
||||
promoteId?: string
|
||||
): GeoJSONSourceDiff {
|
||||
if (!prevDiff) return nextDiff || {};
|
||||
if (!nextDiff) return prevDiff || {};
|
||||
|
||||
if (promoteId) {
|
||||
// Temporarily normalize diff.add for features using promoteId
|
||||
promoteFeatureIds(prevDiff.add, promoteId);
|
||||
promoteFeatureIds(nextDiff.add, promoteId);
|
||||
}
|
||||
|
||||
// Hash for o(1) lookups while creating a mutatable copy of the collections
|
||||
const prev = diffToHashed(prevDiff);
|
||||
const next = diffToHashed(nextDiff);
|
||||
|
||||
// Resolve merge conflicts
|
||||
resolveMergeConflicts(prev, next);
|
||||
|
||||
// Simply merge the two diffs now that conflicts have been resolved
|
||||
const merged: GeoJSONSourceDiffHashed = {};
|
||||
if (prev.removeAll || next.removeAll) merged.removeAll = true;
|
||||
merged.remove = new Set([...prev.remove , ...next.remove]);
|
||||
merged.add = new Map([...prev.add , ...next.add]);
|
||||
merged.update = new Map([...prev.update , ...next.update]);
|
||||
|
||||
// Squash the merge - removing then adding the same feature
|
||||
if (merged.remove.size && merged.add.size) {
|
||||
for (const id of merged.add.keys()) {
|
||||
merged.remove.delete(id);
|
||||
}
|
||||
}
|
||||
|
||||
// Convert back to array-based representation
|
||||
const mergedDiff = hashedToDiff(merged);
|
||||
|
||||
if (promoteId) {
|
||||
// Revert diff.add for features using promoteId
|
||||
demoteFeatureIds(mergedDiff.add, promoteId);
|
||||
}
|
||||
|
||||
return mergedDiff;
|
||||
}
|
||||
|
||||
/**
|
||||
* Resolve merge conflicts between two GeoJSONSourceDiffs considering the ordering above (remove/add/update).
|
||||
*
|
||||
* - If you `removeAll` and then `add` features in the same diff, the added features will be kept.
|
||||
* - Updates only apply to features that exist after removes and adds have been processed.
|
||||
*/
|
||||
function resolveMergeConflicts(prev: GeoJSONSourceDiffHashed, next: GeoJSONSourceDiffHashed) {
|
||||
// Removing all features with added or updated features in previous - and clear no-op removes
|
||||
if (next.removeAll) {
|
||||
prev.add.clear();
|
||||
prev.update.clear();
|
||||
prev.remove.clear();
|
||||
next.remove.clear();
|
||||
}
|
||||
|
||||
// Removing features that were added or updated in previous
|
||||
for (const id of next.remove) {
|
||||
prev.add.delete(id);
|
||||
prev.update.delete(id);
|
||||
}
|
||||
|
||||
// Updating features that were updated in previous
|
||||
for (const [id, nextUpdate] of next.update) {
|
||||
const prevUpdate = prev.update.get(id);
|
||||
if (!prevUpdate) continue;
|
||||
|
||||
next.update.set(id, mergeFeatureDiffs(prevUpdate, nextUpdate));
|
||||
prev.update.delete(id);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Merge two feature diffs for the same feature id, considering the order of operations as specified above (remove, add/update).
|
||||
*/
|
||||
function mergeFeatureDiffs(prev: GeoJSONFeatureDiff, next: GeoJSONFeatureDiff): GeoJSONFeatureDiff {
|
||||
const merged: GeoJSONFeatureDiff = {id: prev.id};
|
||||
|
||||
// Removing all properties with added or updated properties in previous - and clear no-op removes
|
||||
if (next.removeAllProperties) {
|
||||
delete prev.removeProperties;
|
||||
delete prev.addOrUpdateProperties;
|
||||
delete next.removeProperties;
|
||||
}
|
||||
// Removing properties that were added or updated in previous
|
||||
if (next.removeProperties) {
|
||||
for (const key of next.removeProperties) {
|
||||
const index = prev.addOrUpdateProperties.findIndex(prop => prop.key === key);
|
||||
if (index > -1) prev.addOrUpdateProperties.splice(index, 1);
|
||||
}
|
||||
}
|
||||
|
||||
// Merge the two diffs
|
||||
if (prev.removeAllProperties || next.removeAllProperties) {
|
||||
merged.removeAllProperties = true;
|
||||
}
|
||||
if (prev.removeProperties || next.removeProperties) {
|
||||
merged.removeProperties = [...prev.removeProperties || [], ...next.removeProperties || []];
|
||||
}
|
||||
if (prev.addOrUpdateProperties || next.addOrUpdateProperties) {
|
||||
merged.addOrUpdateProperties = [...prev.addOrUpdateProperties || [], ...next.addOrUpdateProperties || []];
|
||||
}
|
||||
if (prev.newGeometry || next.newGeometry) {
|
||||
merged.newGeometry = next.newGeometry || prev.newGeometry;
|
||||
}
|
||||
|
||||
return merged;
|
||||
}
|
||||
|
||||
/**
|
||||
* Mutates diff.add and applies a feature id using the promoteId property
|
||||
*/
|
||||
function promoteFeatureIds(add: Array<GeoJSON.Feature>, promoteId: string) {
|
||||
if (!add) return;
|
||||
|
||||
for (const feature of add) {
|
||||
const id = getFeatureId(feature, promoteId);
|
||||
if (id != null) feature.id = id;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Mutates diff.add and removes the feature id if using the promoteId property
|
||||
*/
|
||||
function demoteFeatureIds(add: Array<GeoJSON.Feature>, promoteId: string) {
|
||||
if (!add) return;
|
||||
|
||||
for (const feature of add) {
|
||||
const id = getFeatureId(feature, promoteId);
|
||||
if (id != null) delete feature.id;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
* Internal representation of GeoJSONSourceDiff using Sets and Maps for efficient operations
|
||||
*/
|
||||
type GeoJSONSourceDiffHashed = {
|
||||
removeAll?: boolean;
|
||||
remove?: Set<GeoJSONFeatureId>;
|
||||
add?: Map<GeoJSONFeatureId, GeoJSON.Feature>;
|
||||
update?: Map<GeoJSONFeatureId, GeoJSONFeatureDiff>;
|
||||
};
|
||||
|
||||
/**
|
||||
* @internal
|
||||
* Convert a GeoJSONSourceDiff to an idempotent hashed representation using Sets and Maps
|
||||
*/
|
||||
function diffToHashed(diff: GeoJSONSourceDiff | undefined): GeoJSONSourceDiffHashed {
|
||||
if (!diff) return {};
|
||||
|
||||
const hashed: GeoJSONSourceDiffHashed = {};
|
||||
|
||||
hashed.removeAll = diff.removeAll;
|
||||
hashed.remove = new Set(diff.remove || []);
|
||||
hashed.add = new Map(diff.add?.map(feature => [feature.id, feature]));
|
||||
hashed.update = new Map(diff.update?.map(update => [update.id, update]));
|
||||
|
||||
return hashed;
|
||||
}
|
||||
|
||||
/**
|
||||
* @internal
|
||||
* Convert a hashed GeoJSONSourceDiff back to the array-based representation
|
||||
*/
|
||||
function hashedToDiff(hashed: GeoJSONSourceDiffHashed): GeoJSONSourceDiff {
|
||||
const diff: GeoJSONSourceDiff = {};
|
||||
|
||||
if (hashed.removeAll) {
|
||||
diff.removeAll = hashed.removeAll;
|
||||
}
|
||||
if (hashed.remove) {
|
||||
diff.remove = Array.from(hashed.remove);
|
||||
}
|
||||
if (hashed.add) {
|
||||
diff.add = Array.from(hashed.add.values());
|
||||
}
|
||||
if (hashed.update) {
|
||||
diff.update = Array.from(hashed.update.values());
|
||||
}
|
||||
|
||||
return diff;
|
||||
}
|
||||
574
node_modules/maplibre-gl/src/source/geojson_worker_source.test.ts
generated
vendored
Normal file
574
node_modules/maplibre-gl/src/source/geojson_worker_source.test.ts
generated
vendored
Normal file
@@ -0,0 +1,574 @@
|
||||
import {describe, beforeEach, afterEach, test, expect, vi} from 'vitest';
|
||||
import {GEOJSON_TILE_LAYER_NAME} from '@maplibre/vt-pbf';
|
||||
import {GeoJSONWorkerSource, type LoadGeoJSONParameters} from './geojson_worker_source';
|
||||
import {StyleLayerIndex} from '../style/style_layer_index';
|
||||
import {OverscaledTileID} from '../tile/tile_id';
|
||||
import {setPerformance, sleep} from '../util/test/util';
|
||||
import {type FakeServer, fakeServer} from 'nise';
|
||||
import {SubdivisionGranularitySetting} from '../render/subdivision_granularity_settings';
|
||||
|
||||
import type {GeoJSONVT} from '@maplibre/geojson-vt';
|
||||
import type {Actor, IActor} from '../util/actor';
|
||||
import type {TileParameters, WorkerTileParameters, WorkerTileResult, WorkerTileWithData} from './worker_source';
|
||||
import type {LayerSpecification} from '@maplibre/maplibre-gl-style-spec';
|
||||
import type {WorkerTile} from './worker_tile';
|
||||
|
||||
const actor = {send: () => {}} as any as Actor;
|
||||
|
||||
describe('geojson tile worker source', () => {
|
||||
const actor: IActor = {sendAsync: () => Promise.resolve({})} as any as IActor;
|
||||
|
||||
test('GeoJSONWorkerSource.removeTile removes loaded tile', async () => {
|
||||
const source = new GeoJSONWorkerSource(actor, new StyleLayerIndex(), []);
|
||||
|
||||
source.tileState.loaded = {
|
||||
'0': {} as WorkerTile
|
||||
};
|
||||
|
||||
const res = await source.removeTile({
|
||||
source: 'source',
|
||||
uid: 0
|
||||
} as any as TileParameters);
|
||||
expect(res).toBeUndefined();
|
||||
|
||||
expect(source.tileState.loaded).toEqual({});
|
||||
});
|
||||
|
||||
test('GeoJSONWorkerSource.reloadTile reloads a previously-loaded tile', async () => {
|
||||
const source = new GeoJSONWorkerSource(actor, new StyleLayerIndex(), []);
|
||||
const parse = vi.fn().mockResolvedValue({} as WorkerTileResult);
|
||||
|
||||
source.tileState.loaded = {
|
||||
'0': {
|
||||
status: 'done',
|
||||
vectorTile: {},
|
||||
parse
|
||||
} as any as WorkerTile
|
||||
};
|
||||
|
||||
const reloadPromise = source.reloadTile({uid: 0} as any as WorkerTileParameters);
|
||||
expect(parse).toHaveBeenCalledTimes(1);
|
||||
await expect(reloadPromise).resolves.toBeTruthy();
|
||||
});
|
||||
|
||||
test('GeoJSONWorkerSource.reloadTile returns parse result without rawTileData when parsing state was already consumed', async () => {
|
||||
const source = new GeoJSONWorkerSource(actor, new StyleLayerIndex(), []);
|
||||
const parseResult = {buckets: []} as any as WorkerTileResult;
|
||||
const parse = vi.fn().mockResolvedValue(parseResult);
|
||||
|
||||
source.tileState.loaded = {
|
||||
'0': {
|
||||
status: 'parsing',
|
||||
vectorTile: {},
|
||||
parse
|
||||
} as any as WorkerTile
|
||||
};
|
||||
|
||||
const result = await source.reloadTile({uid: 0} as any as WorkerTileParameters) as WorkerTileWithData;
|
||||
|
||||
expect(parse).toHaveBeenCalledTimes(1);
|
||||
expect(result).toBe(parseResult);
|
||||
expect(result.rawTileData).toBeUndefined();
|
||||
});
|
||||
|
||||
test('GeoJSONWorkerSource.loadTile reparses tile if reloadTile has been called during parsing', async () => {
|
||||
const layerIndex = new StyleLayerIndex([{
|
||||
id: 'test',
|
||||
source: 'source',
|
||||
'source-layer': '_geojsonTileLayer',
|
||||
type: 'symbol',
|
||||
layout: {
|
||||
'icon-image': 'hello',
|
||||
'text-font': ['StandardFont-Bold'],
|
||||
'text-field': '{name}'
|
||||
}
|
||||
}]);
|
||||
|
||||
const actor = {
|
||||
sendAsync: (message: {type: string; data: unknown}, abortController: AbortController) => {
|
||||
return new Promise((resolve, _reject) => {
|
||||
const res = setTimeout(() => {
|
||||
const response = message.type === 'getImages' ?
|
||||
{'hello': {width: 1, height: 1, data: new Uint8Array([0])}} :
|
||||
{'StandardFont-Bold': {width: 1, height: 1, data: new Uint8Array([0])}};
|
||||
resolve(response);
|
||||
}, 100);
|
||||
abortController.signal.addEventListener('abort', () => {
|
||||
clearTimeout(res);
|
||||
});
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
const source = new GeoJSONWorkerSource(actor as any, layerIndex, ['hello']);
|
||||
|
||||
const geoJson = {
|
||||
type: 'FeatureCollection',
|
||||
features: [{
|
||||
type: 'Feature',
|
||||
id: 1,
|
||||
geometry: {
|
||||
type: 'Point',
|
||||
coordinates: [0, 0]
|
||||
},
|
||||
properties: {
|
||||
name: 'test'
|
||||
}
|
||||
}]
|
||||
} as GeoJSON.GeoJSON;
|
||||
|
||||
await source.loadData({source: 'source', data: geoJson, geojsonVtOptions: {}} as LoadGeoJSONParameters);
|
||||
|
||||
source.loadTile({
|
||||
source: 'source',
|
||||
uid: 0,
|
||||
tileID: {overscaledZ: 0, wrap: 0, canonical: {x: 0, y: 0, z: 0, w: 0}},
|
||||
subdivisionGranularity: SubdivisionGranularitySetting.noSubdivision,
|
||||
} as any as WorkerTileParameters).then(() => expect(false).toBeTruthy());
|
||||
|
||||
// allow promise to run
|
||||
await sleep(0);
|
||||
|
||||
const res = await source.reloadTile({
|
||||
source: 'source',
|
||||
uid: 0,
|
||||
tileID: {overscaledZ: 0, wrap: 0, canonical: {x: 0, y: 0, z: 0, w: 0}},
|
||||
subdivisionGranularity: SubdivisionGranularitySetting.noSubdivision,
|
||||
} as any as WorkerTileParameters) as WorkerTileWithData;
|
||||
|
||||
expect(res).toBeDefined();
|
||||
expect(res.rawTileData).toBeDefined();
|
||||
});
|
||||
|
||||
test('GeoJSONWorkerSource.loadTile returns null for an empty tile', async () => {
|
||||
const source = new GeoJSONWorkerSource(actor, new StyleLayerIndex(), []);
|
||||
await source.loadData({source: 'source', data: {type: 'FeatureCollection', features: []}, geojsonVtOptions: {}} as LoadGeoJSONParameters);
|
||||
|
||||
const result = await source.loadTile({
|
||||
source: 'source',
|
||||
uid: 0,
|
||||
tileID: {overscaledZ: 0, wrap: 0, canonical: {x: 0, y: 0, z: 0, w: 0}},
|
||||
} as any as WorkerTileParameters);
|
||||
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
test('GeoJSONWorkerSource.loadTile throws error when data has not been loaded', async () => {
|
||||
const source = new GeoJSONWorkerSource(actor, new StyleLayerIndex(), []);
|
||||
|
||||
await expect(source.loadTile({
|
||||
source: 'source',
|
||||
uid: 0,
|
||||
tileID: {overscaledZ: 0, wrap: 0, canonical: {x: 0, y: 0, z: 0, w: 0}},
|
||||
} as any as WorkerTileParameters)).rejects.toThrowError(/Unable to parse the data into a cluster or geojson/);
|
||||
});
|
||||
|
||||
test('GeoJSONWorkerSource.abortTile aborts tile state', async () => {
|
||||
const source = new GeoJSONWorkerSource(actor, new StyleLayerIndex(), []);
|
||||
const abortSpy = vi.spyOn(source.tileState, 'abort');
|
||||
|
||||
await source.abortTile({
|
||||
source: 'source',
|
||||
uid: 0
|
||||
} as any as TileParameters);
|
||||
|
||||
expect(abortSpy).toHaveBeenCalledWith(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe('reloadTile', () => {
|
||||
test('does not rebuild vector data unless data has changed', async () => {
|
||||
const layers = [
|
||||
{
|
||||
id: 'mylayer',
|
||||
source: 'sourceId',
|
||||
type: 'symbol',
|
||||
}
|
||||
] as LayerSpecification[];
|
||||
const layerIndex = new StyleLayerIndex(layers);
|
||||
const source = new GeoJSONWorkerSource(actor, layerIndex, []);
|
||||
const spy = vi.spyOn(source, 'loadVectorTile');
|
||||
const geoJson = {
|
||||
'type': 'Feature',
|
||||
'geometry': {
|
||||
'type': 'Point',
|
||||
'coordinates': [0, 0]
|
||||
}
|
||||
};
|
||||
const tileParams = {
|
||||
source: 'sourceId',
|
||||
uid: 0,
|
||||
tileID: new OverscaledTileID(0, 0, 0, 0, 0),
|
||||
maxZoom: 10
|
||||
};
|
||||
|
||||
await source.loadData({source: 'sourceId', data: geoJson, geojsonVtOptions: {}} as LoadGeoJSONParameters);
|
||||
|
||||
// first call should load vector data from geojson
|
||||
const firstData = await source.reloadTile(tileParams as any as WorkerTileParameters) as WorkerTileWithData;
|
||||
expect(spy).toHaveBeenCalledTimes(1);
|
||||
|
||||
// second call won't give us new rawTileData
|
||||
let data = await source.reloadTile(tileParams as any as WorkerTileParameters) as WorkerTileWithData;
|
||||
expect('rawTileData' in data).toBeFalsy();
|
||||
data.rawTileData = firstData.rawTileData;
|
||||
data.encoding = 'mvt';
|
||||
expect(data).toEqual(firstData);
|
||||
|
||||
// also shouldn't call loadVectorData again
|
||||
expect(spy).toHaveBeenCalledTimes(1);
|
||||
|
||||
// replace geojson data
|
||||
await source.loadData({source: 'sourceId', data: geoJson, geojsonVtOptions: {}} as LoadGeoJSONParameters);
|
||||
|
||||
// should call loadVectorData again after changing geojson data
|
||||
data = await source.reloadTile(tileParams as any as WorkerTileParameters) as WorkerTileWithData;
|
||||
expect('rawTileData' in data).toBeTruthy();
|
||||
expect(data).toEqual(firstData);
|
||||
expect(spy).toHaveBeenCalledTimes(2);
|
||||
});
|
||||
|
||||
test('handles null and undefined properties during tile serialization', async () => {
|
||||
const layers = [
|
||||
{
|
||||
id: 'mylayer',
|
||||
source: 'sourceId',
|
||||
type: 'symbol',
|
||||
}
|
||||
] as LayerSpecification[];
|
||||
const layerIndex = new StyleLayerIndex(layers);
|
||||
const source = new GeoJSONWorkerSource(actor, layerIndex, []);
|
||||
const geoJson = {
|
||||
'type': 'Feature',
|
||||
'geometry': {
|
||||
'type': 'Point',
|
||||
'coordinates': [0, 0]
|
||||
},
|
||||
'properties': {
|
||||
'nullProperty': null,
|
||||
'undefinedProperty': undefined,
|
||||
'stringProperty': 'string'
|
||||
}
|
||||
};
|
||||
const tileParams = {
|
||||
source: 'sourceId',
|
||||
uid: 0,
|
||||
tileID: new OverscaledTileID(0, 0, 0, 0, 0),
|
||||
maxZoom: 10
|
||||
};
|
||||
|
||||
await source.loadData({type: 'geojson', source: 'sourceId', data: geoJson, geojsonVtOptions: {}} as LoadGeoJSONParameters);
|
||||
|
||||
// load vector data from geojson, passing through the tile serialization step
|
||||
const data = await source.reloadTile(tileParams as any as WorkerTileParameters) as WorkerTileWithData;
|
||||
expect(data.featureIndex).toBeDefined();
|
||||
|
||||
// deserialize tile layers in the feature index
|
||||
data.featureIndex.rawTileData = data.rawTileData;
|
||||
const featureLayers = data.featureIndex.loadVTLayers();
|
||||
expect(Object.keys(featureLayers)).toHaveLength(1);
|
||||
|
||||
// validate supported features are present in the index
|
||||
expect(featureLayers[GEOJSON_TILE_LAYER_NAME].feature(0).properties['stringProperty']).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('resourceTiming', () => {
|
||||
let server: FakeServer;
|
||||
beforeEach(() => {
|
||||
setPerformance();
|
||||
global.fetch = null;
|
||||
server = fakeServer.create();
|
||||
});
|
||||
afterEach(() => {
|
||||
server.restore();
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
const layers = [
|
||||
{
|
||||
id: 'mylayer',
|
||||
source: 'sourceId',
|
||||
type: 'symbol',
|
||||
}
|
||||
] as LayerSpecification[];
|
||||
const geoJson = {
|
||||
'type': 'Feature',
|
||||
'geometry': {
|
||||
'type': 'Point',
|
||||
'coordinates': [0, 0]
|
||||
}
|
||||
} as GeoJSON.GeoJSON;
|
||||
|
||||
test('loadData - url', async () => {
|
||||
server.respondWith(request => {
|
||||
request.respond(200, {'Content-Type': 'application/json'}, JSON.stringify(geoJson));
|
||||
});
|
||||
const exampleResourceTiming = {
|
||||
connectEnd: 473,
|
||||
connectStart: 473,
|
||||
decodedBodySize: 86494,
|
||||
domainLookupEnd: 473,
|
||||
domainLookupStart: 473,
|
||||
duration: 341,
|
||||
encodedBodySize: 52528,
|
||||
entryType: 'resource',
|
||||
fetchStart: 473.5,
|
||||
initiatorType: 'xmlhttprequest',
|
||||
name: 'http://localhost:2900/fake.geojson',
|
||||
nextHopProtocol: 'http/1.1',
|
||||
redirectEnd: 0,
|
||||
redirectStart: 0,
|
||||
requestStart: 477,
|
||||
responseEnd: 815,
|
||||
responseStart: 672,
|
||||
secureConnectionStart: 0
|
||||
} as any as PerformanceEntry;
|
||||
|
||||
window.performance.getEntriesByName = vi.fn().mockReturnValue([exampleResourceTiming]);
|
||||
|
||||
const layerIndex = new StyleLayerIndex(layers);
|
||||
const source = new GeoJSONWorkerSource(actor, layerIndex, []);
|
||||
|
||||
const promise = source.loadData({source: 'testSource', request: {url: 'http://localhost/nonexistent', collectResourceTiming: true}, geojsonVtOptions: {}} as LoadGeoJSONParameters);
|
||||
server.respond();
|
||||
const result = await promise;
|
||||
|
||||
expect(result.resourceTiming.testSource).toEqual([exampleResourceTiming]);
|
||||
});
|
||||
|
||||
test('loadData - url (resourceTiming fallback method)', async () => {
|
||||
server.respondWith(request => {
|
||||
request.respond(200, {'Content-Type': 'application/json'}, JSON.stringify(geoJson));
|
||||
});
|
||||
const sampleMarks = [100, 350];
|
||||
const marks = {};
|
||||
const measures = {};
|
||||
window.performance.getEntriesByName = vi.fn().mockImplementation((name) => { return measures[name] || []; });
|
||||
vi.spyOn(performance, 'mark').mockImplementation((name) => {
|
||||
marks[name] = sampleMarks.shift();
|
||||
return null;
|
||||
});
|
||||
window.performance.measure = vi.fn().mockImplementation((name, start, end) => {
|
||||
measures[name] = measures[name] || [];
|
||||
measures[name].push({
|
||||
duration: marks[end] - marks[start],
|
||||
entryType: 'measure',
|
||||
name,
|
||||
startTime: marks[start]
|
||||
});
|
||||
return null;
|
||||
});
|
||||
vi.spyOn(performance, 'clearMarks').mockImplementation(() => { return null; });
|
||||
vi.spyOn(performance, 'clearMeasures').mockImplementation(() => { return null; });
|
||||
|
||||
const layerIndex = new StyleLayerIndex(layers);
|
||||
const source = new GeoJSONWorkerSource(actor, layerIndex, []);
|
||||
|
||||
const promise = source.loadData({source: 'testSource', request: {url: 'http://localhost/nonexistent', collectResourceTiming: true}, geojsonVtOptions: {}} as LoadGeoJSONParameters);
|
||||
server.respond();
|
||||
const result = await promise;
|
||||
|
||||
expect(result.resourceTiming.testSource).toEqual(
|
||||
[{'duration': 250, 'entryType': 'measure', 'name': 'http://localhost/nonexistent', 'startTime': 100}]
|
||||
);
|
||||
});
|
||||
|
||||
test('loadData - data', async () => {
|
||||
const layerIndex = new StyleLayerIndex(layers);
|
||||
const source = new GeoJSONWorkerSource(actor, layerIndex, []);
|
||||
|
||||
const result = await source.loadData({source: 'testSource', data: geoJson, geojsonVtOptions: {}} as LoadGeoJSONParameters);
|
||||
expect(result.resourceTiming).toBeUndefined();
|
||||
expect(result.data).toBeUndefined();
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('loadData', () => {
|
||||
let server: FakeServer;
|
||||
beforeEach(() => {
|
||||
global.fetch = null;
|
||||
server = fakeServer.create();
|
||||
});
|
||||
afterEach(() => {
|
||||
server.restore();
|
||||
});
|
||||
|
||||
const layers = [
|
||||
{
|
||||
id: 'layer1',
|
||||
source: 'source1',
|
||||
type: 'symbol',
|
||||
},
|
||||
{
|
||||
id: 'layer2',
|
||||
source: 'source2',
|
||||
type: 'symbol',
|
||||
}
|
||||
] as LayerSpecification[];
|
||||
|
||||
const geoJson = {
|
||||
'type': 'Feature',
|
||||
'geometry': {
|
||||
'type': 'Point',
|
||||
'coordinates': [0, 0]
|
||||
}
|
||||
} as GeoJSON.GeoJSON;
|
||||
|
||||
const updateableGeoJson = {
|
||||
type: 'Feature',
|
||||
id: 'point',
|
||||
geometry: {
|
||||
type: 'Point',
|
||||
coordinates: [0, 0],
|
||||
},
|
||||
properties: {},
|
||||
} as GeoJSON.GeoJSON;
|
||||
|
||||
const updateableFeatureCollection = {
|
||||
type: 'FeatureCollection',
|
||||
features: [
|
||||
{
|
||||
type: 'Feature',
|
||||
id: 'point1',
|
||||
geometry: {
|
||||
type: 'Point',
|
||||
coordinates: [0, 0],
|
||||
},
|
||||
properties: {},
|
||||
},
|
||||
{
|
||||
type: 'Feature',
|
||||
id: 'point2',
|
||||
geometry: {
|
||||
type: 'Point',
|
||||
coordinates: [1, 1],
|
||||
},
|
||||
properties: {},
|
||||
}
|
||||
]
|
||||
} as GeoJSON.GeoJSON;
|
||||
|
||||
const layerIndex = new StyleLayerIndex(layers);
|
||||
function createWorker() {
|
||||
return new GeoJSONWorkerSource(actor, layerIndex, []);
|
||||
}
|
||||
|
||||
test('abandons previous requests', async () => {
|
||||
const worker = createWorker();
|
||||
|
||||
server.respondWith(request => {
|
||||
request.respond(200, {'Content-Type': 'application/json'}, JSON.stringify(geoJson));
|
||||
});
|
||||
|
||||
const p1 = worker.loadData({source: 'source1', request: {url: ''}, geojsonVtOptions: {}} as LoadGeoJSONParameters);
|
||||
await sleep(0);
|
||||
|
||||
const p2 = worker.loadData({source: 'source1', request: {url: ''}, geojsonVtOptions: {}} as LoadGeoJSONParameters);
|
||||
|
||||
await sleep(0);
|
||||
|
||||
server.respond();
|
||||
|
||||
const firstCallResult = await p1;
|
||||
expect(firstCallResult && firstCallResult.abandoned).toBeTruthy();
|
||||
const result = await p2;
|
||||
expect(result && result.abandoned).toBeFalsy();
|
||||
});
|
||||
|
||||
test('removeSource aborts requests', async () => {
|
||||
const worker = createWorker();
|
||||
|
||||
server.respondWith(request => {
|
||||
request.respond(200, {'Content-Type': 'application/json'}, JSON.stringify(geoJson));
|
||||
});
|
||||
|
||||
const loadPromise = worker.loadData({source: 'source1', request: {url: ''}} as LoadGeoJSONParameters);
|
||||
await sleep(0);
|
||||
const removePromise = worker.removeSource({source: 'source1', type: 'type'});
|
||||
await sleep(0);
|
||||
|
||||
server.respond();
|
||||
|
||||
const result = await loadPromise;
|
||||
expect(result && result.abandoned).toBeTruthy();
|
||||
await removePromise;
|
||||
});
|
||||
|
||||
test('loadData with geojson creates an updateable source', async () => {
|
||||
const worker = new GeoJSONWorkerSource(actor, layerIndex, []);
|
||||
|
||||
await worker.loadData({source: 'source1', data: updateableGeoJson, geojsonVtOptions: {}} as LoadGeoJSONParameters);
|
||||
await expect(worker.loadData({source: 'source1', dataDiff: {removeAll: true}, geojsonVtOptions: {}} as LoadGeoJSONParameters)).resolves.toBeDefined();
|
||||
});
|
||||
|
||||
test('loadData with geojson network call creates an updateable source', async () => {
|
||||
const worker = new GeoJSONWorkerSource(actor, layerIndex, []);
|
||||
|
||||
server.respondWith(request => {
|
||||
request.respond(200, {'Content-Type': 'application/json'}, JSON.stringify(updateableGeoJson));
|
||||
});
|
||||
|
||||
const load1Promise = worker.loadData({source: 'source1', request: {url: ''}, geojsonVtOptions: {}} as LoadGeoJSONParameters);
|
||||
server.respond();
|
||||
|
||||
const result = await load1Promise;
|
||||
expect(result.data).toStrictEqual(updateableGeoJson);
|
||||
await expect(worker.loadData({source: 'source1', dataDiff: {removeAll: true}} as LoadGeoJSONParameters)).resolves.toBeDefined();
|
||||
});
|
||||
|
||||
test('loadData with diff updates', async () => {
|
||||
const worker = new GeoJSONWorkerSource(actor, layerIndex, []);
|
||||
|
||||
await worker.loadData({source: 'source1', data: updateableGeoJson, geojsonVtOptions: {}} as LoadGeoJSONParameters);
|
||||
const result = await worker.loadData({source: 'source1', dataDiff: {
|
||||
add: [{
|
||||
type: 'Feature',
|
||||
id: 'update_point',
|
||||
geometry: {type: 'Point', coordinates: [0, 0]},
|
||||
properties: {}
|
||||
}]
|
||||
}} as LoadGeoJSONParameters);
|
||||
expect(result).toBeDefined();
|
||||
expect(result.data).toBeUndefined();
|
||||
});
|
||||
|
||||
test('loadData should reject as first call with no data', async () => {
|
||||
const worker = new GeoJSONWorkerSource(actor, layerIndex, []);
|
||||
|
||||
await expect(worker.loadData({} as LoadGeoJSONParameters)).rejects.toBeDefined();
|
||||
});
|
||||
|
||||
test('loadData should resolve as subsequent call with no data', async () => {
|
||||
const worker = new GeoJSONWorkerSource(actor, layerIndex, []);
|
||||
|
||||
await worker.loadData({source: 'source1', data: updateableGeoJson, geojsonVtOptions: {}} as LoadGeoJSONParameters);
|
||||
await expect(worker.loadData({} as LoadGeoJSONParameters)).resolves.toBeDefined();
|
||||
});
|
||||
|
||||
test('loadData should process cluster change with no data and build relevant map and reduce methods', async () => {
|
||||
const updateSpy = vi.fn();
|
||||
const mockGeoJSONIndex = {
|
||||
updateClusterOptions: updateSpy
|
||||
} as any as GeoJSONVT;
|
||||
const worker = new GeoJSONWorkerSource(actor, layerIndex, [], () => mockGeoJSONIndex);
|
||||
await worker.loadData({source: 'source1', data: updateableFeatureCollection, geojsonVtOptions: {cluster: false}} as LoadGeoJSONParameters);
|
||||
expect(mockGeoJSONIndex.updateClusterOptions).not.toHaveBeenCalled();
|
||||
await expect(worker.loadData({
|
||||
type: 'geojson',
|
||||
updateCluster: true,
|
||||
geojsonVtOptions: {
|
||||
cluster: true,
|
||||
clusterOptions: {},
|
||||
},
|
||||
clusterProperties: {
|
||||
'max': ['max', ['get', 'scalerank']],
|
||||
'sum': ['+', ['get', 'scalerank']],
|
||||
}
|
||||
} as LoadGeoJSONParameters)).resolves.toBeDefined();
|
||||
expect(updateSpy).toHaveBeenCalled();
|
||||
expect(updateSpy.mock.calls[0][1].map).toBeInstanceOf(Function);
|
||||
expect(updateSpy.mock.calls[0][1].reduce).toBeInstanceOf(Function);
|
||||
});
|
||||
});
|
||||
372
node_modules/maplibre-gl/src/source/geojson_worker_source.ts
generated
vendored
Normal file
372
node_modules/maplibre-gl/src/source/geojson_worker_source.ts
generated
vendored
Normal file
@@ -0,0 +1,372 @@
|
||||
import {getJSON} from '../util/ajax';
|
||||
import {RequestPerformance} from '../util/request_performance';
|
||||
import {GeoJSONWrapper} from '@maplibre/vt-pbf';
|
||||
import {EXTENT} from '../data/extent';
|
||||
import {GeoJSONVT, type GeoJSONVTOptions} from '@maplibre/geojson-vt';
|
||||
import {createExpression, type FilterSpecification} from '@maplibre/maplibre-gl-style-spec';
|
||||
import {isAbortError} from '../util/abort_error';
|
||||
import {toVirtualVectorTile} from './vector_tile_overzoomed';
|
||||
import {WorkerTile} from './worker_tile';
|
||||
import {WorkerTileState, type ParsingState} from './worker_tile_state';
|
||||
import {extend} from '../util/util';
|
||||
|
||||
import type {GeoJSONSourceDiff} from './geojson_source_diff';
|
||||
import type {WorkerSource, WorkerTileParameters, TileParameters, WorkerTileResult} from './worker_source';
|
||||
import type {LoadVectorTileResult} from './vector_tile_worker_source';
|
||||
import type {RequestParameters} from '../util/ajax';
|
||||
import type {ClusterIDAndSource, GeoJSONWorkerSourceLoadDataResult, RemoveSourceParams} from '../util/actor_messages';
|
||||
import type {IActor} from '../util/actor';
|
||||
import type {StyleLayerIndex} from '../style/style_layer_index';
|
||||
|
||||
/**
|
||||
* The geojson worker options that can be passed to the worker
|
||||
*/
|
||||
export type GeoJSONWorkerOptions = {
|
||||
source?: string;
|
||||
geojsonVtOptions?: GeoJSONVTOptions;
|
||||
clusterProperties?: Record<string, [unknown, unknown]>;
|
||||
filter?: FilterSpecification;
|
||||
collectResourceTiming?: boolean;
|
||||
};
|
||||
|
||||
/**
|
||||
* Parameters needed to load GeoJSON to the worker - must specify either a `request`, `data` or `dataDiff`.
|
||||
*/
|
||||
export type LoadGeoJSONParameters = GeoJSONWorkerOptions & {
|
||||
type: 'geojson';
|
||||
/**
|
||||
* Request parameters including a URL to fetch GeoJSON data.
|
||||
*/
|
||||
request?: RequestParameters;
|
||||
/**
|
||||
* GeoJSON data to set as the source's data.
|
||||
*/
|
||||
data?: GeoJSON.GeoJSON;
|
||||
/**
|
||||
* GeoJSONSourceDiff to apply to the existing GeoJSON source data.
|
||||
*/
|
||||
dataDiff?: GeoJSONSourceDiff;
|
||||
/**
|
||||
* Update the supercluster using the latest worker cluster options.
|
||||
*/
|
||||
updateCluster?: boolean;
|
||||
};
|
||||
|
||||
/**
|
||||
* The {@link WorkerSource} implementation that supports {@link GeoJSONSource}.
|
||||
* This class is designed to be easily reused to support custom source types
|
||||
* for data formats that can be parsed/converted into an in-memory GeoJSON
|
||||
* representation. To do so, create it with
|
||||
* `new GeoJSONWorkerSource(actor, layerIndex, customLoadGeoJSONFunction)`.
|
||||
* For a full example, see [mapbox-gl-topojson](https://github.com/developmentseed/mapbox-gl-topojson).
|
||||
*/
|
||||
export class GeoJSONWorkerSource implements WorkerSource {
|
||||
actor: IActor;
|
||||
layerIndex: StyleLayerIndex;
|
||||
availableImages: Array<string>;
|
||||
tileState: WorkerTileState;
|
||||
|
||||
_pendingRequest: AbortController;
|
||||
_geoJSONIndex: GeoJSONVT;
|
||||
_createGeoJSONIndex: typeof createGeoJSONIndex;
|
||||
|
||||
constructor(actor: IActor, layerIndex: StyleLayerIndex, availableImages: Array<string>, createGeoJSONIndexFunc: typeof createGeoJSONIndex = createGeoJSONIndex) {
|
||||
this.actor = actor;
|
||||
this.layerIndex = layerIndex;
|
||||
this.availableImages = availableImages;
|
||||
this.tileState = new WorkerTileState();
|
||||
this._createGeoJSONIndex = createGeoJSONIndexFunc;
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves and sends loaded vector tiles to the main thread.
|
||||
*/
|
||||
loadVectorTile(params: WorkerTileParameters): LoadVectorTileResult | null {
|
||||
if (!this._geoJSONIndex) throw new Error('Unable to parse the data into a cluster or geojson');
|
||||
|
||||
const {z, x, y} = params.tileID.canonical;
|
||||
const geoJSONTile = this._geoJSONIndex.getTile(z, x, y);
|
||||
if (!geoJSONTile) return null;
|
||||
|
||||
const geojsonWrapper = new GeoJSONWrapper(geoJSONTile.features, {version: 2, extent: EXTENT});
|
||||
return toVirtualVectorTile(geojsonWrapper);
|
||||
}
|
||||
|
||||
/**
|
||||
* Implements {@link WorkerSource.loadTile}.
|
||||
*/
|
||||
async loadTile(params: WorkerTileParameters): Promise<WorkerTileResult | null> {
|
||||
const {uid} = params;
|
||||
|
||||
const workerTile = new WorkerTile(params);
|
||||
workerTile.abort = new AbortController();
|
||||
try {
|
||||
const loadResult = this.loadVectorTile(params);
|
||||
if (!loadResult) return null;
|
||||
|
||||
const {vectorTile, rawData} = loadResult;
|
||||
|
||||
workerTile.vectorTile = vectorTile;
|
||||
this.tileState.markLoaded(uid, workerTile);
|
||||
|
||||
const parseState = {rawData};
|
||||
this.tileState.setParsing(uid, parseState); // Keep data so reloadTile can access if parse is canceled.
|
||||
try {
|
||||
return await this._parseWorkerTile(workerTile, params, parseState);
|
||||
} finally {
|
||||
this.tileState.clearParsing(uid);
|
||||
}
|
||||
} catch (err) {
|
||||
workerTile.status = 'done';
|
||||
this.tileState.markLoaded(uid, workerTile);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
private async _reloadLoadedTile(params: WorkerTileParameters): Promise<WorkerTileResult> {
|
||||
const uid = params.uid;
|
||||
|
||||
const workerTile = this.tileState.getLoaded(uid);
|
||||
if (!workerTile) throw new Error('Should not be trying to reload a tile that was never loaded or has been removed');
|
||||
|
||||
workerTile.showCollisionBoxes = params.showCollisionBoxes;
|
||||
|
||||
if (workerTile.status === 'parsing') {
|
||||
// If we are cancelling the original parse, make sure to pass the rawData from the original parse.
|
||||
const parseState = this.tileState.consumeParsing(uid);
|
||||
return await this._parseWorkerTile(workerTile, params, parseState);
|
||||
}
|
||||
|
||||
// If there was no vector tile data on the initial load, don't try and reparse the tile.
|
||||
if (workerTile.status === 'done' && workerTile.vectorTile) {
|
||||
return await this._parseWorkerTile(workerTile, params);
|
||||
}
|
||||
}
|
||||
|
||||
async _parseWorkerTile(workerTile: WorkerTile, params: WorkerTileParameters, parseState?: ParsingState): Promise<WorkerTileResult> {
|
||||
let result = await workerTile.parse(workerTile.vectorTile, this.layerIndex, this.availableImages, this.actor, params.subdivisionGranularity);
|
||||
|
||||
if (parseState) {
|
||||
const {rawData} = parseState;
|
||||
// Transferring a copy of rawTileData because the worker needs to retain its copy.
|
||||
result = extend({rawTileData: rawData.slice(0), encoding: 'mvt'}, result);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Implements {@link WorkerSource.abortTile}.
|
||||
*/
|
||||
async abortTile(params: TileParameters): Promise<void> {
|
||||
this.tileState.abort(params.uid);
|
||||
}
|
||||
|
||||
/**
|
||||
* Implements {@link WorkerSource.removeTile}.
|
||||
*/
|
||||
async removeTile(params: TileParameters): Promise<void> {
|
||||
this.tileState.removeLoaded(params.uid);
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetches (if appropriate), parses and indexes geojson data into tiles. This
|
||||
* preparatory method must be called before {@link GeoJSONWorkerSource.loadTile}
|
||||
* can correctly serve up tiles. The first call to this method must contain a valid
|
||||
* {@link params.data}, {@link params.request} or {@link params.dataDiff}. Subsequent
|
||||
* calls may omit these parameters to reprocess the existing data (such as to update
|
||||
* clustering options).
|
||||
*
|
||||
* Defers to {@link GeoJSONWorkerSource.loadAndProcessGeoJSON} for the pre-processing.
|
||||
*
|
||||
* When a `loadData` request comes in while a previous one is being processed,
|
||||
* the previous one is aborted.
|
||||
*
|
||||
* @param params - the parameters
|
||||
* @returns a promise that resolves when the data is loaded and parsed into a GeoJSON object
|
||||
*/
|
||||
async loadData(params: LoadGeoJSONParameters): Promise<GeoJSONWorkerSourceLoadDataResult> {
|
||||
this._pendingRequest?.abort();
|
||||
|
||||
const timing = this._startRequestTiming(params);
|
||||
this._pendingRequest = new AbortController();
|
||||
try {
|
||||
await this.loadAndProcessGeoJSON(params, this._pendingRequest);
|
||||
delete this._pendingRequest;
|
||||
this.tileState.clearLoaded();
|
||||
|
||||
// Sending a large GeoJSON payload from the worker to the main thread is slow so only do if necessary.
|
||||
// Send data only if it was loaded from a URL, otherwise the main thread already has a copy of this data.
|
||||
const result: GeoJSONWorkerSourceLoadDataResult = {};
|
||||
if (params.request) result.data = params.data;
|
||||
|
||||
this._finishRequestTiming(timing, params, result);
|
||||
return result;
|
||||
} catch (err) {
|
||||
delete this._pendingRequest;
|
||||
if (!isAbortError(err)) throw err;
|
||||
return {abandoned: true};
|
||||
}
|
||||
}
|
||||
|
||||
_startRequestTiming(params: LoadGeoJSONParameters): RequestPerformance | undefined {
|
||||
if (!params.request?.collectResourceTiming) return;
|
||||
return new RequestPerformance(params.request.url);
|
||||
}
|
||||
|
||||
_finishRequestTiming(timing: RequestPerformance, params: LoadGeoJSONParameters, result: GeoJSONWorkerSourceLoadDataResult): void {
|
||||
const timingData = timing?.finish();
|
||||
if (!timingData) return;
|
||||
|
||||
// it's necessary to eval the result of getEntriesByName() here via parse/stringify
|
||||
// late evaluation in the main thread causes TypeError: illegal invocation
|
||||
result.resourceTiming = {[params.source]: JSON.parse(JSON.stringify(timingData))};
|
||||
}
|
||||
|
||||
/**
|
||||
* Implements {@link WorkerSource.reloadTile}.
|
||||
*
|
||||
* If the tile is loaded, reload by re-parsing the already available tile data.
|
||||
* Otherwise, such as after a setData() call, we load the tile fresh.
|
||||
*
|
||||
* @param params - the parameters
|
||||
* @returns A promise that resolves when the tile is reloaded
|
||||
*/
|
||||
reloadTile(params: WorkerTileParameters): Promise<WorkerTileResult> {
|
||||
const tile = this.tileState.getLoaded(params.uid);
|
||||
|
||||
if (tile) {
|
||||
return this._reloadLoadedTile(params);
|
||||
}
|
||||
|
||||
return this.loadTile(params);
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch, parse and process GeoJSON according to the given parameters.
|
||||
* Defers to {@link GeoJSONWorkerSource._loadGeoJSONFromString} for the fetching and parsing.
|
||||
*
|
||||
* @param params - the parameters
|
||||
* @param abortController - the abort controller that allows aborting this operation
|
||||
* @returns a promise that is resolved with the processes GeoJSON
|
||||
*/
|
||||
async loadAndProcessGeoJSON(params: LoadGeoJSONParameters, abortController: AbortController): Promise<GeoJSON.GeoJSON> {
|
||||
if (params.request) {
|
||||
params.data = (await getJSON<GeoJSON.GeoJSON>(params.request, abortController)).data;
|
||||
}
|
||||
|
||||
if (params.data) {
|
||||
params.data = this._filterGeoJSON(params.data, params.filter);
|
||||
this._geoJSONIndex = this._createGeoJSONIndex(params.data, params);
|
||||
return;
|
||||
}
|
||||
|
||||
if (params.dataDiff) {
|
||||
this._geoJSONIndex ??= this._createGeoJSONIndex({type: 'FeatureCollection', features: []}, params);
|
||||
this._geoJSONIndex.updateData(params.dataDiff, this._getFilterPredicate(params.filter));
|
||||
return;
|
||||
}
|
||||
|
||||
if (params.updateCluster) {
|
||||
this._geoJSONIndex.updateClusterOptions(params.geojsonVtOptions.cluster, getSuperclusterOptions(params));
|
||||
}
|
||||
|
||||
if (this._geoJSONIndex == null) {
|
||||
throw new Error(`Input data given to '${params.source}' is not a valid GeoJSON object.`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Applies a filter to a GeoJSON object.
|
||||
*/
|
||||
_filterGeoJSON(data: GeoJSON.GeoJSON, filter: FilterSpecification): GeoJSON.GeoJSON {
|
||||
if (data.type !== 'FeatureCollection') return data;
|
||||
|
||||
const predicate = this._getFilterPredicate(filter);
|
||||
if (!predicate) return data;
|
||||
|
||||
return {type: 'FeatureCollection', features: data.features.filter(feature => predicate(feature))};
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets a predicate function that can be used to filter GeoJSON features.
|
||||
*/
|
||||
_getFilterPredicate(filter: FilterSpecification): (feature: GeoJSON.Feature) => boolean {
|
||||
if (typeof filter !== 'boolean' && !filter?.length) return undefined;
|
||||
|
||||
const compiled = createExpression(filter, {type: 'boolean', 'property-type': 'data-driven', overridable: false, transition: false} as any);
|
||||
if (compiled.result === 'error') {
|
||||
throw new Error(compiled.value.map(err => `${err.key}: ${err.message}`).join(', '));
|
||||
}
|
||||
|
||||
const predicate = (feature: GeoJSON.Feature) => compiled.value.evaluate({zoom: 0}, feature as any);
|
||||
return predicate;
|
||||
}
|
||||
|
||||
async removeSource(_params: RemoveSourceParams): Promise<void> {
|
||||
this._pendingRequest?.abort();
|
||||
}
|
||||
|
||||
getClusterExpansionZoom(params: ClusterIDAndSource): number {
|
||||
return this._geoJSONIndex.getClusterExpansionZoom(params.clusterId);
|
||||
}
|
||||
|
||||
getClusterChildren(params: ClusterIDAndSource): Array<GeoJSON.Feature> {
|
||||
return this._geoJSONIndex.getClusterChildren(params.clusterId);
|
||||
}
|
||||
|
||||
getClusterLeaves(params: {
|
||||
clusterId: number;
|
||||
limit: number;
|
||||
offset: number;
|
||||
}): Array<GeoJSON.Feature> {
|
||||
return this._geoJSONIndex.getClusterLeaves(params.clusterId, params.limit, params.offset);
|
||||
}
|
||||
}
|
||||
|
||||
export function createGeoJSONIndex(data: GeoJSON.GeoJSON, params: LoadGeoJSONParameters): GeoJSONVT {
|
||||
const options = extend(params.geojsonVtOptions || {}, {
|
||||
updateable: true,
|
||||
clusterOptions: getSuperclusterOptions(params),
|
||||
});
|
||||
|
||||
return new GeoJSONVT(data, options);
|
||||
}
|
||||
|
||||
function getSuperclusterOptions({geojsonVtOptions, clusterProperties}: LoadGeoJSONParameters) {
|
||||
if (!clusterProperties || !geojsonVtOptions.clusterOptions) return geojsonVtOptions.clusterOptions;
|
||||
|
||||
const mapExpressions = {};
|
||||
const reduceExpressions = {};
|
||||
const globals = {accumulated: null, zoom: 0};
|
||||
const feature = {properties: null};
|
||||
const propertyNames = Object.keys(clusterProperties);
|
||||
|
||||
for (const key of propertyNames) {
|
||||
const [operator, mapExpression] = clusterProperties[key];
|
||||
|
||||
const mapExpressionParsed = createExpression(mapExpression);
|
||||
const reduceExpressionParsed = createExpression(
|
||||
typeof operator === 'string' ? [operator, ['accumulated'], ['get', key]] : operator);
|
||||
|
||||
mapExpressions[key] = mapExpressionParsed.value;
|
||||
reduceExpressions[key] = reduceExpressionParsed.value;
|
||||
}
|
||||
|
||||
geojsonVtOptions.clusterOptions.map = (pointProperties) => {
|
||||
feature.properties = pointProperties;
|
||||
const properties = {};
|
||||
for (const key of propertyNames) {
|
||||
properties[key] = mapExpressions[key].evaluate(globals, feature);
|
||||
}
|
||||
return properties;
|
||||
};
|
||||
geojsonVtOptions.clusterOptions.reduce = (accumulated, clusterProperties) => {
|
||||
feature.properties = clusterProperties;
|
||||
for (const key of propertyNames) {
|
||||
globals.accumulated = accumulated[key];
|
||||
accumulated[key] = reduceExpressions[key].evaluate(globals, feature);
|
||||
}
|
||||
};
|
||||
return geojsonVtOptions.clusterOptions;
|
||||
}
|
||||
367
node_modules/maplibre-gl/src/source/image_source.test.ts
generated
vendored
Normal file
367
node_modules/maplibre-gl/src/source/image_source.test.ts
generated
vendored
Normal file
@@ -0,0 +1,367 @@
|
||||
import {describe, beforeEach, test, expect, vi} from 'vitest';
|
||||
import {ImageSource} from './image_source';
|
||||
import {Evented} from '../util/evented';
|
||||
import {type IReadonlyTransform} from '../geo/transform_interface';
|
||||
import {extend, MAX_TILE_ZOOM} from '../util/util';
|
||||
import {type FakeServer, fakeServer} from 'nise';
|
||||
import {type RequestManager} from '../util/request_manager';
|
||||
import {sleep, stubAjaxGetImage, waitForEvent} from '../util/test/util';
|
||||
import {Tile} from '../tile/tile';
|
||||
import {OverscaledTileID} from '../tile/tile_id';
|
||||
import {type Texture} from '../render/texture';
|
||||
import type {ImageSourceSpecification} from '@maplibre/maplibre-gl-style-spec';
|
||||
import {MercatorTransform} from '../geo/projection/mercator_transform';
|
||||
|
||||
function createSource(options) {
|
||||
options = extend({
|
||||
coordinates: [[0, 0], [1, 0], [1, 1], [0, 1]]
|
||||
}, options);
|
||||
|
||||
const source = new ImageSource('id', options, {} as any, options.eventedParent);
|
||||
return source;
|
||||
}
|
||||
|
||||
class StubMap extends Evented {
|
||||
transform: IReadonlyTransform;
|
||||
painter: any;
|
||||
_requestManager: RequestManager;
|
||||
|
||||
constructor() {
|
||||
super();
|
||||
this.transform = new MercatorTransform();
|
||||
this._requestManager = {
|
||||
transformRequest: (url) => {
|
||||
return {url};
|
||||
}
|
||||
} as any as RequestManager;
|
||||
this.painter = {
|
||||
context: {
|
||||
gl: {}
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
describe('ImageSource', () => {
|
||||
stubAjaxGetImage(undefined);
|
||||
let server: FakeServer;
|
||||
|
||||
beforeEach(() => {
|
||||
global.fetch = null;
|
||||
server = fakeServer.create();
|
||||
server.respondWith(new ArrayBuffer(1));
|
||||
server.respondWith('/missing-image.png', [404, {}, '']);
|
||||
});
|
||||
|
||||
test('constructor', () => {
|
||||
const source = createSource({url: '/image.png'});
|
||||
|
||||
expect(source.minzoom).toBe(0);
|
||||
expect(source.maxzoom).toBe(22);
|
||||
expect(source.tileSize).toBe(512);
|
||||
});
|
||||
|
||||
test('fires dataloading event', async () => {
|
||||
const source = createSource({url: '/image.png'});
|
||||
source.on('dataloading', (e) => {
|
||||
expect(e.dataType).toBe('source');
|
||||
});
|
||||
source.onAdd(new StubMap() as any);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await sleep(0);
|
||||
expect(source.image).toBeTruthy();
|
||||
});
|
||||
|
||||
test('transforms url request', () => {
|
||||
const source = createSource({url: '/image.png'});
|
||||
const map = new StubMap() as any;
|
||||
const spy = vi.spyOn(map._requestManager, 'transformRequest');
|
||||
source.onAdd(map);
|
||||
server.respond();
|
||||
expect(spy).toHaveBeenCalledTimes(1);
|
||||
expect(spy.mock.calls[0][0]).toBe('/image.png');
|
||||
expect(spy.mock.calls[0][1]).toBe('Image');
|
||||
});
|
||||
|
||||
test('can asynchronously transform request', async () => {
|
||||
const source = createSource({url: '/image.png'});
|
||||
const map = new StubMap() as any;
|
||||
map._requestManager = {
|
||||
transformRequest: async (url) => ({
|
||||
url,
|
||||
headers: {Authorization: 'Bearer token'}
|
||||
})
|
||||
};
|
||||
const promise = source.once('data');
|
||||
source.onAdd(map);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
expect(server.requests[0].url).toBe('/image.png');
|
||||
expect(server.requests[0].requestHeaders['Authorization']).toBe('Bearer token');
|
||||
});
|
||||
|
||||
test('updates url from updateImage', () => {
|
||||
const source = createSource({url: '/image.png'});
|
||||
const map = new StubMap() as any;
|
||||
const spy = vi.spyOn(map._requestManager, 'transformRequest');
|
||||
source.onAdd(map);
|
||||
server.respond();
|
||||
expect(spy).toHaveBeenCalledTimes(1);
|
||||
expect(spy.mock.calls[0][0]).toBe('/image.png');
|
||||
expect(spy.mock.calls[0][1]).toBe('Image');
|
||||
source.updateImage({url: '/image2.png'});
|
||||
server.respond();
|
||||
expect(spy).toHaveBeenCalledTimes(2);
|
||||
expect(spy.mock.calls[1][0]).toBe('/image2.png');
|
||||
expect(spy.mock.calls[1][1]).toBe('Image');
|
||||
});
|
||||
|
||||
test('sets coordinates', () => {
|
||||
const source = createSource({url: '/image.png'});
|
||||
const map = new StubMap() as any;
|
||||
source.onAdd(map);
|
||||
server.respond();
|
||||
const beforeSerialized = source.serialize();
|
||||
expect(beforeSerialized.coordinates).toEqual([[0, 0], [1, 0], [1, 1], [0, 1]]);
|
||||
source.setCoordinates([[0, 0], [-1, 0], [-1, -1], [0, -1]]);
|
||||
const afterSerialized = source.serialize();
|
||||
expect(afterSerialized.coordinates).toEqual([[0, 0], [-1, 0], [-1, -1], [0, -1]]);
|
||||
});
|
||||
|
||||
test('sets coordinates via updateImage', async () => {
|
||||
const source = createSource({url: '/image.png'});
|
||||
const map = new StubMap() as any;
|
||||
source.onAdd(map);
|
||||
server.respond();
|
||||
const beforeSerialized = source.serialize();
|
||||
expect(beforeSerialized.coordinates).toEqual([[0, 0], [1, 0], [1, 1], [0, 1]]);
|
||||
source.updateImage({
|
||||
url: '/image2.png',
|
||||
coordinates: [[0, 0], [-1, 0], [-1, -1], [0, -1]]
|
||||
});
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await sleep(0);
|
||||
const afterSerialized = source.serialize();
|
||||
expect(afterSerialized.coordinates).toEqual([[0, 0], [-1, 0], [-1, -1], [0, -1]]);
|
||||
});
|
||||
|
||||
test('fires data event when content is loaded', async () => {
|
||||
const source = createSource({url: '/image.png'});
|
||||
const promise = waitForEvent(source, 'data', (e) => e.dataType === 'source' && e.sourceDataType === 'content');
|
||||
source.onAdd(new StubMap() as any);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
expect(typeof source.tileID == 'object').toBeTruthy();
|
||||
});
|
||||
|
||||
test('fires data event when metadata is loaded', async () => {
|
||||
const source = createSource({url: '/image.png'});
|
||||
const promise = waitForEvent(source, 'data', (e) => e.dataType === 'source' && e.sourceDataType === 'metadata');
|
||||
source.onAdd(new StubMap() as any);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await expect(promise).resolves.toBeDefined();
|
||||
});
|
||||
|
||||
test('fires idle event on prepare call when there is at least one not loaded tile', async () => {
|
||||
const source = createSource({url: '/image.png'});
|
||||
const tile = new Tile(new OverscaledTileID(1, 0, 1, 0, 0), 512);
|
||||
const promise = waitForEvent(source, 'data', (e) => e.dataType === 'source' && e.sourceDataType === 'idle');
|
||||
source.onAdd(new StubMap() as any);
|
||||
server.respond();
|
||||
|
||||
source.tiles[String(tile.tileID.wrap)] = tile;
|
||||
source.image = new ImageBitmap();
|
||||
// assign dummies directly so we don't need to stub the gl things
|
||||
source.texture = {} as Texture;
|
||||
source.prepare();
|
||||
await promise;
|
||||
expect(tile.state).toBe('loaded');
|
||||
});
|
||||
|
||||
test('serialize url and coordinates', () => {
|
||||
const source = createSource({url: '/image.png'});
|
||||
|
||||
const serialized = source.serialize() as ImageSourceSpecification;
|
||||
expect(serialized.type).toBe('image');
|
||||
expect(serialized.url).toBe('/image.png');
|
||||
expect(serialized.coordinates).toEqual([[0, 0], [1, 0], [1, 1], [0, 1]]);
|
||||
});
|
||||
|
||||
test('allows using updateImage before initial image is loaded', async () => {
|
||||
const map = new StubMap() as any;
|
||||
const source = createSource({url: '/image.png', eventedParent: map});
|
||||
|
||||
// Suppress errors because we're aborting when updating.
|
||||
map.on('error', () => {});
|
||||
source.onAdd(map);
|
||||
expect(source.image).toBeUndefined();
|
||||
source.updateImage({url: '/image2.png'});
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await sleep(10);
|
||||
|
||||
expect(source.image).toBeTruthy();
|
||||
});
|
||||
|
||||
test('cancels request if updateImage is used', async () => {
|
||||
const map = new StubMap() as any;
|
||||
const source = createSource({url: '/image.png', eventedParent: map});
|
||||
|
||||
// Suppress errors because we're aborting.
|
||||
map.on('error', () => {});
|
||||
source.onAdd(map);
|
||||
await sleep(0);
|
||||
|
||||
const spy = vi.spyOn(server.requests[0] as any, 'abort');
|
||||
|
||||
source.updateImage({url: '/image2.png'});
|
||||
expect(spy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('marks the source as loaded when the request has received a response', async () => {
|
||||
const map = new StubMap() as any;
|
||||
const source = createSource({url: '/image.png', eventedParent: map});
|
||||
|
||||
expect(source.loaded()).toBe(false);
|
||||
source.onAdd(map);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await sleep(0);
|
||||
expect(source.loaded()).toBe(true);
|
||||
|
||||
const missingImagesource = createSource({url: '/missing-image.png', eventedParent: map});
|
||||
|
||||
// Suppress errors as we're loading a missing image.
|
||||
map.on('error', () => {});
|
||||
|
||||
expect(missingImagesource.loaded()).toBe(false);
|
||||
missingImagesource.onAdd(map);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await sleep(0);
|
||||
|
||||
expect(missingImagesource.loaded()).toBe(true);
|
||||
});
|
||||
|
||||
test('does not throw when updateImage is called while a request is pending', async () => {
|
||||
const map = new StubMap() as any;
|
||||
const source = createSource({url: '/image.png', eventedParent: map});
|
||||
|
||||
const errorHandler = vi.fn();
|
||||
source.on('error', errorHandler);
|
||||
|
||||
source.onAdd(map);
|
||||
source.updateImage({url: '/image2.png'});
|
||||
|
||||
await sleep(0);
|
||||
|
||||
expect(errorHandler).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
describe('terrainTileRanges', () => {
|
||||
test('sets tile ranges for all zoom levels', () => {
|
||||
const source = createSource({url: '/image.png'});
|
||||
const map = new StubMap() as any;
|
||||
source.onAdd(map);
|
||||
server.respond();
|
||||
source.setCoordinates([[-10, 10], [10, 10], [10, -10], [-10, -10]]);
|
||||
|
||||
for (let z = 0; z <= MAX_TILE_ZOOM; z++) {
|
||||
expect(source.terrainTileRanges[z]).toBeDefined();
|
||||
}
|
||||
});
|
||||
|
||||
test('calculates tile ranges properly', () => {
|
||||
const source = createSource({url: '/image.png'});
|
||||
const map = new StubMap() as any;
|
||||
source.onAdd(map);
|
||||
server.respond();
|
||||
source.setCoordinates([[11.39585,47.30074],[11.46585,47.30074],[11.46585,47.25074],[11.39585,47.25074]]);
|
||||
expect(source.terrainTileRanges[9]).toEqual({
|
||||
minWrap: 0,
|
||||
maxWrap: 0,
|
||||
minTileXWrapped: 272,
|
||||
maxTileXWrapped: 272,
|
||||
minTileY: 179,
|
||||
maxTileY: 179
|
||||
});
|
||||
expect(source.terrainTileRanges[10]).toEqual({
|
||||
minWrap: 0,
|
||||
maxWrap: 0,
|
||||
minTileXWrapped: 544,
|
||||
maxTileXWrapped: 544,
|
||||
minTileY: 358,
|
||||
maxTileY: 359
|
||||
});
|
||||
expect(source.terrainTileRanges[11]).toEqual({
|
||||
minWrap: 0,
|
||||
maxWrap: 0,
|
||||
minTileXWrapped: 1088,
|
||||
maxTileXWrapped: 1089,
|
||||
minTileY: 717,
|
||||
maxTileY: 718
|
||||
});
|
||||
expect(source.terrainTileRanges[12]).toEqual({
|
||||
minWrap: 0,
|
||||
maxWrap: 0,
|
||||
minTileXWrapped: 2177,
|
||||
maxTileXWrapped: 2178,
|
||||
minTileY: 1435,
|
||||
maxTileY: 1436
|
||||
});
|
||||
});
|
||||
|
||||
test('calculates tile ranges for an image exceeds the world bounds - east', () => {
|
||||
const source = createSource({url: '/image.png'});
|
||||
const map = new StubMap() as any;
|
||||
source.onAdd(map);
|
||||
server.respond();
|
||||
source.setCoordinates([[-180, 60], [270, 60], [270, -60], [-180, -60]]);
|
||||
expect(source.terrainTileRanges[0]).toEqual({
|
||||
minWrap: 0,
|
||||
maxWrap: 1,
|
||||
minTileXWrapped: 0,
|
||||
maxTileXWrapped: 0,
|
||||
minTileY: 0,
|
||||
maxTileY: 0
|
||||
});
|
||||
expect(source.terrainTileRanges[1]).toEqual({
|
||||
minWrap: 0,
|
||||
maxWrap: 1,
|
||||
minTileXWrapped: 0,
|
||||
maxTileXWrapped: 0,
|
||||
minTileY: 0,
|
||||
maxTileY: 1
|
||||
});
|
||||
});
|
||||
|
||||
test('calculates tile ranges for an image exceeds the world bounds - west', () => {
|
||||
const source = createSource({url: '/image.png'});
|
||||
const map = new StubMap() as any;
|
||||
source.onAdd(map);
|
||||
server.respond();
|
||||
source.setCoordinates([[120, 60], [-270, 60], [-270, -60], [120, -60]]);
|
||||
expect(source.terrainTileRanges[0]).toEqual({
|
||||
minWrap: -1,
|
||||
maxWrap: 0,
|
||||
minTileXWrapped: 0,
|
||||
maxTileXWrapped: 0,
|
||||
minTileY: 0,
|
||||
maxTileY: 0
|
||||
});
|
||||
expect(source.terrainTileRanges[1]).toEqual({
|
||||
minWrap: -1,
|
||||
maxWrap: 0,
|
||||
minTileXWrapped: 1,
|
||||
maxTileXWrapped: 1,
|
||||
minTileY: 0,
|
||||
maxTileY: 1
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
382
node_modules/maplibre-gl/src/source/image_source.ts
generated
vendored
Normal file
382
node_modules/maplibre-gl/src/source/image_source.ts
generated
vendored
Normal file
@@ -0,0 +1,382 @@
|
||||
import {CanonicalTileID} from '../tile/tile_id';
|
||||
import {Event, ErrorEvent, Evented} from '../util/evented';
|
||||
import {ImageRequest} from '../util/image_request';
|
||||
import {ResourceType} from '../util/request_manager';
|
||||
import {Texture} from '../render/texture';
|
||||
import {MercatorCoordinate} from '../geo/mercator_coordinate';
|
||||
|
||||
import type {Source} from './source';
|
||||
import type {CanvasSourceSpecification} from './canvas_source';
|
||||
import type {Map} from '../ui/map';
|
||||
import type {Dispatcher} from '../util/dispatcher';
|
||||
import type {Tile} from '../tile/tile';
|
||||
import type {
|
||||
ImageSourceSpecification,
|
||||
VideoSourceSpecification
|
||||
} from '@maplibre/maplibre-gl-style-spec';
|
||||
import type Point from '@mapbox/point-geometry';
|
||||
import {MAX_TILE_ZOOM} from '../util/util';
|
||||
import {Bounds} from '../geo/bounds';
|
||||
import {isAbortError} from '../util/abort_error';
|
||||
|
||||
/**
|
||||
* Four geographical coordinates,
|
||||
* represented as arrays of longitude and latitude numbers, which define the corners of the image.
|
||||
* The coordinates start at the top left corner of the image and proceed in clockwise order.
|
||||
* They do not have to represent a rectangle.
|
||||
*/
|
||||
export type Coordinates = [[number, number], [number, number], [number, number], [number, number]];
|
||||
|
||||
/**
|
||||
* The options object for the {@link ImageSource.updateImage} method
|
||||
*/
|
||||
export type UpdateImageOptions = {
|
||||
/**
|
||||
* Required image URL.
|
||||
*/
|
||||
url: string;
|
||||
/**
|
||||
* The image coordinates
|
||||
*/
|
||||
coordinates?: Coordinates;
|
||||
};
|
||||
|
||||
export type CanonicalTileRange = {
|
||||
minTileY: number;
|
||||
maxTileY: number;
|
||||
|
||||
/**
|
||||
* Image can exceed the boundary of a single "world" (tile 0/0/0),
|
||||
* so we need to know the tile range for wrapping.
|
||||
*/
|
||||
minTileXWrapped: number;
|
||||
maxTileXWrapped: number;
|
||||
minWrap: number;
|
||||
maxWrap: number;
|
||||
};
|
||||
|
||||
/**
|
||||
* A data source containing an image.
|
||||
* (See the [Style Specification](https://maplibre.org/maplibre-style-spec/#sources-image) for detailed documentation of options.)
|
||||
*
|
||||
* @group Sources
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // add to map
|
||||
* map.addSource('some id', {
|
||||
* type: 'image',
|
||||
* url: 'https://www.maplibre.org/images/foo.png',
|
||||
* coordinates: [
|
||||
* [-76.54, 39.18],
|
||||
* [-76.52, 39.18],
|
||||
* [-76.52, 39.17],
|
||||
* [-76.54, 39.17]
|
||||
* ]
|
||||
* });
|
||||
*
|
||||
* // update coordinates
|
||||
* let mySource = map.getSource('some id');
|
||||
* mySource.setCoordinates([
|
||||
* [-76.54335737228394, 39.18579907229748],
|
||||
* [-76.52803659439087, 39.1838364847587],
|
||||
* [-76.5295386314392, 39.17683392507606],
|
||||
* [-76.54520273208618, 39.17876344106642]
|
||||
* ]);
|
||||
*
|
||||
* // update url and coordinates simultaneously
|
||||
* mySource.updateImage({
|
||||
* url: 'https://www.maplibre.org/images/bar.png',
|
||||
* coordinates: [
|
||||
* [-76.54335737228394, 39.18579907229748],
|
||||
* [-76.52803659439087, 39.1838364847587],
|
||||
* [-76.5295386314392, 39.17683392507606],
|
||||
* [-76.54520273208618, 39.17876344106642]
|
||||
* ]
|
||||
* })
|
||||
*
|
||||
* map.removeSource('some id'); // remove
|
||||
* ```
|
||||
*/
|
||||
export class ImageSource extends Evented implements Source {
|
||||
type: string;
|
||||
id: string;
|
||||
minzoom: number;
|
||||
maxzoom: number;
|
||||
tileSize: number;
|
||||
url: string;
|
||||
/**
|
||||
* This object is used to store the range of terrain tiles that overlap with this tile.
|
||||
* It is relevant for image tiles, as the image exceeds single tile boundaries.
|
||||
*/
|
||||
terrainTileRanges: {[zoom: string]: CanonicalTileRange};
|
||||
|
||||
coordinates: Coordinates;
|
||||
tiles: {[_: string]: Tile};
|
||||
options: any;
|
||||
dispatcher: Dispatcher;
|
||||
map: Map;
|
||||
texture: Texture | null;
|
||||
image: HTMLImageElement | ImageBitmap;
|
||||
tileID: CanonicalTileID;
|
||||
tileCoords: Array<Point>;
|
||||
flippedWindingOrder: boolean = false;
|
||||
_loaded: boolean;
|
||||
_request: AbortController;
|
||||
|
||||
/** @internal */
|
||||
constructor(id: string, options: ImageSourceSpecification | VideoSourceSpecification | CanvasSourceSpecification, dispatcher: Dispatcher, eventedParent: Evented) {
|
||||
super();
|
||||
this.id = id;
|
||||
this.dispatcher = dispatcher;
|
||||
this.coordinates = options.coordinates;
|
||||
|
||||
this.type = 'image';
|
||||
this.minzoom = 0;
|
||||
this.maxzoom = 22;
|
||||
this.tileSize = 512;
|
||||
this.tiles = {};
|
||||
this._loaded = false;
|
||||
|
||||
this.setEventedParent(eventedParent);
|
||||
|
||||
this.options = options;
|
||||
}
|
||||
|
||||
async load(newCoordinates?: Coordinates): Promise<void> {
|
||||
this._loaded = false;
|
||||
this.fire(new Event('dataloading', {dataType: 'source'}));
|
||||
|
||||
this.url = this.options.url;
|
||||
|
||||
this._request = new AbortController();
|
||||
try {
|
||||
const image = await ImageRequest.getImage(await this.map._requestManager.transformRequest(this.url, ResourceType.Image), this._request);
|
||||
this._request = null;
|
||||
this._loaded = true;
|
||||
|
||||
if (image && image.data) {
|
||||
this.image = image.data;
|
||||
if (newCoordinates) {
|
||||
this.coordinates = newCoordinates;
|
||||
}
|
||||
this._finishLoading();
|
||||
}
|
||||
} catch (err) {
|
||||
this._request = null;
|
||||
this._loaded = true;
|
||||
if (!isAbortError(err)) {
|
||||
this.fire(new ErrorEvent(err));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
loaded(): boolean {
|
||||
return this._loaded;
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the image URL and, optionally, the coordinates. To avoid having the image flash after changing,
|
||||
* set the `raster-fade-duration` paint property on the raster layer to 0.
|
||||
*
|
||||
* @param options - The options object.
|
||||
*/
|
||||
updateImage(options: UpdateImageOptions): this {
|
||||
if (!options.url) {
|
||||
return this;
|
||||
}
|
||||
|
||||
if (this._request) {
|
||||
this._request.abort();
|
||||
this._request = null;
|
||||
}
|
||||
|
||||
this.options.url = options.url;
|
||||
this.load(options.coordinates).finally(() => { this.texture = null; });
|
||||
return this;
|
||||
}
|
||||
|
||||
_finishLoading() {
|
||||
if (this.map) {
|
||||
this.setCoordinates(this.coordinates);
|
||||
this.fire(new Event('data', {dataType: 'source', sourceDataType: 'metadata'}));
|
||||
}
|
||||
}
|
||||
|
||||
onAdd(map: Map) {
|
||||
this.map = map;
|
||||
this.load();
|
||||
}
|
||||
|
||||
onRemove() {
|
||||
if (this._request) {
|
||||
this._request.abort();
|
||||
this._request = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the image's coordinates and re-renders the map.
|
||||
*
|
||||
* @param coordinates - Four geographical coordinates,
|
||||
* represented as arrays of longitude and latitude numbers, which define the corners of the image.
|
||||
* The coordinates start at the top left corner of the image and proceed in clockwise order.
|
||||
* They do not have to represent a rectangle.
|
||||
*/
|
||||
setCoordinates(coordinates: Coordinates): this {
|
||||
this.coordinates = coordinates;
|
||||
|
||||
// Calculate which mercator tile is suitable for rendering the video in
|
||||
// and create a buffer with the corner coordinates. These coordinates
|
||||
// may be outside the tile, because raster tiles aren't clipped when rendering.
|
||||
|
||||
// transform the geo coordinates into (zoom 0) tile space coordinates
|
||||
const cornerCoords = coordinates.map(MercatorCoordinate.fromLngLat);
|
||||
|
||||
// Compute the coordinates of the tile we'll use to hold this image's
|
||||
// render data
|
||||
this.tileID = getCoordinatesCenterTileID(cornerCoords);
|
||||
|
||||
// Compute tiles overlapping with the image. We need to know for which
|
||||
// terrain tiles we have to render the image.
|
||||
this.terrainTileRanges = this._getOverlappingTileRanges(cornerCoords);
|
||||
|
||||
// Constrain min/max zoom to our tile's zoom level in order to force
|
||||
// TileManager to request this tile (no matter what the map's zoom
|
||||
// level)
|
||||
this.minzoom = this.maxzoom = this.tileID.z;
|
||||
|
||||
// Transform the corner coordinates into the coordinate space of our
|
||||
// tile.
|
||||
this.tileCoords = cornerCoords.map((coord) => this.tileID.getTilePoint(coord)._round());
|
||||
this.flippedWindingOrder = hasWrongWindingOrder(this.tileCoords);
|
||||
|
||||
this.fire(new Event('data', {dataType: 'source', sourceDataType: 'content'}));
|
||||
return this;
|
||||
}
|
||||
|
||||
prepare() {
|
||||
if (Object.keys(this.tiles).length === 0 || !this.image) {
|
||||
return;
|
||||
}
|
||||
|
||||
const context = this.map.painter.context;
|
||||
const gl = context.gl;
|
||||
|
||||
if (!this.texture) {
|
||||
this.texture = new Texture(context, this.image, gl.RGBA);
|
||||
this.texture.bind(gl.LINEAR, gl.CLAMP_TO_EDGE);
|
||||
}
|
||||
|
||||
let newTilesLoaded = false;
|
||||
for (const w in this.tiles) {
|
||||
const tile = this.tiles[w];
|
||||
if (tile.state !== 'loaded') {
|
||||
tile.state = 'loaded';
|
||||
tile.texture = this.texture;
|
||||
newTilesLoaded = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (newTilesLoaded) {
|
||||
this.fire(new Event('data', {dataType: 'source', sourceDataType: 'idle', sourceId: this.id}));
|
||||
}
|
||||
}
|
||||
|
||||
async loadTile(tile: Tile): Promise<void> {
|
||||
// We have a single tile -- whose coordinates are this.tileID -- that
|
||||
// covers the image we want to render. If that's the one being
|
||||
// requested, set it up with the image; otherwise, mark the tile as
|
||||
// `errored` to indicate that we have no data for it.
|
||||
// If the world wraps, we may have multiple "wrapped" copies of the
|
||||
// single tile.
|
||||
if (this.tileID && this.tileID.equals(tile.tileID.canonical)) {
|
||||
this.tiles[String(tile.tileID.wrap)] = tile;
|
||||
tile.buckets = {};
|
||||
} else {
|
||||
tile.state = 'errored';
|
||||
}
|
||||
}
|
||||
|
||||
serialize(): ImageSourceSpecification | VideoSourceSpecification | CanvasSourceSpecification {
|
||||
return {
|
||||
type: 'image',
|
||||
url: this.options.url,
|
||||
coordinates: this.coordinates
|
||||
};
|
||||
}
|
||||
|
||||
hasTransition() {
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a list of coordinates, determine overlapping tile ranges for all zoom levels.
|
||||
*
|
||||
* @returns Overlapping tile ranges for all zoom levels.
|
||||
* @internal
|
||||
*/
|
||||
private _getOverlappingTileRanges(
|
||||
coords: Array<MercatorCoordinate>
|
||||
): {[zoom: string]: CanonicalTileRange} {
|
||||
const {minX, minY, maxX, maxY} = Bounds.fromPoints(coords);
|
||||
|
||||
const ranges: {[zoom: string]: CanonicalTileRange} = {};
|
||||
|
||||
for (let z = 0; z <= MAX_TILE_ZOOM; z++) {
|
||||
const tilesAtZoom = Math.pow(2, z);
|
||||
const minTileX = Math.floor(minX * tilesAtZoom);
|
||||
const minTileY = Math.floor(minY * tilesAtZoom);
|
||||
const maxTileX = Math.floor(maxX * tilesAtZoom);
|
||||
const maxTileY = Math.floor(maxY * tilesAtZoom);
|
||||
|
||||
const minTileXWrapped = ((minTileX % tilesAtZoom) + tilesAtZoom) % tilesAtZoom;
|
||||
const maxTileXWrapped = maxTileX % tilesAtZoom;
|
||||
const minWrap = Math.floor(minTileX / tilesAtZoom);
|
||||
const maxWrap = Math.floor(maxTileX / tilesAtZoom);
|
||||
|
||||
ranges[z] = {
|
||||
minWrap,
|
||||
maxWrap,
|
||||
minTileXWrapped,
|
||||
maxTileXWrapped,
|
||||
minTileY,
|
||||
maxTileY
|
||||
};
|
||||
}
|
||||
|
||||
return ranges;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Given a list of coordinates, get their center as a coordinate.
|
||||
*
|
||||
* @returns centerpoint
|
||||
* @internal
|
||||
*/
|
||||
export function getCoordinatesCenterTileID(coords: Array<MercatorCoordinate>) {
|
||||
const bounds = Bounds.fromPoints(coords);
|
||||
|
||||
const dx = bounds.width();
|
||||
const dy = bounds.height();
|
||||
const dMax = Math.max(dx, dy);
|
||||
const zoom = Math.max(0, Math.floor(-Math.log(dMax) / Math.LN2));
|
||||
const tilesAtZoom = Math.pow(2, zoom);
|
||||
|
||||
return new CanonicalTileID(
|
||||
zoom,
|
||||
Math.floor((bounds.minX + bounds.maxX) / 2 * tilesAtZoom),
|
||||
Math.floor((bounds.minY + bounds.maxY) / 2 * tilesAtZoom));
|
||||
}
|
||||
|
||||
function hasWrongWindingOrder(coords: Array<Point>) {
|
||||
const e0x = coords[1].x - coords[0].x;
|
||||
const e0y = coords[1].y - coords[0].y;
|
||||
const e1x = coords[2].x - coords[0].x;
|
||||
const e1y = coords[2].y - coords[0].y;
|
||||
|
||||
const crossProduct = e0x * e1y - e0y * e1x;
|
||||
|
||||
return crossProduct < 0;
|
||||
}
|
||||
223
node_modules/maplibre-gl/src/source/load_tilejson.test.ts
generated
vendored
Normal file
223
node_modules/maplibre-gl/src/source/load_tilejson.test.ts
generated
vendored
Normal file
@@ -0,0 +1,223 @@
|
||||
import {describe, beforeEach, afterEach, test, expect} from 'vitest';
|
||||
import {fakeServer, type FakeServer} from 'nise';
|
||||
import {loadTileJson} from './load_tilejson';
|
||||
import {RequestManager} from '../util/request_manager';
|
||||
import {ABORT_ERROR} from '../util/abort_error';
|
||||
import {sleep} from '../util/test/util';
|
||||
|
||||
import {type RasterSourceSpecification} from '@maplibre/maplibre-gl-style-spec';
|
||||
|
||||
describe('loadTileJson', () => {
|
||||
let server: FakeServer;
|
||||
beforeEach(() => {
|
||||
global.fetch = null;
|
||||
server = fakeServer.create();
|
||||
});
|
||||
afterEach(() => {
|
||||
server.restore();
|
||||
});
|
||||
|
||||
const requestManager = new RequestManager();
|
||||
|
||||
test('fetches and returns TileJSON', async () => {
|
||||
const options = {
|
||||
type: 'raster',
|
||||
url: 'http://example.com/test.json',
|
||||
} satisfies RasterSourceSpecification;
|
||||
|
||||
const mockTileJSON = {
|
||||
tiles: ['http://example.com/tile/{z}/{x}/{y}.png'],
|
||||
minzoom: 0,
|
||||
maxzoom: 14,
|
||||
attribution: 'Test Attribution',
|
||||
bounds: [-180, -85, 180, 85],
|
||||
scheme: 'xyz',
|
||||
tileSize: 256,
|
||||
};
|
||||
|
||||
server.respondWith(request => {
|
||||
request.respond(200, {'Content-Type': 'application/json'}, JSON.stringify(mockTileJSON));
|
||||
});
|
||||
|
||||
const promise = loadTileJson(options, requestManager, new AbortController());
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
const result = await promise;
|
||||
|
||||
expect(result).toEqual(mockTileJSON);
|
||||
});
|
||||
|
||||
test('fetches and returns TileJSON (async transformRequest)', async () => {
|
||||
const options = {
|
||||
type: 'raster',
|
||||
url: 'http://example.com/test.json',
|
||||
} satisfies RasterSourceSpecification;
|
||||
|
||||
const mockTileJSON = {
|
||||
tiles: ['http://example.com/tile/{z}/{x}/{y}.png'],
|
||||
minzoom: 0,
|
||||
maxzoom: 14,
|
||||
attribution: 'Test Attribution',
|
||||
bounds: [-180, -85, 180, 85],
|
||||
scheme: 'xyz',
|
||||
tileSize: 256,
|
||||
};
|
||||
|
||||
server.respondWith(request => {
|
||||
request.respond(200, {'Content-Type': 'application/json'}, JSON.stringify(mockTileJSON));
|
||||
});
|
||||
|
||||
const requestManager = new RequestManager(async (url) => ({
|
||||
url,
|
||||
headers: {Authorization: 'Bearer token'}
|
||||
}));
|
||||
const promise = loadTileJson(options, requestManager, new AbortController());
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
const result = await promise;
|
||||
|
||||
expect(result).toEqual(mockTileJSON);
|
||||
expect(server.requests[0].url).toBe('http://example.com/test.json');
|
||||
expect(server.requests[0].requestHeaders.Authorization).toBe('Bearer token');
|
||||
});
|
||||
|
||||
test('combines input and TileJSON', async () => {
|
||||
const options = {
|
||||
type: 'raster',
|
||||
url: 'http://example.com/test.json',
|
||||
minzoom: 5,
|
||||
tiles: ['http://example2.com/tile/{z}/{x}/{y}.png'],
|
||||
} satisfies RasterSourceSpecification;
|
||||
|
||||
const mockTileJSON = {
|
||||
tiles: ['http://example.com/tile/{z}/{x}/{y}.png'],
|
||||
minzoom: 0,
|
||||
maxzoom: 14,
|
||||
attribution: 'Test Attribution',
|
||||
bounds: [-180, -85, 180, 85],
|
||||
scheme: 'xyz',
|
||||
tileSize: 256,
|
||||
};
|
||||
|
||||
server.respondWith(request => {
|
||||
request.respond(200, {'Content-Type': 'application/json'}, JSON.stringify(mockTileJSON));
|
||||
});
|
||||
|
||||
const promise = loadTileJson(options, requestManager, new AbortController());
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
const result = await promise;
|
||||
|
||||
expect(result).toEqual({
|
||||
...mockTileJSON,
|
||||
minzoom: options.minzoom,
|
||||
tiles: options.tiles,
|
||||
});
|
||||
});
|
||||
|
||||
test('excludes non-TileJSON data', async () => {
|
||||
const options = {
|
||||
type: 'raster',
|
||||
url: 'http://example.com/test.json',
|
||||
someData1: 'value1',
|
||||
} as any;
|
||||
|
||||
const mockTileJSON = {
|
||||
tiles: ['http://example.com/tile/{z}/{x}/{y}.png'],
|
||||
minzoom: 0,
|
||||
maxzoom: 14,
|
||||
attribution: 'Test Attribution',
|
||||
bounds: [-180, -85, 180, 85],
|
||||
scheme: 'xyz',
|
||||
tileSize: 256,
|
||||
someData2: 'value2',
|
||||
};
|
||||
|
||||
server.respondWith(request => {
|
||||
request.respond(200, {'Content-Type': 'application/json'}, JSON.stringify(mockTileJSON));
|
||||
});
|
||||
|
||||
const promise = loadTileJson(options, requestManager, new AbortController());
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
const result: any = await promise;
|
||||
|
||||
expect(result.someData1).toBeUndefined();
|
||||
expect(result.someData2).toBeUndefined();
|
||||
});
|
||||
|
||||
test('handles vector_layers in TileJSON', async () => {
|
||||
const options = {
|
||||
type: 'raster',
|
||||
url: 'http://example.com/test.json',
|
||||
} satisfies RasterSourceSpecification;
|
||||
|
||||
const mockTileJSON = {
|
||||
tiles: ['http://example.com/tile/{z}/{x}/{y}.png'],
|
||||
minzoom: 0,
|
||||
maxzoom: 14,
|
||||
attribution: 'Test Attribution',
|
||||
bounds: [-180, -85, 180, 85],
|
||||
scheme: 'xyz',
|
||||
tileSize: 256,
|
||||
vector_layers: [{id: 'layer1'}, {id: 'layer2'}],
|
||||
};
|
||||
|
||||
server.respondWith(request => {
|
||||
request.respond(200, {'Content-Type': 'application/json'}, JSON.stringify(mockTileJSON));
|
||||
});
|
||||
|
||||
const promise = loadTileJson(options, requestManager, new AbortController());
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
const result = await promise;
|
||||
|
||||
expect(result.vectorLayerIds).toEqual(['layer1', 'layer2']);
|
||||
});
|
||||
|
||||
test('handles aborted request', async () => {
|
||||
const options = {
|
||||
type: 'raster',
|
||||
url: 'http://example.com/test.json',
|
||||
} satisfies RasterSourceSpecification;
|
||||
|
||||
const mockTileJSON = {
|
||||
tiles: ['http://example.com/tile/{z}/{x}/{y}.png'],
|
||||
minzoom: 0,
|
||||
maxzoom: 14,
|
||||
attribution: 'Test Attribution',
|
||||
bounds: [-180, -85, 180, 85],
|
||||
scheme: 'xyz',
|
||||
tileSize: 256,
|
||||
};
|
||||
|
||||
server.respondWith(request => {
|
||||
request.respond(200, {'Content-Type': 'application/json'}, JSON.stringify(mockTileJSON));
|
||||
});
|
||||
|
||||
const abortController = new AbortController();
|
||||
const promise = loadTileJson(options, requestManager, abortController);
|
||||
await sleep(0);
|
||||
abortController.abort();
|
||||
server.respond();
|
||||
|
||||
await expect(promise).rejects.toThrow(expect.objectContaining({name: ABORT_ERROR}));
|
||||
});
|
||||
|
||||
test('throws for AJAX errors', async () => {
|
||||
const options = {
|
||||
type: 'raster',
|
||||
url: 'http://example.com/test.json',
|
||||
} satisfies RasterSourceSpecification;
|
||||
|
||||
server.respondWith(request => {
|
||||
request.respond(404, undefined, 'Not Found');
|
||||
});
|
||||
|
||||
const promise = loadTileJson(options, requestManager, new AbortController());
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
|
||||
await expect(promise).rejects.toThrow('AJAXError: Not Found (404): http://example.com/test.json');
|
||||
});
|
||||
});
|
||||
48
node_modules/maplibre-gl/src/source/load_tilejson.ts
generated
vendored
Normal file
48
node_modules/maplibre-gl/src/source/load_tilejson.ts
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
import {pick, extend, type TileJSON} from '../util/util';
|
||||
import {getJSON} from '../util/ajax';
|
||||
import {ResourceType} from '../util/request_manager';
|
||||
import {browser} from '../util/browser';
|
||||
|
||||
import type {RequestManager} from '../util/request_manager';
|
||||
import type {RasterDEMSourceSpecification, RasterSourceSpecification, VectorSourceSpecification} from '@maplibre/maplibre-gl-style-spec';
|
||||
|
||||
/**
 * The subset of TileJSON fields that tiled sources consume, as returned by
 * {@link loadTileJson} after merging the style's explicit source options
 * over the fetched TileJSON.
 */
export type LoadTileJsonResponse = {
    tiles: Array<string>;
    minzoom: number;
    maxzoom: number;
    attribution: string;
    bounds: RasterSourceSpecification['bounds'];
    scheme: RasterSourceSpecification['scheme'];
    tileSize: number;
    encoding: RasterDEMSourceSpecification['encoding'];
    /** Only present when the TileJSON declares `vector_layers`; holds their ids. */
    vectorLayerIds?: Array<string>;
};
|
||||
|
||||
export async function loadTileJson(
|
||||
options: RasterSourceSpecification | RasterDEMSourceSpecification | VectorSourceSpecification,
|
||||
requestManager: RequestManager,
|
||||
abortController: AbortController,
|
||||
targetWindow?: Window,
|
||||
): Promise<LoadTileJsonResponse | null> {
|
||||
let tileJSON: TileJSON | typeof options = options;
|
||||
if (options.url) {
|
||||
const response = await getJSON<TileJSON>(await requestManager.transformRequest(options.url, ResourceType.Source), abortController);
|
||||
tileJSON = response.data;
|
||||
} else {
|
||||
await browser.frameAsync(abortController, targetWindow);
|
||||
}
|
||||
if (!tileJSON) {
|
||||
return null;
|
||||
}
|
||||
const result = pick(
|
||||
// explicit source options take precedence over TileJSON
|
||||
extend(tileJSON, options),
|
||||
['tiles', 'minzoom', 'maxzoom', 'attribution', 'bounds', 'scheme', 'tileSize', 'encoding']
|
||||
) as LoadTileJsonResponse;
|
||||
|
||||
if ('vector_layers' in tileJSON && tileJSON.vector_layers) {
|
||||
result.vectorLayerIds = tileJSON.vector_layers.map((layer) => { return layer.id; });
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
25
node_modules/maplibre-gl/src/source/pixels_to_tile_units.ts
generated
vendored
Normal file
25
node_modules/maplibre-gl/src/source/pixels_to_tile_units.ts
generated
vendored
Normal file
@@ -0,0 +1,25 @@
|
||||
import {EXTENT} from '../data/extent';
|
||||
|
||||
import type {OverscaledTileID} from '../tile/tile_id';
|
||||
|
||||
/**
|
||||
* Converts a pixel value at a the given zoom level to tile units.
|
||||
*
|
||||
* The shaders mostly calculate everything in tile units so style
|
||||
* properties need to be converted from pixels to tile units using this.
|
||||
*
|
||||
* For example, a translation by 30 pixels at zoom 6.5 will be a
|
||||
* translation by pixelsToTileUnits(30, 6.5) tile units.
|
||||
*
|
||||
* @returns value in tile units
|
||||
*/
|
||||
export function pixelsToTileUnits(
|
||||
tile: {
|
||||
tileID: OverscaledTileID;
|
||||
tileSize: number;
|
||||
},
|
||||
pixelValue: number,
|
||||
z: number
|
||||
): number {
|
||||
return pixelValue * (EXTENT / (tile.tileSize * Math.pow(2, z - tile.tileID.overscaledZ)));
|
||||
}
|
||||
48
node_modules/maplibre-gl/src/source/protocol_crud.ts
generated
vendored
Normal file
48
node_modules/maplibre-gl/src/source/protocol_crud.ts
generated
vendored
Normal file
@@ -0,0 +1,48 @@
|
||||
import {type AddProtocolAction, config} from '../util/config';
|
||||
|
||||
export function getProtocol(url: string) {
|
||||
return config.REGISTERED_PROTOCOLS[url.substring(0, url.indexOf('://'))];
|
||||
}
|
||||
/**
|
||||
* Adds a custom load resource function that will be called when using a URL that starts with a custom url schema.
|
||||
* This will happen in the main thread, and workers might call it if they don't know how to handle the protocol.
|
||||
* The example below will be triggered for custom:// urls defined in the sources list in the style definitions.
|
||||
* The function passed will receive the request parameters and should return with the resulting resource,
|
||||
* for example a pbf vector tile, non-compressed, represented as ArrayBuffer.
|
||||
*
|
||||
* @param customProtocol - the protocol to hook, for example 'custom'
|
||||
* @param loadFn - the function to use when trying to fetch a tile specified by the customProtocol
|
||||
* @example
|
||||
* ```ts
|
||||
* // This will fetch a file using the fetch API (this is obviously a non interesting example...)
|
||||
* addProtocol('custom', async (params, abortController) => {
|
||||
* const t = await fetch(`https://${params.url.split("://")[1]}`);
|
||||
* if (t.status == 200) {
|
||||
* const buffer = await t.arrayBuffer();
|
||||
* return {data: buffer}
|
||||
* } else {
|
||||
* throw new Error(`Tile fetch error: ${t.statusText}`);
|
||||
* }
|
||||
* });
|
||||
* // the following is an example of a way to return an error when trying to load a tile
|
||||
* addProtocol('custom2', async (params, abortController) => {
|
||||
* throw new Error('someErrorMessage');
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
export function addProtocol(customProtocol: string, loadFn: AddProtocolAction) {
    // Last registration wins: re-adding the same protocol replaces its handler.
    config.REGISTERED_PROTOCOLS[customProtocol] = loadFn;
}
|
||||
|
||||
/**
|
||||
* Removes a previously added protocol in the main thread.
|
||||
*
|
||||
* @param customProtocol - the custom protocol to remove registration for
|
||||
* @example
|
||||
* ```ts
|
||||
* removeProtocol('custom');
|
||||
* ```
|
||||
*/
|
||||
export function removeProtocol(customProtocol: string) {
    // Removing a protocol that was never registered is a silent no-op.
    delete config.REGISTERED_PROTOCOLS[customProtocol];
}
|
||||
32
node_modules/maplibre-gl/src/source/query_features.test.ts
generated
vendored
Normal file
32
node_modules/maplibre-gl/src/source/query_features.test.ts
generated
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
import {describe, test, expect} from 'vitest';
|
||||
import {
|
||||
queryRenderedFeatures,
|
||||
querySourceFeatures
|
||||
} from './query_features';
|
||||
import {TileManager} from '../tile/tile_manager';
|
||||
import type Point from '@mapbox/point-geometry';
|
||||
import {MercatorTransform} from '../geo/projection/mercator_transform';
|
||||
|
||||
describe('QueryFeatures.rendered', () => {
|
||||
test('returns empty object if source returns no tiles', () => {
|
||||
const mockTileManager = {tilesIn () { return []; }} as any as TileManager;
|
||||
const transform = new MercatorTransform();
|
||||
const result = queryRenderedFeatures(mockTileManager, {}, undefined, [] as Point[], undefined, transform, undefined);
|
||||
expect(result).toEqual({});
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
describe('QueryFeatures.source', () => {
|
||||
test('returns empty result when source has no features', () => {
|
||||
const tileManager = new TileManager('test', {
|
||||
type: 'geojson',
|
||||
data: {type: 'FeatureCollection', features: []}
|
||||
}, {
|
||||
getActor() {}
|
||||
} as any);
|
||||
const result = querySourceFeatures(tileManager, {});
|
||||
expect(result).toEqual([]);
|
||||
});
|
||||
|
||||
});
|
||||
298
node_modules/maplibre-gl/src/source/query_features.ts
generated
vendored
Normal file
298
node_modules/maplibre-gl/src/source/query_features.ts
generated
vendored
Normal file
@@ -0,0 +1,298 @@
|
||||
import {mat4} from 'gl-matrix';
|
||||
import type Point from '@mapbox/point-geometry';
|
||||
import type {TileManager} from '../tile/tile_manager';
|
||||
import type {StyleLayer} from '../style/style_layer';
|
||||
import type {CollisionIndex} from '../symbol/collision_index';
|
||||
import type {IReadonlyTransform} from '../geo/transform_interface';
|
||||
import type {RetainedQueryData} from '../symbol/placement';
|
||||
import type {FilterSpecification} from '@maplibre/maplibre-gl-style-spec';
|
||||
import type {GeoJSONFeature, MapGeoJSONFeature} from '../util/vectortile_to_geojson';
|
||||
import type {QueryResults, QueryResultsItem} from '../data/feature_index';
|
||||
import type {OverscaledTileID} from '../tile/tile_id';
|
||||
|
||||
// Query results for one tile, keyed by its wrapped tile ID string.
// NOTE(review): presumably the key of OverscaledTileID.wrapped() — confirm
// against the usage further down this file (outside this view).
type RenderedFeatureLayer = {
    wrappedTileID: string;
    queryResults: QueryResults;
};
|
||||
|
||||
/**
|
||||
* Options to pass to query the map for the rendered features
|
||||
*/
|
||||
export type QueryRenderedFeaturesOptions = {
|
||||
/**
|
||||
* An array or set of [style layer IDs](https://maplibre.org/maplibre-style-spec/#layer-id) for the query to inspect.
|
||||
* Only features within these layers will be returned. If this parameter is undefined, all layers will be checked.
|
||||
*/
|
||||
layers?: Array<string> | Set<string>;
|
||||
/**
|
||||
* A [filter](https://maplibre.org/maplibre-style-spec/layers/#filter) to limit query results.
|
||||
*/
|
||||
filter?: FilterSpecification;
|
||||
/**
|
||||
* An array of string representing the available images
|
||||
*/
|
||||
availableImages?: Array<string>;
|
||||
/**
|
||||
* Whether to check if the [options.filter] conforms to the MapLibre Style Specification. Disabling validation is a performance optimization that should only be used if you have previously validated the values you will be passing to this function.
|
||||
*/
|
||||
validate?: boolean;
|
||||
};
|
||||
|
||||
/**
|
||||
* @internal
|
||||
* A version of QueryRenderedFeaturesOptions used internally
|
||||
*/
|
||||
export type QueryRenderedFeaturesOptionsStrict = Omit<QueryRenderedFeaturesOptions, 'layers'> & {
|
||||
layers: Set<string> | null;
|
||||
globalState?: Record<string, any>;
|
||||
};
|
||||
|
||||
/**
|
||||
* The options object related to the {@link Map.querySourceFeatures} method
|
||||
*/
|
||||
export type QuerySourceFeatureOptions = {
|
||||
/**
|
||||
* The name of the source layer to query. *For vector tile sources, this parameter is required.* For GeoJSON sources, it is ignored.
|
||||
*/
|
||||
sourceLayer?: string;
|
||||
/**
|
||||
* A [filter](https://maplibre.org/maplibre-style-spec/layers/#filter)
|
||||
* to limit query results.
|
||||
*/
|
||||
filter?: FilterSpecification;
|
||||
/**
|
||||
* Whether to check if the [parameters.filter] conforms to the MapLibre Style Specification. Disabling validation is a performance optimization that should only be used if you have previously validated the values you will be passing to this function.
|
||||
* @defaultValue true
|
||||
*/
|
||||
validate?: boolean;
|
||||
};
|
||||
|
||||
/**
|
||||
* @internal
|
||||
* A version of QuerySourceFeatureOptions used internally
|
||||
*/
|
||||
export type QuerySourceFeatureOptionsStrict = QuerySourceFeatureOptions & {
|
||||
globalState?: Record<string, any>;
|
||||
};
|
||||
|
||||
export type QueryRenderedFeaturesResults = {
|
||||
[key: string]: QueryRenderedFeaturesResultsItem[];
|
||||
};
|
||||
|
||||
export type QueryRenderedFeaturesResultsItem = QueryResultsItem & { feature: MapGeoJSONFeature };
|
||||
|
||||
/*
|
||||
* Returns a matrix that can be used to convert from tile coordinates to viewport pixel coordinates.
|
||||
*/
|
||||
function getPixelPosMatrix(transform, tileID: OverscaledTileID) {
|
||||
const t = mat4.create();
|
||||
mat4.translate(t, t, [1, 1, 0]);
|
||||
mat4.scale(t, t, [transform.width * 0.5, transform.height * 0.5, 1]);
|
||||
if (transform.calculatePosMatrix) { // Globe: TODO: remove this hack once queryRendererFeatures supports globe properly
|
||||
return mat4.multiply(t, t, transform.calculatePosMatrix(tileID.toUnwrapped()));
|
||||
} else {
|
||||
return t;
|
||||
}
|
||||
}
|
||||
|
||||
function queryIncludes3DLayer(layers: Set<string> | undefined, styleLayers: {[_: string]: StyleLayer}, sourceID: string) {
|
||||
if (layers) {
|
||||
for (const layerID of layers) {
|
||||
const layer = styleLayers[layerID];
|
||||
if (layer && layer.source === sourceID && layer.type === 'fill-extrusion') {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for (const key in styleLayers) {
|
||||
const layer = styleLayers[key];
|
||||
if (layer.source === sourceID && layer.type === 'fill-extrusion') {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
export function queryRenderedFeatures(
|
||||
tileManager: TileManager,
|
||||
styleLayers: {[_: string]: StyleLayer},
|
||||
serializedLayers: {[_: string]: any},
|
||||
queryGeometry: Array<Point>,
|
||||
params: QueryRenderedFeaturesOptionsStrict | undefined,
|
||||
transform: IReadonlyTransform,
|
||||
getElevation: undefined | ((id: OverscaledTileID, x: number, y: number) => number)
|
||||
): QueryRenderedFeaturesResults {
|
||||
|
||||
const has3DLayer = queryIncludes3DLayer(params?.layers ?? null, styleLayers, tileManager.id);
|
||||
const maxPitchScaleFactor = transform.maxPitchScaleFactor();
|
||||
const tilesIn = tileManager.tilesIn(queryGeometry, maxPitchScaleFactor, has3DLayer);
|
||||
|
||||
tilesIn.sort(sortTilesIn);
|
||||
const renderedFeatureLayers: RenderedFeatureLayer[] = [];
|
||||
for (const tileIn of tilesIn) {
|
||||
renderedFeatureLayers.push({
|
||||
wrappedTileID: tileIn.tileID.wrapped().key,
|
||||
queryResults: tileIn.tile.queryRenderedFeatures(
|
||||
styleLayers,
|
||||
serializedLayers,
|
||||
tileManager.getState(),
|
||||
tileIn.queryGeometry,
|
||||
tileIn.cameraQueryGeometry,
|
||||
tileIn.scale,
|
||||
params,
|
||||
transform,
|
||||
maxPitchScaleFactor,
|
||||
getPixelPosMatrix(transform, tileIn.tileID),
|
||||
getElevation ? (x: number, y: number) => getElevation(tileIn.tileID, x, y) : undefined,
|
||||
)
|
||||
});
|
||||
}
|
||||
|
||||
const result = mergeRenderedFeatureLayers(renderedFeatureLayers);
|
||||
|
||||
return convertFeaturesToMapFeatures(result, tileManager);
|
||||
}
|
||||
|
||||
export function queryRenderedSymbols(styleLayers: {[_: string]: StyleLayer},
|
||||
serializedLayers: {[_: string]: StyleLayer},
|
||||
tileManagers: {[_: string]: TileManager},
|
||||
queryGeometry: Array<Point>,
|
||||
params: QueryRenderedFeaturesOptionsStrict,
|
||||
collisionIndex: CollisionIndex,
|
||||
retainedQueryData: {
|
||||
[_: number]: RetainedQueryData;
|
||||
}): QueryRenderedFeaturesResults {
|
||||
const result: QueryResults = {};
|
||||
const renderedSymbols = collisionIndex.queryRenderedSymbols(queryGeometry);
|
||||
const bucketQueryData: RetainedQueryData[] = [];
|
||||
for (const bucketInstanceId of Object.keys(renderedSymbols).map(Number)) {
|
||||
bucketQueryData.push(retainedQueryData[bucketInstanceId]);
|
||||
}
|
||||
bucketQueryData.sort(sortTilesIn);
|
||||
|
||||
for (const queryData of bucketQueryData) {
|
||||
const bucketSymbols = queryData.featureIndex.lookupSymbolFeatures(
|
||||
renderedSymbols[queryData.bucketInstanceId],
|
||||
serializedLayers,
|
||||
queryData.bucketIndex,
|
||||
queryData.sourceLayerIndex,
|
||||
{
|
||||
filterSpec: params.filter,
|
||||
globalState: params.globalState
|
||||
},
|
||||
params.layers,
|
||||
params.availableImages,
|
||||
styleLayers);
|
||||
|
||||
for (const layerID in bucketSymbols) {
|
||||
const resultFeatures = result[layerID] = result[layerID] || [];
|
||||
const layerSymbols = bucketSymbols[layerID];
|
||||
layerSymbols.sort((a, b) => {
|
||||
// Match topDownFeatureComparator from FeatureIndex, but using
|
||||
// most recent sorting of features from bucket.sortFeatures
|
||||
const featureSortOrder = queryData.featureSortOrder;
|
||||
if (featureSortOrder) {
|
||||
// queryRenderedSymbols documentation says we'll return features in
|
||||
// "top-to-bottom" rendering order (aka last-to-first).
|
||||
// Actually there can be multiple symbol instances per feature, so
|
||||
// we sort each feature based on the first matching symbol instance.
|
||||
const sortedA = featureSortOrder.indexOf(a.featureIndex);
|
||||
const sortedB = featureSortOrder.indexOf(b.featureIndex);
|
||||
return sortedB - sortedA;
|
||||
} else {
|
||||
// Bucket hasn't been re-sorted based on angle, so use the
|
||||
// reverse of the order the features appeared in the data.
|
||||
return b.featureIndex - a.featureIndex;
|
||||
}
|
||||
});
|
||||
for (const symbolFeature of layerSymbols) {
|
||||
resultFeatures.push(symbolFeature);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return convertFeaturesToMapFeaturesMultiple(result, styleLayers, tileManagers);
|
||||
}
|
||||
|
||||
export function querySourceFeatures(tileManager: TileManager, params: QuerySourceFeatureOptionsStrict | undefined): GeoJSONFeature[] {
|
||||
const tiles = tileManager.getRenderableIds().map((id) => {
|
||||
return tileManager.getTileByID(id);
|
||||
});
|
||||
|
||||
const result: GeoJSONFeature[] = [];
|
||||
|
||||
const dataTiles = {};
|
||||
for (let i = 0; i < tiles.length; i++) {
|
||||
const tile = tiles[i];
|
||||
const dataID = tile.tileID.canonical.key;
|
||||
if (!dataTiles[dataID]) {
|
||||
dataTiles[dataID] = true;
|
||||
tile.querySourceFeatures(result, params);
|
||||
}
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function sortTilesIn(a: {tileID: OverscaledTileID}, b: {tileID: OverscaledTileID}) {
|
||||
const idA = a.tileID;
|
||||
const idB = b.tileID;
|
||||
return (idA.overscaledZ - idB.overscaledZ) || (idA.canonical.y - idB.canonical.y) || (idA.wrap - idB.wrap) || (idA.canonical.x - idB.canonical.x);
|
||||
}
|
||||
|
||||
function mergeRenderedFeatureLayers(tiles: RenderedFeatureLayer[]): QueryResults {
|
||||
// Merge results from all tiles, but if two tiles share the same
|
||||
// wrapped ID, don't duplicate features between the two tiles
|
||||
const result: QueryResults = {};
|
||||
const wrappedIDLayerMap = {};
|
||||
for (const tile of tiles) {
|
||||
const queryResults = tile.queryResults;
|
||||
const wrappedID = tile.wrappedTileID;
|
||||
const wrappedIDLayers = wrappedIDLayerMap[wrappedID] = wrappedIDLayerMap[wrappedID] || {};
|
||||
for (const layerID in queryResults) {
|
||||
const tileFeatures = queryResults[layerID];
|
||||
const wrappedIDFeatures = wrappedIDLayers[layerID] = wrappedIDLayers[layerID] || {};
|
||||
const resultFeatures = result[layerID] = result[layerID] || [];
|
||||
for (const tileFeature of tileFeatures) {
|
||||
if (!wrappedIDFeatures[tileFeature.featureIndex]) {
|
||||
wrappedIDFeatures[tileFeature.featureIndex] = true;
|
||||
resultFeatures.push(tileFeature);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
function convertFeaturesToMapFeatures(result: QueryResults, tileManager: TileManager): QueryRenderedFeaturesResults {
|
||||
// Merge state from TileManager into the results
|
||||
for (const layerID in result) {
|
||||
for (const featureWrapper of result[layerID]) {
|
||||
convertFeatureToMapFeature(featureWrapper, tileManager);
|
||||
};
|
||||
}
|
||||
return result as QueryRenderedFeaturesResults;
|
||||
}
|
||||
|
||||
function convertFeaturesToMapFeaturesMultiple(result: QueryResults, styleLayers: {[_: string]: StyleLayer}, tileManagers: {[_: string]: TileManager}): QueryRenderedFeaturesResults {
|
||||
// Merge state from TileManager into the results
|
||||
for (const layerName in result) {
|
||||
for (const featureWrapper of result[layerName]) {
|
||||
const layer = styleLayers[layerName];
|
||||
const tileManager = tileManagers[layer.source];
|
||||
convertFeatureToMapFeature(featureWrapper, tileManager);
|
||||
};
|
||||
}
|
||||
return result as QueryRenderedFeaturesResults;
|
||||
}
|
||||
|
||||
function convertFeatureToMapFeature(featureWrapper: QueryResultsItem, tileManager: TileManager) {
|
||||
const feature = featureWrapper.feature as MapGeoJSONFeature;
|
||||
const state = tileManager.getFeatureState(feature.layer['source-layer'], feature.id);
|
||||
feature.source = feature.layer.source;
|
||||
if (feature.layer['source-layer']) {
|
||||
feature.sourceLayer = feature.layer['source-layer'];
|
||||
}
|
||||
feature.state = state;
|
||||
}
|
||||
376
node_modules/maplibre-gl/src/source/raster_dem_tile_source.test.ts
generated
vendored
Normal file
376
node_modules/maplibre-gl/src/source/raster_dem_tile_source.test.ts
generated
vendored
Normal file
@@ -0,0 +1,376 @@
|
||||
import {describe, beforeEach, afterEach, test, expect, vi, it} from 'vitest';
|
||||
import {fakeServer, type FakeServer} from 'nise';
|
||||
import {RasterDEMTileSource} from './raster_dem_tile_source';
|
||||
import {OverscaledTileID} from '../tile/tile_id';
|
||||
import {RequestManager} from '../util/request_manager';
|
||||
import {type Tile} from '../tile/tile';
|
||||
import {getMockDispatcher} from '../util/test/util';
|
||||
import {sleep, waitForEvent, waitForMetadataEvent} from '../util/test/util';
|
||||
import type {MapSourceDataEvent} from '../ui/events';
|
||||
|
||||
function createSource(options, transformCallback?) {
|
||||
const source = new RasterDEMTileSource('id', options, getMockDispatcher(), options.eventedParent);
|
||||
source.onAdd({
|
||||
transform: {angle: 0, pitch: 0, showCollisionBoxes: false},
|
||||
_getMapId: () => 1,
|
||||
_requestManager: new RequestManager(transformCallback),
|
||||
getPixelRatio() { return 1; }
|
||||
} as any);
|
||||
|
||||
source.on('error', (e) => {
|
||||
throw e.error;
|
||||
});
|
||||
|
||||
return source;
|
||||
}
|
||||
|
||||
describe('RasterDEMTileSource', () => {
|
||||
let server: FakeServer;
|
||||
beforeEach(() => {
|
||||
global.fetch = null;
|
||||
server = fakeServer.create();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
server.restore();
|
||||
});
|
||||
|
||||
test('transforms request for TileJSON URL', () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.pngraw'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
}));
|
||||
const transformSpy = vi.fn().mockImplementation((url) => {
|
||||
return {url};
|
||||
});
|
||||
|
||||
createSource({url: '/source.json'}, transformSpy);
|
||||
server.respond();
|
||||
|
||||
expect(transformSpy.mock.calls[0][0]).toBe('/source.json');
|
||||
expect(transformSpy.mock.calls[0][1]).toBe('Source');
|
||||
});
|
||||
|
||||
test('can asynchronously transform request for TileJSON URL', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.pngraw'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
}));
|
||||
const source = createSource({url: '/source.json'}, async (url) => ({
|
||||
url,
|
||||
headers: {Authorization: 'Bearer token'}
|
||||
}));
|
||||
const promise = waitForMetadataEvent(source);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
expect(server.requests[0].url).toBe('/source.json');
|
||||
expect(server.requests[0].requestHeaders.Authorization).toBe('Bearer token');
|
||||
});
|
||||
|
||||
test('transforms tile urls before requesting', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
}));
|
||||
const source = createSource({url: '/source.json'});
|
||||
const transformSpy = vi.spyOn(source.map._requestManager, 'transformRequest');
|
||||
const promise = waitForMetadataEvent(source);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
loadVectorData () {},
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
source.loadTile(tile);
|
||||
|
||||
expect(transformSpy).toHaveBeenCalledTimes(1);
|
||||
expect(transformSpy.mock.calls[0][0]).toBe('http://example.com/10/5/5.png');
|
||||
expect(transformSpy.mock.calls[0][1]).toBe('Tile');
|
||||
});
|
||||
|
||||
test('can asynchronously transform tile request', async () => {
|
||||
server.respondWith('http://example.com/10/5/5.png',
|
||||
[200, {'Content-Type': 'image/png', 'Content-Length': 1, 'Cache-Control': 'max-age=100'}, '0']
|
||||
);
|
||||
|
||||
const source = createSource({
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png']
|
||||
}, async (url) => ({
|
||||
url,
|
||||
headers: {Authorization: 'Bearer token'}
|
||||
}));
|
||||
source.map.painter = {context: {}, getTileTexture: () => { return {update: () => {}}; }} as any;
|
||||
await waitForMetadataEvent(source);
|
||||
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
setExpiryData() {},
|
||||
actor: 1
|
||||
} as any as Tile;
|
||||
const promise = source.loadTile(tile);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
expect(server.requests[0].url).toBe('http://example.com/10/5/5.png');
|
||||
expect(server.requests[0].requestHeaders.Authorization).toBe('Bearer token');
|
||||
});
|
||||
|
||||
test('populates neighboringTiles', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png']
|
||||
}));
|
||||
const source = createSource({url: '/source.json'});
|
||||
const promise = waitForMetadataEvent(source);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
loadVectorData () {},
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
source.loadTile(tile);
|
||||
await sleep(0);
|
||||
|
||||
expect(Object.keys(tile.neighboringTiles)).toEqual([
|
||||
new OverscaledTileID(10, 0, 10, 4, 5).key,
|
||||
new OverscaledTileID(10, 0, 10, 6, 5).key,
|
||||
new OverscaledTileID(10, 0, 10, 4, 4).key,
|
||||
new OverscaledTileID(10, 0, 10, 5, 4).key,
|
||||
new OverscaledTileID(10, 0, 10, 6, 4).key,
|
||||
new OverscaledTileID(10, 0, 10, 4, 6).key,
|
||||
new OverscaledTileID(10, 0, 10, 5, 6).key,
|
||||
new OverscaledTileID(10, 0, 10, 6, 6).key
|
||||
]);
|
||||
});
|
||||
|
||||
test('populates neighboringTiles with wrapped tiles', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png']
|
||||
}));
|
||||
|
||||
const source = createSource({url: '/source.json'});
|
||||
const promise = waitForMetadataEvent(source);
|
||||
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(5, 0, 5, 31, 5),
|
||||
state: 'loading',
|
||||
loadVectorData() {},
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
source.loadTile(tile);
|
||||
await sleep(0);
|
||||
|
||||
expect(Object.keys(tile.neighboringTiles)).toEqual([
|
||||
new OverscaledTileID(5, 0, 5, 30, 6).key,
|
||||
new OverscaledTileID(5, 0, 5, 31, 6).key,
|
||||
new OverscaledTileID(5, 0, 5, 30, 5).key,
|
||||
new OverscaledTileID(5, 1, 5, 0, 5).key,
|
||||
new OverscaledTileID(5, 0, 5, 30, 4).key,
|
||||
new OverscaledTileID(5, 0, 5, 31, 4).key,
|
||||
new OverscaledTileID(5, 1, 5, 0, 4).key,
|
||||
new OverscaledTileID(5, 1, 5, 0, 6).key
|
||||
]);
|
||||
});
|
||||
|
||||
it('serializes options', () => {
|
||||
const source = createSource({
|
||||
tiles: ['http://localhost:2900/raster-dem/{z}/{x}/{y}.png'],
|
||||
minzoom: 2,
|
||||
maxzoom: 10
|
||||
});
|
||||
expect(source.serialize()).toStrictEqual({
|
||||
type: 'raster-dem',
|
||||
tiles: ['http://localhost:2900/raster-dem/{z}/{x}/{y}.png'],
|
||||
minzoom: 2,
|
||||
maxzoom: 10
|
||||
});
|
||||
});
|
||||
|
||||
test('Tile expiry data is set when "Cache-Control" is set but not "Expires"', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
}));
|
||||
server.respondWith('http://example.com/10/5/5.png',
|
||||
[200, {'Content-Type': 'image/png', 'Content-Length': 1, 'Cache-Control': 'max-age=100'}, '0']
|
||||
);
|
||||
const source = createSource({url: '/source.json'});
|
||||
source.map.painter = {context: {}, getTileTexture: () => { return {update: () => {}}; }} as any;
|
||||
source.map._refreshExpiredTiles = true;
|
||||
|
||||
const promise = waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.sourceDataType === 'metadata');
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
setExpiryData() {},
|
||||
actor: source.dispatcher.getActor()
|
||||
} as any as Tile;
|
||||
const expiryDataSpy = vi.spyOn(tile, 'setExpiryData');
|
||||
const tilePromise = source.loadTile(tile);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await tilePromise;
|
||||
expect(expiryDataSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('Tile expiry data is set when "Expires" is set but not "Cache-Control"', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
}));
|
||||
server.respondWith('http://example.com/10/5/5.png',
|
||||
[200, {'Content-Type': 'image/png', 'Content-Length': 1, 'Expires': 'Wed, 21 Oct 2015 07:28:00 GMT'}, '0']
|
||||
);
|
||||
const source = createSource({url: '/source.json'});
|
||||
source.map.painter = {context: {}, getTileTexture: () => { return {update: () => {}}; }} as any;
|
||||
source.map._refreshExpiredTiles = true;
|
||||
|
||||
const promise = waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.sourceDataType === 'metadata');
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
setExpiryData() {},
|
||||
actor: source.dispatcher.getActor()
|
||||
} as any as Tile;
|
||||
const expiryDataSpy = vi.spyOn(tile, 'setExpiryData');
|
||||
const tilePromise = source.loadTile(tile);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await tilePromise;
|
||||
expect(expiryDataSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('Tile expiry data is set when "Expires" is set and "Cache-Control" is an empty string', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
}));
|
||||
server.respondWith('http://example.com/10/5/5.png',
|
||||
[200, {'Content-Type': 'image/png', 'Content-Length': 1, 'Cache-Control': '', 'Expires': 'Wed, 21 Oct 2015 07:28:00 GMT'}, '0']
|
||||
);
|
||||
const source = createSource({url: '/source.json'});
|
||||
source.map.painter = {context: {}, getTileTexture: () => { return {update: () => {}}; }} as any;
|
||||
source.map._refreshExpiredTiles = true;
|
||||
|
||||
const promise = waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.sourceDataType === 'metadata');
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
setExpiryData() {},
|
||||
actor: source.dispatcher.getActor()
|
||||
} as any as Tile;
|
||||
const expiryDataSpy = vi.spyOn(tile, 'setExpiryData');
|
||||
const tilePromise = source.loadTile(tile);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await tilePromise;
|
||||
expect(expiryDataSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('does not throw when tile is aborted', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png']
|
||||
}));
|
||||
|
||||
const source = createSource({url: '/source.json'});
|
||||
const promise = waitForMetadataEvent(source);
|
||||
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(5, 0, 5, 31, 5),
|
||||
state: 'loading',
|
||||
loadVectorData() {},
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
const loadPromise = source.loadTile(tile);
|
||||
await sleep(0);
|
||||
|
||||
tile.abortController.abort();
|
||||
tile.aborted = true;
|
||||
|
||||
await expect(loadPromise).resolves.toBeUndefined();
|
||||
expect(tile.state).toBe('unloaded');
|
||||
});
|
||||
|
||||
test('reloads tile in reloading state', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png']
|
||||
}));
|
||||
server.respondWith('http://example.com/5/31/5.png',
|
||||
[200, {'Content-Type': 'image/png', 'Content-Length': 1}, '0']
|
||||
);
|
||||
|
||||
const source = createSource({url: '/source.json'});
|
||||
const promise = waitForMetadataEvent(source);
|
||||
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(5, 0, 5, 31, 5),
|
||||
state: 'reloading',
|
||||
actor: source.dispatcher.getActor(),
|
||||
loadVectorData() {},
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
const tilePromise = source.loadTile(tile);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await tilePromise;
|
||||
expect(tile.state).toBe('loaded');
|
||||
});
|
||||
});
|
||||
167
node_modules/maplibre-gl/src/source/raster_dem_tile_source.ts
generated
vendored
Normal file
167
node_modules/maplibre-gl/src/source/raster_dem_tile_source.ts
generated
vendored
Normal file
@@ -0,0 +1,167 @@
|
||||
import {ImageRequest} from '../util/image_request';
|
||||
import {ResourceType} from '../util/request_manager';
|
||||
import {extend, isImageBitmap, readImageUsingVideoFrame} from '../util/util';
|
||||
import {type Evented} from '../util/evented';
|
||||
import {browser} from '../util/browser';
|
||||
import {offscreenCanvasSupported} from '../util/offscreen_canvas_supported';
|
||||
import {OverscaledTileID} from '../tile/tile_id';
|
||||
import {RasterTileSource} from './raster_tile_source';
|
||||
// ensure DEMData is registered for worker transfer on main thread:
|
||||
import '../data/dem_data';
|
||||
import type {DEMEncoding} from '../data/dem_data';
|
||||
|
||||
import type {Source} from './source';
|
||||
import type {Dispatcher} from '../util/dispatcher';
|
||||
import type {Tile} from '../tile/tile';
|
||||
import type {RasterDEMSourceSpecification} from '@maplibre/maplibre-gl-style-spec';
|
||||
import {isOffscreenCanvasDistorted} from '../util/offscreen_canvas_distorted';
|
||||
import {RGBAImage} from '../util/image';
|
||||
import {MessageType} from '../util/actor_messages';
|
||||
|
||||
/**
|
||||
* A source containing raster DEM tiles (See the [Style Specification](https://maplibre.org/maplibre-style-spec/) for detailed documentation of options.)
|
||||
* This source can be used to show hillshading and 3D terrain
|
||||
*
|
||||
* @group Sources
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* map.addSource('raster-dem-source', {
|
||||
* type: 'raster-dem',
|
||||
* url: 'https://demotiles.maplibre.org/terrain-tiles/tiles.json',
|
||||
* tileSize: 256
|
||||
* });
|
||||
* ```
|
||||
* @see [3D Terrain](https://maplibre.org/maplibre-gl-js/docs/examples/3d-terrain/)
|
||||
*/
|
||||
export class RasterDEMTileSource extends RasterTileSource implements Source {
|
||||
encoding: DEMEncoding;
|
||||
redFactor?: number;
|
||||
greenFactor?: number;
|
||||
blueFactor?: number;
|
||||
baseShift?: number;
|
||||
|
||||
constructor(id: string, options: RasterDEMSourceSpecification, dispatcher: Dispatcher, eventedParent: Evented) {
|
||||
super(id, options, dispatcher, eventedParent);
|
||||
this.type = 'raster-dem';
|
||||
this.maxzoom = 22;
|
||||
this._options = extend({type: 'raster-dem'}, options);
|
||||
this.encoding = options.encoding || 'mapbox';
|
||||
this.redFactor = options.redFactor;
|
||||
this.greenFactor = options.greenFactor;
|
||||
this.blueFactor = options.blueFactor;
|
||||
this.baseShift = options.baseShift;
|
||||
}
|
||||
|
||||
override async loadTile(tile: Tile): Promise<void> {
|
||||
const url = tile.tileID.canonical.url(this.tiles, this.map.getPixelRatio(), this.scheme);
|
||||
const request = await this.map._requestManager.transformRequest(url, ResourceType.Tile);
|
||||
tile.neighboringTiles = this._getNeighboringTiles(tile.tileID);
|
||||
tile.abortController = new AbortController();
|
||||
try {
|
||||
const response = await ImageRequest.getImage(request, tile.abortController, this.map._refreshExpiredTiles);
|
||||
delete tile.abortController;
|
||||
if (tile.aborted) {
|
||||
tile.state = 'unloaded';
|
||||
return;
|
||||
}
|
||||
if (response && response.data) {
|
||||
const img = response.data;
|
||||
if (this.map._refreshExpiredTiles && (response.cacheControl || response.expires)) {
|
||||
tile.setExpiryData({cacheControl: response.cacheControl, expires: response.expires});
|
||||
}
|
||||
const transfer = isImageBitmap(img) && offscreenCanvasSupported();
|
||||
const rawImageData = transfer ? img : await this.readImageNow(img);
|
||||
const params = {
|
||||
type: this.type,
|
||||
uid: tile.uid,
|
||||
source: this.id,
|
||||
rawImageData,
|
||||
encoding: this.encoding,
|
||||
redFactor: this.redFactor,
|
||||
greenFactor: this.greenFactor,
|
||||
blueFactor: this.blueFactor,
|
||||
baseShift: this.baseShift
|
||||
};
|
||||
|
||||
if (tile.actor && tile.state !== 'expired' && tile.state !== 'reloading') {
|
||||
return;
|
||||
}
|
||||
if (!tile.actor || tile.state === 'expired') {
|
||||
tile.actor = this.dispatcher.getActor();
|
||||
}
|
||||
const data = await tile.actor.sendAsync({type: MessageType.loadDEMTile, data: params});
|
||||
tile.dem = data;
|
||||
tile.needsHillshadePrepare = true;
|
||||
tile.needsTerrainPrepare = true;
|
||||
tile.state = 'loaded';
|
||||
}
|
||||
} catch (err) {
|
||||
delete tile.abortController;
|
||||
if (tile.aborted) {
|
||||
tile.state = 'unloaded';
|
||||
} else if (err) {
|
||||
tile.state = 'errored';
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async readImageNow(img: ImageBitmap | HTMLImageElement): Promise<RGBAImage | ImageData> {
|
||||
if (typeof VideoFrame !== 'undefined' && isOffscreenCanvasDistorted()) {
|
||||
const width = img.width + 2;
|
||||
const height = img.height + 2;
|
||||
try {
|
||||
return new RGBAImage({width, height}, await readImageUsingVideoFrame(img, -1, -1, width, height));
|
||||
} catch {
|
||||
// fall-back to browser canvas decoding
|
||||
}
|
||||
}
|
||||
return browser.getImageData(img, 1);
|
||||
}
|
||||
|
||||
_getNeighboringTiles(tileID: OverscaledTileID): Record<string, {backfilled: boolean}> {
|
||||
const canonical = tileID.canonical;
|
||||
const dim = Math.pow(2, canonical.z);
|
||||
|
||||
const px = (canonical.x - 1 + dim) % dim;
|
||||
const pxw = canonical.x === 0 ? tileID.wrap - 1 : tileID.wrap;
|
||||
const nx = (canonical.x + 1 + dim) % dim;
|
||||
const nxw = canonical.x + 1 === dim ? tileID.wrap + 1 : tileID.wrap;
|
||||
|
||||
const neighboringTiles: Record<string, {backfilled: boolean}> = {};
|
||||
// add adjacent tiles
|
||||
neighboringTiles[new OverscaledTileID(tileID.overscaledZ, pxw, canonical.z, px, canonical.y).key] = {backfilled: false};
|
||||
neighboringTiles[new OverscaledTileID(tileID.overscaledZ, nxw, canonical.z, nx, canonical.y).key] = {backfilled: false};
|
||||
|
||||
// Add upper neighboringTiles
|
||||
if (canonical.y > 0) {
|
||||
neighboringTiles[new OverscaledTileID(tileID.overscaledZ, pxw, canonical.z, px, canonical.y - 1).key] = {backfilled: false};
|
||||
neighboringTiles[new OverscaledTileID(tileID.overscaledZ, tileID.wrap, canonical.z, canonical.x, canonical.y - 1).key] = {backfilled: false};
|
||||
neighboringTiles[new OverscaledTileID(tileID.overscaledZ, nxw, canonical.z, nx, canonical.y - 1).key] = {backfilled: false};
|
||||
}
|
||||
// Add lower neighboringTiles
|
||||
if (canonical.y + 1 < dim) {
|
||||
neighboringTiles[new OverscaledTileID(tileID.overscaledZ, pxw, canonical.z, px, canonical.y + 1).key] = {backfilled: false};
|
||||
neighboringTiles[new OverscaledTileID(tileID.overscaledZ, tileID.wrap, canonical.z, canonical.x, canonical.y + 1).key] = {backfilled: false};
|
||||
neighboringTiles[new OverscaledTileID(tileID.overscaledZ, nxw, canonical.z, nx, canonical.y + 1).key] = {backfilled: false};
|
||||
}
|
||||
|
||||
return neighboringTiles;
|
||||
}
|
||||
|
||||
async unloadTile(tile: Tile) {
|
||||
if (tile.demTexture) this.map.painter.saveTileTexture(tile.demTexture);
|
||||
if (tile.fbo) {
|
||||
tile.fbo.destroy();
|
||||
delete tile.fbo;
|
||||
}
|
||||
if (tile.dem) delete tile.dem;
|
||||
delete tile.neighboringTiles;
|
||||
|
||||
tile.state = 'unloaded';
|
||||
if (tile.actor) {
|
||||
await tile.actor.sendAsync({type: MessageType.removeDEMTile, data: {type: this.type, uid: tile.uid, source: this.id}});
|
||||
}
|
||||
}
|
||||
}
|
||||
37
node_modules/maplibre-gl/src/source/raster_dem_tile_worker_source.test.ts
generated
vendored
Normal file
37
node_modules/maplibre-gl/src/source/raster_dem_tile_worker_source.test.ts
generated
vendored
Normal file
@@ -0,0 +1,37 @@
|
||||
import {describe, test, expect} from 'vitest';
|
||||
import {RasterDEMTileWorkerSource} from './raster_dem_tile_worker_source';
|
||||
import {DEMData} from '../data/dem_data';
|
||||
import {type WorkerDEMTileParameters} from './worker_source';
|
||||
|
||||
describe('loadTile', () => {
|
||||
test('loads DEM tile', async () => {
|
||||
const source = new RasterDEMTileWorkerSource();
|
||||
|
||||
const data = await source.loadTile({
|
||||
source: 'source',
|
||||
uid: '0',
|
||||
rawImageData: {data: new Uint8ClampedArray(256), height: 8, width: 8},
|
||||
dim: 256
|
||||
} as any as WorkerDEMTileParameters);
|
||||
expect(Object.keys(source.loaded)).toEqual(['0']);
|
||||
expect(data instanceof DEMData).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('removeTile', () => {
|
||||
test('removes loaded tile', () => {
|
||||
const source = new RasterDEMTileWorkerSource();
|
||||
|
||||
source.loaded = {
|
||||
'0': {} as DEMData
|
||||
};
|
||||
|
||||
source.removeTile({
|
||||
source: 'source',
|
||||
uid: '0',
|
||||
type: 'raster-dem',
|
||||
});
|
||||
|
||||
expect(source.loaded).toEqual({});
|
||||
});
|
||||
});
|
||||
38
node_modules/maplibre-gl/src/source/raster_dem_tile_worker_source.ts
generated
vendored
Normal file
38
node_modules/maplibre-gl/src/source/raster_dem_tile_worker_source.ts
generated
vendored
Normal file
@@ -0,0 +1,38 @@
|
||||
import {DEMData} from '../data/dem_data';
|
||||
import {RGBAImage} from '../util/image';
|
||||
import type {Actor} from '../util/actor';
|
||||
import type {
|
||||
WorkerDEMTileParameters,
|
||||
TileParameters
|
||||
} from './worker_source';
|
||||
import {getImageData, isImageBitmap} from '../util/util';
|
||||
|
||||
export class RasterDEMTileWorkerSource {
|
||||
actor: Actor;
|
||||
loaded: {[_: string]: DEMData};
|
||||
|
||||
constructor() {
|
||||
this.loaded = {};
|
||||
}
|
||||
|
||||
async loadTile(params: WorkerDEMTileParameters): Promise<DEMData | null> {
|
||||
const {uid, encoding, rawImageData, redFactor, greenFactor, blueFactor, baseShift} = params;
|
||||
const width = rawImageData.width + 2;
|
||||
const height = rawImageData.height + 2;
|
||||
const imagePixels: RGBAImage | ImageData = isImageBitmap(rawImageData) ?
|
||||
new RGBAImage({width, height}, await getImageData(rawImageData, -1, -1, width, height)) :
|
||||
rawImageData;
|
||||
const dem = new DEMData(uid, imagePixels, encoding, redFactor, greenFactor, blueFactor, baseShift);
|
||||
this.loaded = this.loaded || {};
|
||||
this.loaded[uid] = dem;
|
||||
return dem;
|
||||
}
|
||||
|
||||
removeTile(params: TileParameters) {
|
||||
const loaded = this.loaded,
|
||||
uid = params.uid;
|
||||
if (loaded && loaded[uid]) {
|
||||
delete loaded[uid];
|
||||
}
|
||||
}
|
||||
}
|
||||
405
node_modules/maplibre-gl/src/source/raster_tile_source.test.ts
generated
vendored
Normal file
405
node_modules/maplibre-gl/src/source/raster_tile_source.test.ts
generated
vendored
Normal file
@@ -0,0 +1,405 @@
|
||||
import {describe, beforeEach, afterEach, test, expect, vi, it} from 'vitest';
|
||||
import {RasterTileSource} from './raster_tile_source';
|
||||
import {OverscaledTileID} from '../tile/tile_id';
|
||||
import {RequestManager} from '../util/request_manager';
|
||||
import {type Dispatcher} from '../util/dispatcher';
|
||||
import {fakeServer, type FakeServer} from 'nise';
|
||||
import {type Tile} from '../tile/tile';
|
||||
import {sleep, stubAjaxGetImage, waitForEvent} from '../util/test/util';
|
||||
import {type MapSourceDataEvent} from '../ui/events';
|
||||
|
||||
function createSource(options, transformCallback?) {
|
||||
const source = new RasterTileSource('id', options, {send() {}} as any as Dispatcher, options.eventedParent);
|
||||
source.onAdd({
|
||||
transform: {angle: 0, pitch: 0, showCollisionBoxes: false},
|
||||
_getMapId: () => 1,
|
||||
_requestManager: new RequestManager(transformCallback),
|
||||
getPixelRatio() { return 1; }
|
||||
} as any);
|
||||
|
||||
source.on('error', () => { }); // to prevent console log of errors
|
||||
|
||||
return source;
|
||||
}
|
||||
|
||||
describe('RasterTileSource', () => {
|
||||
let server: FakeServer;
|
||||
beforeEach(() => {
|
||||
global.fetch = null;
|
||||
server = fakeServer.create();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
server.restore();
|
||||
});
|
||||
|
||||
test('transforms request for TileJSON URL', () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
}));
|
||||
const transformSpy = vi.fn().mockImplementation((url) => {
|
||||
return {url};
|
||||
});
|
||||
|
||||
createSource({url: '/source.json'}, transformSpy);
|
||||
server.respond();
|
||||
|
||||
expect(transformSpy.mock.calls[0][0]).toBe('/source.json');
|
||||
expect(transformSpy.mock.calls[0][1]).toBe('Source');
|
||||
});
|
||||
|
||||
test('can asynchronously transform request for TileJSON URL', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
}));
|
||||
const source = createSource({url: '/source.json'}, async (url) => ({
|
||||
url,
|
||||
headers: {Authorization: 'Bearer token'}
|
||||
}));
|
||||
const promise = waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.sourceDataType === 'metadata');
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
expect(server.requests[0].url).toBe('/source.json');
|
||||
expect(server.requests[0].requestHeaders.Authorization).toBe('Bearer token');
|
||||
});
|
||||
|
||||
test('fires "error" event if TileJSON request fails', async () => {
|
||||
server.respondWith('/source.json', [404, {}, '']);
|
||||
|
||||
const source = createSource({url: '/source.json'});
|
||||
const errorEvent = waitForEvent(source, 'error', (e) => e.error.status === 404);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
|
||||
await expect(errorEvent).resolves.toBeDefined();
|
||||
expect(source.loaded()).toBe(true);
|
||||
});
|
||||
|
||||
test('respects TileJSON.bounds', async () => {
|
||||
const source = createSource({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
});
|
||||
|
||||
await waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.sourceDataType === 'metadata');
|
||||
|
||||
expect(source.hasTile(new OverscaledTileID(8, 0, 8, 96, 132))).toBeFalsy();
|
||||
expect(source.hasTile(new OverscaledTileID(8, 0, 8, 95, 132))).toBeTruthy();
|
||||
});
|
||||
|
||||
test('does not error on invalid bounds', async () => {
|
||||
const source = createSource({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, 91]
|
||||
});
|
||||
|
||||
await waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.sourceDataType === 'metadata');
|
||||
|
||||
expect(source.tileBounds.bounds).toEqual({_sw: {lng: -47, lat: -7}, _ne: {lng: -45, lat: 90}});
|
||||
});
|
||||
|
||||
test('respects TileJSON.bounds when loaded from TileJSON', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
}));
|
||||
const source = createSource({url: '/source.json'});
|
||||
|
||||
const promise = waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.sourceDataType === 'metadata');
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
|
||||
await promise;
|
||||
expect(source.hasTile(new OverscaledTileID(8, 0, 8, 96, 132))).toBeFalsy();
|
||||
expect(source.hasTile(new OverscaledTileID(8, 0, 8, 95, 132))).toBeTruthy();
|
||||
});
|
||||
|
||||
test('transforms tile urls before requesting', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
}));
|
||||
const source = createSource({url: '/source.json'});
|
||||
const transformSpy = vi.spyOn(source.map._requestManager, 'transformRequest');
|
||||
const promise = waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.sourceDataType === 'metadata');
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
loadVectorData () {},
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
source.loadTile(tile);
|
||||
expect(transformSpy).toHaveBeenCalledTimes(1);
|
||||
expect(transformSpy.mock.calls[0][0]).toBe('http://example.com/10/5/5.png');
|
||||
expect(transformSpy.mock.calls[0][1]).toBe('Tile');
|
||||
});
|
||||
|
||||
test('can asynchronously transform tile request', async () => {
|
||||
server.respondWith('http://example.com/10/5/5.png',
|
||||
[200, {'Content-Type': 'image/png', 'Content-Length': 1, 'Cache-Control': 'max-age=100'}, '0']
|
||||
);
|
||||
|
||||
const source = createSource({
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png']
|
||||
}, async (url) => ({
|
||||
url,
|
||||
headers: {Authorization: 'Bearer token'}
|
||||
}));
|
||||
source.map.painter = {context: {}, getTileTexture: () => { return {update: () => {}}; }} as any;
|
||||
await waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.sourceDataType === 'metadata');
|
||||
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
loadVectorData () {},
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
const promise = source.loadTile(tile);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
expect(server.requests[0].url).toBe('http://example.com/10/5/5.png');
|
||||
expect(server.requests[0].requestHeaders.Authorization).toBe('Bearer token');
|
||||
expect(tile.state).toBe('loaded');
|
||||
});
|
||||
|
||||
test('HttpImageElement used to get image when refreshExpiredTiles is false', async () => {
|
||||
stubAjaxGetImage(undefined);
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
}));
|
||||
const source = createSource({url: '/source.json'});
|
||||
source.map.painter = {context: {}, getTileTexture: () => { return {update: () => {}}; }} as any;
|
||||
source.map._refreshExpiredTiles = false;
|
||||
|
||||
const imageConstructorSpy = vi.spyOn(global, 'Image');
|
||||
const promise = waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.sourceDataType === 'metadata');
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading'
|
||||
} as any as Tile;
|
||||
await source.loadTile(tile);
|
||||
expect(imageConstructorSpy).toHaveBeenCalledTimes(1);
|
||||
expect(tile.state).toBe('loaded');
|
||||
});
|
||||
|
||||
test('supports updating tiles', () => {
|
||||
const source = createSource({url: '/source.json'});
|
||||
source.setTiles(['http://example.com/{z}/{x}/{y}.png?updated=true']);
|
||||
|
||||
source.on('data', (e) => {
|
||||
if (e.sourceDataType === 'metadata') {
|
||||
expect(source.tiles[0]).toBe('http://example.com/{z}/{x}/{y}.png?updated=true');
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
test('cancels TileJSON request if removed', async () => {
|
||||
const source = createSource({url: '/source.json'});
|
||||
await sleep(0);
|
||||
source.onRemove();
|
||||
expect((server.lastRequest as any).aborted).toBe(true);
|
||||
});
|
||||
|
||||
test('supports url property updates', async () => {
|
||||
server.respondWith('http://localhost:2900/source2.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
}));
|
||||
|
||||
const source = createSource({
|
||||
url: 'http://localhost:2900/source.json'
|
||||
});
|
||||
await sleep(0);
|
||||
const errorHandler = vi.fn();
|
||||
source.on('error', errorHandler);
|
||||
source.setUrl('http://localhost:2900/source2.json');
|
||||
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
|
||||
await waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.sourceDataType === 'metadata');
|
||||
|
||||
expect(server.requests.length).toBe(2);
|
||||
expect(server.requests[0].aborted).toBe(true);
|
||||
expect(source.serialize()).toEqual({
|
||||
type: 'raster',
|
||||
url: 'http://localhost:2900/source2.json'
|
||||
});
|
||||
expect(errorHandler).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('serializes options', () => {
|
||||
const source = createSource({
|
||||
tiles: ['http://localhost:2900/raster/{z}/{x}/{y}.png'],
|
||||
minzoom: 2,
|
||||
maxzoom: 10
|
||||
});
|
||||
expect(source.serialize()).toStrictEqual({
|
||||
type: 'raster',
|
||||
tiles: ['http://localhost:2900/raster/{z}/{x}/{y}.png'],
|
||||
minzoom: 2,
|
||||
maxzoom: 10
|
||||
});
|
||||
});
|
||||
|
||||
test('Tile expiry data is set when "Cache-Control" is set but not "Expires"', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
}));
|
||||
server.respondWith('http://example.com/10/5/5.png',
|
||||
[200, {'Content-Type': 'image/png', 'Content-Length': 1, 'Cache-Control': 'max-age=100'}, '0']
|
||||
);
|
||||
const source = createSource({url: '/source.json'});
|
||||
source.map.painter = {context: {}, getTileTexture: () => { return {update: () => {}}; }} as any;
|
||||
source.map._refreshExpiredTiles = true;
|
||||
|
||||
const promise = waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.sourceDataType === 'metadata');
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
const expiryDataSpy = vi.spyOn(tile, 'setExpiryData');
|
||||
const tilePromise = source.loadTile(tile);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await tilePromise;
|
||||
expect(tile.state).toBe('loaded');
|
||||
expect(expiryDataSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('Tile expiry data is set when "Expires" is set but not "Cache-Control"', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
}));
|
||||
server.respondWith('http://example.com/10/5/5.png',
|
||||
[200, {'Content-Type': 'image/png', 'Content-Length': 1, 'Expires': 'Wed, 21 Oct 2015 07:28:00 GMT'}, '0']
|
||||
);
|
||||
const source = createSource({url: '/source.json'});
|
||||
source.map.painter = {context: {}, getTileTexture: () => { return {update: () => {}}; }} as any;
|
||||
source.map._refreshExpiredTiles = true;
|
||||
|
||||
const promise = waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.sourceDataType === 'metadata');
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
const expiryDataSpy = vi.spyOn(tile, 'setExpiryData');
|
||||
const tilePromise = source.loadTile(tile);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await tilePromise;
|
||||
expect(tile.state).toBe('loaded');
|
||||
expect(expiryDataSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('Tile expiry data is set when "Expires" is set and "Cache-Control" is an empty string', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
}));
|
||||
server.respondWith('http://example.com/10/5/5.png',
|
||||
[200, {'Content-Type': 'image/png', 'Content-Length': 1, 'Cache-Control': '', 'Expires': 'Wed, 21 Oct 2015 07:28:00 GMT'}, '0']
|
||||
);
|
||||
const source = createSource({url: '/source.json'});
|
||||
source.map.painter = {context: {}, getTileTexture: () => { return {update: () => {}}; }} as any;
|
||||
source.map._refreshExpiredTiles = true;
|
||||
|
||||
const promise = waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.sourceDataType === 'metadata');
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
const expiryDataSpy = vi.spyOn(tile, 'setExpiryData');
|
||||
const tilePromise = source.loadTile(tile);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await tilePromise;
|
||||
expect(tile.state).toBe('loaded');
|
||||
expect(expiryDataSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('does not throw when tile is aborted', async () => {
|
||||
const source = createSource({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
});
|
||||
|
||||
await waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.sourceDataType === 'metadata');
|
||||
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(5, 0, 5, 31, 5),
|
||||
state: 'loading',
|
||||
loadVectorData() {},
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
const loadPromise = source.loadTile(tile);
|
||||
await sleep(0);
|
||||
|
||||
tile.abortController.abort();
|
||||
tile.aborted = true;
|
||||
|
||||
await expect(loadPromise).resolves.toBeUndefined();
|
||||
expect(tile.state).toBe('unloaded');
|
||||
});
|
||||
});
|
||||
235
node_modules/maplibre-gl/src/source/raster_tile_source.ts
generated
vendored
Normal file
235
node_modules/maplibre-gl/src/source/raster_tile_source.ts
generated
vendored
Normal file
@@ -0,0 +1,235 @@
|
||||
import {extend, pick} from '../util/util';
|
||||
|
||||
import {ImageRequest} from '../util/image_request';
|
||||
|
||||
import {ResourceType} from '../util/request_manager';
|
||||
import {Event, ErrorEvent, Evented} from '../util/evented';
|
||||
import {loadTileJson} from './load_tilejson';
|
||||
import {TileBounds} from '../tile/tile_bounds';
|
||||
import {Texture} from '../render/texture';
|
||||
import {isAbortError} from '../util/abort_error';
|
||||
|
||||
import type {Source} from './source';
|
||||
import type {OverscaledTileID} from '../tile/tile_id';
|
||||
import type {Map} from '../ui/map';
|
||||
import type {Dispatcher} from '../util/dispatcher';
|
||||
import type {Tile} from '../tile/tile';
|
||||
import type {
|
||||
RasterSourceSpecification,
|
||||
RasterDEMSourceSpecification
|
||||
} from '@maplibre/maplibre-gl-style-spec';
|
||||
|
||||
/**
|
||||
* A source containing raster tiles (See the [raster source documentation](https://maplibre.org/maplibre-style-spec/sources/#raster) for detailed documentation of options.)
|
||||
*
|
||||
* @group Sources
|
||||
*
|
||||
* \> ℹ️ **Note:** The default `tileSize` is `512`. If your tile provider (such as OpenStreetMap or Stadia Maps) serves 256px tiles, set `tileSize: 256` manually to avoid blurry rendering due to upscaling.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* map.addSource('raster-source', {
|
||||
* 'type': 'raster',
|
||||
* 'tiles': ['https://tiles.stadiamaps.com/tiles/stamen_watercolor/{z}/{x}/{y}.jpg'],
|
||||
* 'tileSize': 256, // Set this to match tile server output to avoid blurry rendering
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* map.addSource('wms-test-source', {
|
||||
* 'type': 'raster',
|
||||
* // use the tiles option to specify a WMS tile source URL
|
||||
* 'tiles': [
|
||||
* 'https://img.nj.gov/imagerywms/Natural2015?bbox={bbox-epsg-3857}&format=image/png&service=WMS&version=1.1.1&request=GetMap&srs=EPSG:3857&transparent=true&width=256&height=256&layers=Natural2015'
|
||||
* ],
|
||||
* 'tileSize': 256 // Important for WMS if tiles are 256px
|
||||
* });
|
||||
* ```
|
||||
* @see [Add a raster tile source](https://maplibre.org/maplibre-gl-js/docs/examples/map-tiles/)
|
||||
* @see [Add a WMS source](https://maplibre.org/maplibre-gl-js/docs/examples/add-a-wms-source/)
|
||||
* @see [Display a satellite map](https://maplibre.org/maplibre-gl-js/docs/examples/display-a-satellite-map/)
|
||||
*/
|
||||
export class RasterTileSource extends Evented implements Source {
|
||||
type: 'raster' | 'raster-dem';
|
||||
id: string;
|
||||
minzoom: number;
|
||||
maxzoom: number;
|
||||
url: string;
|
||||
scheme: string;
|
||||
tileSize: number;
|
||||
|
||||
bounds: [number, number, number, number];
|
||||
tileBounds: TileBounds;
|
||||
roundZoom: boolean;
|
||||
dispatcher: Dispatcher;
|
||||
map: Map;
|
||||
tiles: Array<string>;
|
||||
|
||||
_loaded: boolean;
|
||||
_options: RasterSourceSpecification | RasterDEMSourceSpecification;
|
||||
_tileJSONRequest: AbortController;
|
||||
|
||||
constructor(id: string, options: RasterSourceSpecification | RasterDEMSourceSpecification, dispatcher: Dispatcher, eventedParent: Evented) {
|
||||
super();
|
||||
this.id = id;
|
||||
this.dispatcher = dispatcher;
|
||||
this.setEventedParent(eventedParent);
|
||||
|
||||
this.type = 'raster';
|
||||
this.minzoom = 0;
|
||||
this.maxzoom = 22;
|
||||
this.roundZoom = true;
|
||||
this.scheme = 'xyz';
|
||||
this.tileSize = 512;
|
||||
this._loaded = false;
|
||||
|
||||
this._options = extend({type: 'raster'}, options);
|
||||
extend(this, pick(options, ['url', 'scheme', 'tileSize']));
|
||||
}
|
||||
|
||||
async load(sourceDataChanged: boolean = false) {
|
||||
this._loaded = false;
|
||||
this.fire(new Event('dataloading', {dataType: 'source'}));
|
||||
this._tileJSONRequest = new AbortController();
|
||||
try {
|
||||
const tileJSON = await loadTileJson(this._options, this.map._requestManager, this._tileJSONRequest, this.map._ownerWindow);
|
||||
this._tileJSONRequest = null;
|
||||
this._loaded = true;
|
||||
if (tileJSON) {
|
||||
extend(this, tileJSON);
|
||||
if (tileJSON.bounds) this.tileBounds = new TileBounds(tileJSON.bounds, this.minzoom, this.maxzoom);
|
||||
|
||||
// `content` is included here to prevent a race condition where `Style._updateSources` is called
|
||||
// before the TileJSON arrives. this makes sure the tiles needed are loaded once TileJSON arrives
|
||||
// ref: https://github.com/mapbox/mapbox-gl-js/pull/4347#discussion_r104418088
|
||||
this.fire(new Event('data', {dataType: 'source', sourceDataType: 'metadata'}));
|
||||
this.fire(new Event('data', {dataType: 'source', sourceDataType: 'content', sourceDataChanged}));
|
||||
}
|
||||
} catch (err) {
|
||||
this._tileJSONRequest = null;
|
||||
this._loaded = true; // let's pretend it's loaded so the source will be ignored
|
||||
|
||||
// only fire error event if it is not due to aborting the request
|
||||
if (!isAbortError(err)) {
|
||||
this.fire(new ErrorEvent(err));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
loaded(): boolean {
|
||||
return this._loaded;
|
||||
}
|
||||
|
||||
onAdd(map: Map) {
|
||||
this.map = map;
|
||||
this.load();
|
||||
}
|
||||
|
||||
onRemove() {
|
||||
if (this._tileJSONRequest) {
|
||||
this._tileJSONRequest.abort();
|
||||
this._tileJSONRequest = null;
|
||||
}
|
||||
}
|
||||
|
||||
setSourceProperty(callback: Function) {
|
||||
if (this._tileJSONRequest) {
|
||||
this._tileJSONRequest.abort();
|
||||
this._tileJSONRequest = null;
|
||||
}
|
||||
|
||||
callback();
|
||||
|
||||
this.load(true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the source `tiles` property and re-renders the map.
|
||||
*
|
||||
* @param tiles - An array of one or more tile source URLs, as in the raster tiles spec (See the [Style Specification](https://maplibre.org/maplibre-style-spec/)
|
||||
*/
|
||||
setTiles(tiles: Array<string>): this {
|
||||
this.setSourceProperty(() => {
|
||||
this._options.tiles = tiles;
|
||||
});
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the source `url` property and re-renders the map.
|
||||
*
|
||||
* @param url - A URL to a TileJSON resource. Supported protocols are `http:` and `https:`.
|
||||
*/
|
||||
setUrl(url: string): this {
|
||||
this.setSourceProperty(() => {
|
||||
this.url = url;
|
||||
this._options.url = url;
|
||||
});
|
||||
|
||||
return this;
|
||||
}
|
||||
|
||||
serialize() {
|
||||
return extend({}, this._options);
|
||||
}
|
||||
|
||||
hasTile(tileID: OverscaledTileID) {
|
||||
return !this.tileBounds || this.tileBounds.contains(tileID.canonical);
|
||||
}
|
||||
|
||||
async loadTile(tile: Tile): Promise<void> {
|
||||
const url = tile.tileID.canonical.url(this.tiles, this.map.getPixelRatio(), this.scheme);
|
||||
tile.abortController = new AbortController();
|
||||
try {
|
||||
const response = await ImageRequest.getImage(await this.map._requestManager.transformRequest(url, ResourceType.Tile), tile.abortController, this.map._refreshExpiredTiles);
|
||||
delete tile.abortController;
|
||||
if (tile.aborted) {
|
||||
tile.state = 'unloaded';
|
||||
return;
|
||||
}
|
||||
if (response && response.data) {
|
||||
if (this.map._refreshExpiredTiles && (response.cacheControl || response.expires)) {
|
||||
tile.setExpiryData({cacheControl: response.cacheControl, expires: response.expires});
|
||||
}
|
||||
const context = this.map.painter.context;
|
||||
const gl = context.gl;
|
||||
const img = response.data;
|
||||
tile.texture = this.map.painter.getTileTexture(img.width);
|
||||
if (tile.texture) {
|
||||
tile.texture.update(img, {useMipmap: true});
|
||||
} else {
|
||||
tile.texture = new Texture(context, img, gl.RGBA, {useMipmap: true});
|
||||
tile.texture.bind(gl.LINEAR, gl.CLAMP_TO_EDGE, gl.LINEAR_MIPMAP_NEAREST);
|
||||
}
|
||||
tile.state = 'loaded';
|
||||
}
|
||||
} catch (err) {
|
||||
delete tile.abortController;
|
||||
if (tile.aborted) {
|
||||
tile.state = 'unloaded';
|
||||
} else if (err) {
|
||||
tile.state = 'errored';
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async abortTile(tile: Tile) {
|
||||
if (tile.abortController) {
|
||||
tile.abortController.abort();
|
||||
delete tile.abortController;
|
||||
}
|
||||
}
|
||||
|
||||
async unloadTile(tile: Tile) {
|
||||
if (tile.texture) {
|
||||
this.map.painter.saveTileTexture(tile.texture);
|
||||
}
|
||||
}
|
||||
|
||||
hasTransition() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
170
node_modules/maplibre-gl/src/source/rtl_text_plugin_main_thread.test.ts
generated
vendored
Normal file
170
node_modules/maplibre-gl/src/source/rtl_text_plugin_main_thread.test.ts
generated
vendored
Normal file
@@ -0,0 +1,170 @@
|
||||
import {describe, beforeEach, it, afterEach, expect, vi, type MockInstance} from 'vitest';
|
||||
import {type FakeServer, fakeServer} from 'nise';
|
||||
import {rtlMainThreadPluginFactory} from './rtl_text_plugin_main_thread';
|
||||
import {sleep} from '../util/test/util';
|
||||
import {browser} from '../util/browser';
|
||||
import {Dispatcher} from '../util/dispatcher';
|
||||
import {type PluginState} from './rtl_text_plugin_status';
|
||||
import {MessageType} from '../util/actor_messages';
|
||||
const rtlMainThreadPlugin = rtlMainThreadPluginFactory();
|
||||
|
||||
describe('RTLMainThreadPlugin', () => {
|
||||
let server: FakeServer;
|
||||
let broadcastSpy: MockInstance;
|
||||
const url = 'http://example.com/plugin';
|
||||
const failedToLoadMessage = `RTL Text Plugin failed to import scripts from ${url}`;
|
||||
const SyncRTLPluginStateMessageName = MessageType.syncRTLPluginState;
|
||||
|
||||
beforeEach(() => {
|
||||
server = fakeServer.create();
|
||||
global.fetch = null;
|
||||
// Reset the singleton instance before each test
|
||||
rtlMainThreadPlugin.clearRTLTextPlugin();
|
||||
broadcastSpy = vi.spyOn(Dispatcher.prototype, 'broadcast').mockImplementation(() => { return Promise.resolve({} as any); });
|
||||
});
|
||||
|
||||
function broadcastMockSuccess(message: MessageType, payload: PluginState): Promise<PluginState[]> {
|
||||
if (message === SyncRTLPluginStateMessageName) {
|
||||
if (payload.pluginStatus === 'loading') {
|
||||
const resultState: PluginState = {
|
||||
pluginStatus: 'loaded',
|
||||
pluginURL: payload.pluginURL
|
||||
};
|
||||
return Promise.resolve([resultState]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function broadcastMockSuccessDefer(message: MessageType, payload: PluginState): Promise<PluginState[]> {
|
||||
if (message === SyncRTLPluginStateMessageName) {
|
||||
if (payload.pluginStatus === 'deferred') {
|
||||
const resultState: PluginState = {
|
||||
pluginStatus: 'deferred',
|
||||
pluginURL: payload.pluginURL
|
||||
};
|
||||
return Promise.resolve([resultState]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function broadcastMockFailure(message: MessageType, payload: PluginState): Promise<PluginState[]> {
|
||||
if (message === SyncRTLPluginStateMessageName) {
|
||||
if (payload.pluginStatus === 'loading') {
|
||||
return Promise.reject(failedToLoadMessage);
|
||||
}
|
||||
} else {
|
||||
return Promise.resolve([]);
|
||||
}
|
||||
}
|
||||
|
||||
afterEach(() => {
|
||||
server.restore();
|
||||
broadcastSpy.mockRestore();
|
||||
});
|
||||
|
||||
it('should get the RTL text plugin status', () => {
|
||||
const status = rtlMainThreadPlugin.getRTLTextPluginStatus();
|
||||
expect(status).toBe('unavailable');
|
||||
});
|
||||
|
||||
it('should set the RTL text plugin and download it', async () => {
|
||||
broadcastSpy = vi.spyOn(Dispatcher.prototype, 'broadcast').mockImplementation(broadcastMockSuccess as any);
|
||||
await rtlMainThreadPlugin.setRTLTextPlugin(url);
|
||||
expect(rtlMainThreadPlugin.url).toEqual(url);
|
||||
expect(rtlMainThreadPlugin.status).toBe('loaded');
|
||||
});
|
||||
|
||||
it('should set the RTL text plugin but defer downloading', async () => {
|
||||
await rtlMainThreadPlugin.setRTLTextPlugin(url, true);
|
||||
expect(rtlMainThreadPlugin.status).toBe('deferred');
|
||||
expect(broadcastSpy).toHaveBeenCalledWith(SyncRTLPluginStateMessageName, {pluginStatus: 'deferred', pluginURL: url});
|
||||
});
|
||||
|
||||
it('should throw if the plugin is already set', async () => {
|
||||
await rtlMainThreadPlugin.setRTLTextPlugin(url, true);
|
||||
await expect(rtlMainThreadPlugin.setRTLTextPlugin(url)).rejects.toThrow('setRTLTextPlugin cannot be called multiple times.');
|
||||
});
|
||||
|
||||
it('should throw if the plugin url is not set', async () => {
|
||||
const spy = vi.spyOn(browser, 'resolveURL').mockImplementation(() => { return ''; });
|
||||
await expect(rtlMainThreadPlugin.setRTLTextPlugin(null)).rejects.toThrow('requested url null is invalid');
|
||||
spy.mockRestore();
|
||||
});
|
||||
|
||||
it('should be in error state if download fails', async () => {
|
||||
broadcastSpy = vi.spyOn(Dispatcher.prototype, 'broadcast').mockImplementation(broadcastMockFailure as any);
|
||||
const resultPromise = rtlMainThreadPlugin.setRTLTextPlugin(url);
|
||||
await expect(resultPromise).rejects.toBe(failedToLoadMessage);
|
||||
expect(rtlMainThreadPlugin.url).toEqual(url);
|
||||
expect(rtlMainThreadPlugin.status).toBe('error');
|
||||
});
|
||||
|
||||
it('should lazy load the plugin if deferred', async () => {
|
||||
// use success spy to make sure test case does not throw exception
|
||||
const deferredSpy = vi.spyOn(Dispatcher.prototype, 'broadcast').mockImplementation(broadcastMockSuccessDefer as any);
|
||||
await rtlMainThreadPlugin.setRTLTextPlugin(url, true);
|
||||
expect(deferredSpy).toHaveBeenCalledTimes(1);
|
||||
expect(deferredSpy).toHaveBeenCalledWith(SyncRTLPluginStateMessageName, {pluginStatus: 'deferred', pluginURL: url});
|
||||
expect(rtlMainThreadPlugin.status).toBe('deferred');
|
||||
deferredSpy.mockRestore();
|
||||
|
||||
// this is really a fire and forget
|
||||
broadcastSpy = vi.spyOn(Dispatcher.prototype, 'broadcast').mockImplementation(broadcastMockSuccess as any);
|
||||
rtlMainThreadPlugin.lazyLoad();
|
||||
await sleep(1);
|
||||
|
||||
// 'loading'
|
||||
expect(broadcastSpy).toHaveBeenCalledWith(SyncRTLPluginStateMessageName, {pluginStatus: 'loading', pluginURL: url});
|
||||
expect(broadcastSpy).toHaveBeenCalledTimes(1);
|
||||
|
||||
// second call to lazyLoad should not change anything
|
||||
rtlMainThreadPlugin.lazyLoad();
|
||||
expect(broadcastSpy).toHaveBeenCalledTimes(1);
|
||||
|
||||
expect(rtlMainThreadPlugin.status).toBe('loaded');
|
||||
|
||||
// 3rd call to lazyLoad should not change anything
|
||||
rtlMainThreadPlugin.lazyLoad();
|
||||
expect(rtlMainThreadPlugin.status).toBe('loaded');
|
||||
expect(broadcastSpy).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
it('should set status to requested if RTL plugin was not set', async () => {
|
||||
rtlMainThreadPlugin.lazyLoad();
|
||||
expect(rtlMainThreadPlugin.status).toBe('requested');
|
||||
});
|
||||
|
||||
it('should immediately download if RTL plugin was already requested, ignoring deferred:true', async () => {
|
||||
broadcastSpy = vi.spyOn(Dispatcher.prototype, 'broadcast').mockImplementation(broadcastMockSuccess as any);
|
||||
rtlMainThreadPlugin.lazyLoad();
|
||||
expect(rtlMainThreadPlugin.status).toBe('requested');
|
||||
await sleep(1);
|
||||
|
||||
// notice even when deferred is true, it should download because already requested
|
||||
await rtlMainThreadPlugin.setRTLTextPlugin(url, true);
|
||||
expect(rtlMainThreadPlugin.status).toBe('loaded');
|
||||
expect(broadcastSpy).toHaveBeenCalledWith(SyncRTLPluginStateMessageName, {pluginStatus: 'loading', pluginURL: url});
|
||||
});
|
||||
|
||||
it('should allow multiple calls to lazyLoad', async () => {
|
||||
rtlMainThreadPlugin.lazyLoad();
|
||||
expect(rtlMainThreadPlugin.status).toBe('requested');
|
||||
rtlMainThreadPlugin.lazyLoad();
|
||||
expect(rtlMainThreadPlugin.status).toBe('requested');
|
||||
});
|
||||
|
||||
it('should be in error state if lazyLoad fails', async () => {
|
||||
broadcastSpy = vi.spyOn(Dispatcher.prototype, 'broadcast').mockImplementation(broadcastMockSuccessDefer);
|
||||
const resultPromise = rtlMainThreadPlugin.setRTLTextPlugin(url, true);
|
||||
await expect(resultPromise).resolves.toBeUndefined();
|
||||
|
||||
expect(rtlMainThreadPlugin.status).toBe('deferred');
|
||||
|
||||
// the next one should fail
|
||||
broadcastSpy = vi.spyOn(Dispatcher.prototype, 'broadcast').mockImplementation(broadcastMockFailure as any);
|
||||
|
||||
await expect(rtlMainThreadPlugin._requestImport()).rejects.toBe(failedToLoadMessage);
|
||||
expect(rtlMainThreadPlugin.url).toEqual(url);
|
||||
expect(rtlMainThreadPlugin.status).toBe('error');
|
||||
});
|
||||
});
|
||||
89
node_modules/maplibre-gl/src/source/rtl_text_plugin_main_thread.ts
generated
vendored
Normal file
89
node_modules/maplibre-gl/src/source/rtl_text_plugin_main_thread.ts
generated
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
|
||||
import {browser} from '../util/browser';
|
||||
import {Event, Evented} from '../util/evented';
|
||||
import {type RTLPluginStatus, RTLPluginLoadedEventName, type PluginState} from './rtl_text_plugin_status';
|
||||
import {type Dispatcher, getGlobalDispatcher} from '../util/dispatcher';
|
||||
import {MessageType} from '../util/actor_messages';
|
||||
|
||||
class RTLMainThreadPlugin extends Evented {
|
||||
status: RTLPluginStatus = 'unavailable';
|
||||
url: string = null;
|
||||
dispatcher: Dispatcher = getGlobalDispatcher();
|
||||
|
||||
/** Sync RTL plugin state by broadcasting a message to the worker */
|
||||
_syncState(statusToSend: RTLPluginStatus): Promise<PluginState[]> {
|
||||
this.status = statusToSend;
|
||||
return this.dispatcher.broadcast(MessageType.syncRTLPluginState, {pluginStatus: statusToSend, pluginURL: this.url})
|
||||
.catch((e: any) => {
|
||||
this.status = 'error';
|
||||
throw e;
|
||||
});
|
||||
}
|
||||
|
||||
/** This one is exposed to outside */
|
||||
getRTLTextPluginStatus(): RTLPluginStatus {
|
||||
return this.status;
|
||||
}
|
||||
|
||||
clearRTLTextPlugin(): void {
|
||||
this.status = 'unavailable';
|
||||
this.url = null;
|
||||
}
|
||||
|
||||
async setRTLTextPlugin(url: string, deferred: boolean = false): Promise<void> {
|
||||
if (this.url) {
|
||||
// error
|
||||
throw new Error('setRTLTextPlugin cannot be called multiple times.');
|
||||
}
|
||||
|
||||
this.url = browser.resolveURL(url);
|
||||
if (!this.url) {
|
||||
throw new Error(`requested url ${url} is invalid`);
|
||||
}
|
||||
if (this.status === 'unavailable') {
|
||||
|
||||
// from initial state:
|
||||
if (deferred) {
|
||||
|
||||
this.status = 'deferred';
|
||||
// fire and forget: in this case it does not need wait for the broadcasting result
|
||||
// it is important to sync the deferred status once because
|
||||
// symbol_bucket will be checking it in worker
|
||||
this._syncState(this.status);
|
||||
|
||||
} else {
|
||||
return this._requestImport();
|
||||
}
|
||||
|
||||
} else if (this.status === 'requested') {
|
||||
return this._requestImport();
|
||||
}
|
||||
}
|
||||
|
||||
/** Send a message to worker which will import the RTL plugin script */
|
||||
async _requestImport() : Promise<void> {
|
||||
|
||||
// all errors/exceptions will be handled by _syncState
|
||||
await this._syncState('loading');
|
||||
this.status = 'loaded';
|
||||
this.fire(new Event(RTLPluginLoadedEventName));
|
||||
}
|
||||
|
||||
/** Start a lazy loading process of RTL plugin */
|
||||
lazyLoad(): void {
|
||||
if (this.status === 'unavailable') {
|
||||
this.status = 'requested';
|
||||
} else if (this.status === 'deferred') {
|
||||
this._requestImport();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let rtlMainThreadPlugin: RTLMainThreadPlugin = null;
|
||||
|
||||
export function rtlMainThreadPluginFactory(): RTLMainThreadPlugin {
|
||||
if (!rtlMainThreadPlugin) {
|
||||
rtlMainThreadPlugin = new RTLMainThreadPlugin();
|
||||
}
|
||||
return rtlMainThreadPlugin;
|
||||
}
|
||||
33
node_modules/maplibre-gl/src/source/rtl_text_plugin_status.ts
generated
vendored
Normal file
33
node_modules/maplibre-gl/src/source/rtl_text_plugin_status.ts
generated
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
/**
 * The possible options for the plugin's status
 *
 * `unavailable`: Not loaded.
 *
 * `deferred`: The plugin URL has been specified, but loading has been deferred.
 *
 * `requested`: at least one tile needs RTL to render, but the plugin has not been set
 *
 * `loading`: RTL is in the process of being loaded by worker.
 *
 * `loaded`: The plugin is now loaded
 *
 * `error`: The plugin failed to load
 */
export type RTLPluginStatus =
    'unavailable' |
    'deferred' |
    'requested' |
    'loading' |
    'loaded' |
    'error';

/**
 * The RTL plugin state: the pair of values that is broadcast between the
 * main thread and the workers to keep them in sync.
 */
export type PluginState = {
    // Current lifecycle status of the plugin.
    pluginStatus: RTLPluginStatus;
    // URL the plugin script is (or will be) loaded from.
    pluginURL: string;
};

// Event name fired on the main-thread plugin once the script has loaded.
export const RTLPluginLoadedEventName = 'RTLPluginLoaded';
|
||||
|
||||
119
node_modules/maplibre-gl/src/source/rtl_text_plugin_worker.test.ts
generated
vendored
Normal file
119
node_modules/maplibre-gl/src/source/rtl_text_plugin_worker.test.ts
generated
vendored
Normal file
@@ -0,0 +1,119 @@
|
||||
import {beforeEach, describe, expect, test, vi} from 'vitest';
|
||||
import {type PluginState} from './rtl_text_plugin_status';
|
||||
import {rtlWorkerPlugin} from './rtl_text_plugin_worker';
|
||||
|
||||
describe('RTLWorkerPlugin', () => {
|
||||
beforeEach(() => {
|
||||
// This is a static class, so we need to reset the properties before each test
|
||||
rtlWorkerPlugin.processStyledBidirectionalText = null;
|
||||
rtlWorkerPlugin.processBidirectionalText = null;
|
||||
rtlWorkerPlugin.applyArabicShaping = null;
|
||||
});
|
||||
|
||||
test('should throw if already parsed', () => {
|
||||
const rtlTextPlugin = {
|
||||
applyArabicShaping: vi.fn(),
|
||||
processBidirectionalText: vi.fn(),
|
||||
processStyledBidirectionalText: vi.fn(),
|
||||
};
|
||||
|
||||
rtlWorkerPlugin.setMethods(rtlTextPlugin);
|
||||
expect(() => {
|
||||
rtlWorkerPlugin.setMethods(rtlTextPlugin);
|
||||
}).toThrow('RTL text plugin already registered.');
|
||||
});
|
||||
|
||||
test('should move RTL plugin from unavailable to deferred', async () => {
|
||||
rtlWorkerPlugin.pluginURL = '';
|
||||
rtlWorkerPlugin.pluginStatus = 'unavailable';
|
||||
|
||||
const mockMessage: PluginState = {
|
||||
pluginURL: 'https://somehost/somescript',
|
||||
pluginStatus: 'deferred'
|
||||
};
|
||||
|
||||
await rtlWorkerPlugin.syncState(mockMessage, vi.fn());
|
||||
|
||||
expect(rtlWorkerPlugin.getRTLTextPluginStatus()).toBe('deferred');
|
||||
});
|
||||
|
||||
test('should not change RTL plugin status if already parsed', async () => {
|
||||
const originalUrl = 'https://somehost/somescript1';
|
||||
rtlWorkerPlugin.pluginURL = originalUrl;
|
||||
rtlWorkerPlugin.pluginStatus = 'loaded';
|
||||
rtlWorkerPlugin.setMethods({
|
||||
applyArabicShaping: vi.fn(),
|
||||
processBidirectionalText: vi.fn(),
|
||||
processStyledBidirectionalText: vi.fn(),
|
||||
});
|
||||
const mockMessage: PluginState = {
|
||||
pluginURL: 'https://somehost/somescript2',
|
||||
pluginStatus: 'loading'
|
||||
};
|
||||
|
||||
const workerResult: PluginState = await await rtlWorkerPlugin.syncState(mockMessage, vi.fn());
|
||||
|
||||
expect(rtlWorkerPlugin.getRTLTextPluginStatus()).toBe('loaded');
|
||||
expect(rtlWorkerPlugin.pluginURL).toBe(originalUrl);
|
||||
|
||||
expect(workerResult.pluginStatus).toBe('loaded');
|
||||
expect(workerResult.pluginURL).toBe(originalUrl);
|
||||
});
|
||||
|
||||
test('should do a full cycle of rtl loading synchronously', async () => {
|
||||
const originalUrl = 'https://somehost/somescript1';
|
||||
const loadScriptsMock = vi.fn().mockImplementation((_) => {
|
||||
rtlWorkerPlugin.setMethods({
|
||||
applyArabicShaping: vi.fn(),
|
||||
processBidirectionalText: vi.fn(),
|
||||
processStyledBidirectionalText: vi.fn(),
|
||||
});
|
||||
});
|
||||
|
||||
const workerResult: PluginState = await rtlWorkerPlugin.syncState({
|
||||
pluginURL: originalUrl,
|
||||
pluginStatus: 'loading'
|
||||
}, loadScriptsMock);
|
||||
|
||||
expect(rtlWorkerPlugin.getRTLTextPluginStatus()).toBe('loaded');
|
||||
expect(rtlWorkerPlugin.pluginURL).toBe(originalUrl);
|
||||
expect(workerResult.pluginStatus).toBe('loaded');
|
||||
expect(workerResult.pluginURL).toBe(originalUrl);
|
||||
});
|
||||
|
||||
test('should do a full cycle of rtl loading asynchronously', async () => {
|
||||
const originalUrl = 'https://somehost/somescript1';
|
||||
const loadScriptsMock = vi.fn().mockImplementation((_) => {
|
||||
setTimeout(() => {
|
||||
rtlWorkerPlugin.setMethods({
|
||||
applyArabicShaping: vi.fn(),
|
||||
processBidirectionalText: vi.fn(),
|
||||
processStyledBidirectionalText: vi.fn(),
|
||||
});
|
||||
}, 10);
|
||||
});
|
||||
|
||||
const workerResult: PluginState = await rtlWorkerPlugin.syncState({
|
||||
pluginURL: originalUrl,
|
||||
pluginStatus: 'loading'
|
||||
}, loadScriptsMock);
|
||||
|
||||
expect(rtlWorkerPlugin.getRTLTextPluginStatus()).toBe('loaded');
|
||||
expect(rtlWorkerPlugin.pluginURL).toBe(originalUrl);
|
||||
expect(workerResult.pluginStatus).toBe('loaded');
|
||||
expect(workerResult.pluginURL).toBe(originalUrl);
|
||||
});
|
||||
|
||||
test('should fail loading on timeout', async () => {
|
||||
const originalUrl = 'https://somehost/somescript1';
|
||||
const loadScriptsMock = vi.fn().mockImplementation(() => {});
|
||||
|
||||
(rtlWorkerPlugin as any).TIMEOUT = 1;
|
||||
|
||||
await expect(rtlWorkerPlugin.syncState({
|
||||
pluginURL: originalUrl,
|
||||
pluginStatus: 'loading'
|
||||
}, loadScriptsMock)
|
||||
).rejects.toThrow('RTL Text Plugin failed to import scripts from https://somehost/somescript1');
|
||||
});
|
||||
});
|
||||
88
node_modules/maplibre-gl/src/source/rtl_text_plugin_worker.ts
generated
vendored
Normal file
88
node_modules/maplibre-gl/src/source/rtl_text_plugin_worker.ts
generated
vendored
Normal file
@@ -0,0 +1,88 @@
|
||||
import {type PluginState, type RTLPluginStatus} from './rtl_text_plugin_status';
|
||||
|
||||
export interface RTLTextPlugin {
|
||||
applyArabicShaping: (a: string) => string;
|
||||
processBidirectionalText: ((b: string, a: Array<number>) => Array<string>);
|
||||
processStyledBidirectionalText: ((c: string, b: Array<number>, a: Array<number>) => Array<[string, Array<number>]>);
|
||||
}
|
||||
|
||||
class RTLWorkerPlugin implements RTLTextPlugin {
|
||||
readonly TIMEOUT = 5000;
|
||||
|
||||
applyArabicShaping: (a: string) => string = null;
|
||||
processBidirectionalText: ((b: string, a: Array<number>) => Array<string>) = null;
|
||||
processStyledBidirectionalText: ((c: string, b: Array<number>, a: Array<number>) => Array<[string, Array<number>]>) = null;
|
||||
pluginStatus: RTLPluginStatus = 'unavailable';
|
||||
pluginURL: string = null;
|
||||
loadScriptResolve: () => void = () => {};
|
||||
|
||||
private setState(state: PluginState) {
|
||||
this.pluginStatus = state.pluginStatus;
|
||||
this.pluginURL = state.pluginURL;
|
||||
}
|
||||
|
||||
private getState(): PluginState {
|
||||
return {
|
||||
pluginStatus: this.pluginStatus,
|
||||
pluginURL: this.pluginURL
|
||||
};
|
||||
}
|
||||
|
||||
public setMethods(rtlTextPlugin: RTLTextPlugin) {
|
||||
if (rtlWorkerPlugin.isParsed()) {
|
||||
throw new Error('RTL text plugin already registered.');
|
||||
}
|
||||
this.applyArabicShaping = rtlTextPlugin.applyArabicShaping;
|
||||
this.processBidirectionalText = rtlTextPlugin.processBidirectionalText;
|
||||
this.processStyledBidirectionalText = rtlTextPlugin.processStyledBidirectionalText;
|
||||
this.loadScriptResolve();
|
||||
}
|
||||
|
||||
public isParsed(): boolean {
|
||||
return this.applyArabicShaping != null &&
|
||||
this.processBidirectionalText != null &&
|
||||
this.processStyledBidirectionalText != null;
|
||||
}
|
||||
|
||||
public getRTLTextPluginStatus() {
|
||||
return this.pluginStatus;
|
||||
}
|
||||
|
||||
public async syncState(incomingState: PluginState, importScripts: (url: string) => void): Promise<PluginState> {
|
||||
// Parsed plugin cannot be changed, so just return its current state.
|
||||
if (this.isParsed()) {
|
||||
return this.getState();
|
||||
}
|
||||
|
||||
if (incomingState.pluginStatus !== 'loading') {
|
||||
// simply sync and done
|
||||
this.setState(incomingState);
|
||||
return incomingState;
|
||||
}
|
||||
const urlToLoad = incomingState.pluginURL;
|
||||
const loadScriptPromise = new Promise<void>((resolve) => {
|
||||
this.loadScriptResolve = resolve;
|
||||
});
|
||||
importScripts(urlToLoad);
|
||||
const dontWaitForeverTimeoutPromise = new Promise<void>((resolve) => setTimeout(() => resolve(), this.TIMEOUT));
|
||||
await Promise.race([loadScriptPromise, dontWaitForeverTimeoutPromise]);
|
||||
const complete = this.isParsed();
|
||||
if (complete) {
|
||||
const loadedState: PluginState = {
|
||||
pluginStatus: 'loaded',
|
||||
pluginURL: urlToLoad
|
||||
};
|
||||
this.setState(loadedState);
|
||||
return loadedState;
|
||||
}
|
||||
|
||||
// error case
|
||||
this.setState({
|
||||
pluginStatus: 'error',
|
||||
pluginURL: ''
|
||||
});
|
||||
throw new Error(`RTL Text Plugin failed to import scripts from ${urlToLoad}`);
|
||||
}
|
||||
}
|
||||
|
||||
export const rtlWorkerPlugin = new RTLWorkerPlugin();
|
||||
42
node_modules/maplibre-gl/src/source/source.test.ts
generated
vendored
Normal file
42
node_modules/maplibre-gl/src/source/source.test.ts
generated
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
import {describe, test, expect, vi} from 'vitest';
|
||||
import {Dispatcher} from '../util/dispatcher';
|
||||
import {type SourceClass, addSourceType, create} from './source';
|
||||
|
||||
describe('addSourceType', () => {
    test('adds factory function without a worker url does not dispatch to worker', async () => {
        // Custom source class that records the id it was constructed with.
        const sourceType = vi.fn().mockImplementation(function (id) { this.id = id; }) as SourceClass;

        // expect no call to load worker source
        const spy = vi.spyOn(Dispatcher.prototype, 'broadcast');

        await addSourceType('foo', sourceType);
        expect(spy).not.toHaveBeenCalled();

        create('id', {type: 'foo'} as any, null, null);
        expect(sourceType).toHaveBeenCalled();
    });

    test('create a custom source without an id throws', async () => {
        // vi.fn() never assigns this.id, so create()'s id check must throw.
        const sourceType = vi.fn() as SourceClass;

        // expect no call to load worker source
        const spy = vi.spyOn(Dispatcher.prototype, 'broadcast');

        await addSourceType('foo2', sourceType);
        expect(spy).not.toHaveBeenCalled();

        expect(() => create('id', {type: 'foo2'} as any, null, null)).toThrow();
        expect(sourceType).toHaveBeenCalled();
    });

    test('refuses to add new type over existing name', async () => {
        // Every built-in source type name is reserved and cannot be overridden.
        const sourceType = function () {} as any as SourceClass;
        await expect(addSourceType('canvas', sourceType)).rejects.toThrow();
        await expect(addSourceType('geojson', sourceType)).rejects.toThrow();
        await expect(addSourceType('image', sourceType)).rejects.toThrow();
        await expect(addSourceType('raster', sourceType)).rejects.toThrow();
        await expect(addSourceType('raster-dem', sourceType)).rejects.toThrow();
        await expect(addSourceType('vector', sourceType)).rejects.toThrow();
        await expect(addSourceType('video', sourceType)).rejects.toThrow();
    });
});
|
||||
198
node_modules/maplibre-gl/src/source/source.ts
generated
vendored
Normal file
198
node_modules/maplibre-gl/src/source/source.ts
generated
vendored
Normal file
@@ -0,0 +1,198 @@
|
||||
import {VectorTileSource} from '../source/vector_tile_source';
|
||||
import {RasterTileSource} from '../source/raster_tile_source';
|
||||
import {RasterDEMTileSource} from '../source/raster_dem_tile_source';
|
||||
import {GeoJSONSource, type GeoJSONSourceShouldReloadTileOptions} from '../source/geojson_source';
|
||||
import {VideoSource} from '../source/video_source';
|
||||
import {ImageSource} from '../source/image_source';
|
||||
import {CanvasSource} from '../source/canvas_source';
|
||||
import {type Dispatcher} from '../util/dispatcher';
|
||||
|
||||
import type {SourceSpecification} from '@maplibre/maplibre-gl-style-spec';
|
||||
import type {Event, Evented} from '../util/evented';
|
||||
import type {Map} from '../ui/map';
|
||||
import type {Tile} from '../tile/tile';
|
||||
import type {OverscaledTileID, CanonicalTileID} from '../tile/tile_id';
|
||||
import type {LoadTileResult} from '../source/vector_tile_source';
|
||||
import type {CanvasSourceSpecification} from '../source/canvas_source';
|
||||
import {type CalculateTileZoomFunction} from '../geo/projection/covering_tiles';
|
||||
|
||||
// Registry of custom source classes added via addSourceType().
const registeredSources = {} as {[key:string]: SourceClass};

/**
 * The `Source` interface must be implemented by each source type, including "core" types (`vector`, `raster`,
 * `video`, etc.) and all custom, third-party types.
 *
 * **Event** `data` - Fired with `{dataType: 'source', sourceDataType: 'metadata'}` to indicate that any necessary metadata
 * has been loaded so that it's okay to call `loadTile`; and with `{dataType: 'source', sourceDataType: 'content'}`
 * to indicate that the source data has changed, so that any current caches should be flushed.
 *
 * @group Sources
 */
export interface Source {
    readonly type: string;
    /**
     * The id for the source. Must not be used by any existing source.
     */
    id: string;
    /**
     * The minimum zoom level for the source.
     */
    minzoom: number;
    /**
     * The maximum zoom level for the source.
     */
    maxzoom: number;
    /**
     * The tile size for the source.
     */
    tileSize: number;
    /**
     * The attribution for the source.
     */
    attribution?: string;
    /**
     * `true` if zoom levels are rounded to the nearest integer in the source data, `false` if they are floor-ed to the nearest integer.
     */
    roundZoom?: boolean;
    /**
     * `false` if tiles can be drawn outside their boundaries, `true` if they cannot.
     */
    isTileClipped?: boolean;
    // NOTE(review): undocumented — presumably the canonical tile for single-tile sources; confirm against implementers.
    tileID?: CanonicalTileID;
    /**
     * `true` if tiles should be sent back to the worker for each overzoomed zoom level, `false` if not.
     */
    reparseOverscaled?: boolean;
    // NOTE(review): undocumented — presumably the layer ids present in a vector source; confirm against implementers.
    vectorLayerIds?: Array<string>;
    /**
     * True if the source has transition, false otherwise.
     */
    hasTransition(): boolean;
    /**
     * True if the source is loaded, false otherwise.
     */
    loaded(): boolean;
    /**
     * An ability to fire an event to all the listeners, see {@link Evented}
     * @param event - The event to fire
     */
    fire(event: Event): unknown;
    /**
     * This method is called when the source is added to the map.
     * @param map - The map instance
     */
    onAdd?(map: Map): void;
    /**
     * This method is called when the source is removed from the map.
     * @param map - The map instance
     */
    onRemove?(map: Map): void;
    /**
     * This method does the heavy lifting of loading a tile.
     * In most cases it will defer the work to the relevant worker source.
     * @param tile - The tile to load
     */
    loadTile(tile: Tile): Promise<LoadTileResult | void>;
    /**
     * True if the tile is part of the source, false otherwise.
     * @param tileID - The tile ID
     */
    hasTile?(tileID: OverscaledTileID): boolean;
    /**
     * Allows to abort a tile loading.
     * @param tile - The tile to abort
     */
    abortTile?(tile: Tile): Promise<void>;
    /**
     * Allows to unload a tile.
     * @param tile - The tile to unload
     */
    unloadTile?(tile: Tile): Promise<void>;
    /**
     * @returns A plain (stringifiable) JS object representing the current state of the source.
     * Creating a source using the returned object as the `options` should result in a Source that is
     * equivalent to this one.
     */
    serialize(): any;
    /**
     * Allows to execute a prepare step before the source is used.
     */
    prepare?(): void;
    /**
     * Optional function to redefine how tiles are loaded at high pitch angles.
     */
    calculateTileZoom?: CalculateTileZoomFunction;
    /**
     * Optional function to determine whether a tile should be reloaded, given a
     * set of options associated with a `MapSourceDataChangedEvent`.
     * @internal
     */
    shouldReloadTile?(tile: Tile, options: GeoJSONSourceShouldReloadTileOptions): boolean;
}
|
||||
|
||||
/**
 * A general definition of a {@link Source} class for factory usage
 */
export type SourceClass = {
    // Constructor signature every source class (core or custom) must satisfy.
    new (id: string, specification: SourceSpecification | CanvasSourceSpecification, dispatcher: Dispatcher, eventedParent: Evented): Source;
};
|
||||
|
||||
/**
|
||||
* Creates a tiled data source instance given an options object.
|
||||
*
|
||||
* @param id - The id for the source. Must not be used by any existing source.
|
||||
* @param specification - Source options, specific to the source type (except for `options.type`, which is always required).
|
||||
* @param source - A source definition object compliant with
|
||||
* [`maplibre-gl-style-spec`](https://maplibre.org/maplibre-style-spec/#sources) or, for a third-party source type,
|
||||
* with that type's requirements.
|
||||
* @param dispatcher - A {@link Dispatcher} instance, which can be used to send messages to the workers.
|
||||
* @returns a newly created source
|
||||
*/
|
||||
export const create = (id: string, specification: SourceSpecification | CanvasSourceSpecification, dispatcher: Dispatcher, eventedParent: Evented): Source => {
|
||||
|
||||
const Class = getSourceType(specification.type);
|
||||
const source = new Class(id, specification, dispatcher, eventedParent);
|
||||
|
||||
if (source.id !== id) {
|
||||
throw new Error(`Expected Source id to be ${id} instead of ${source.id}`);
|
||||
}
|
||||
|
||||
return source;
|
||||
};
|
||||
|
||||
const getSourceType = (name: string): SourceClass => {
|
||||
switch (name) {
|
||||
case 'geojson':
|
||||
return GeoJSONSource;
|
||||
case 'image':
|
||||
return ImageSource;
|
||||
case 'raster':
|
||||
return RasterTileSource;
|
||||
case 'raster-dem':
|
||||
return RasterDEMTileSource;
|
||||
case 'vector':
|
||||
return VectorTileSource;
|
||||
case 'video':
|
||||
return VideoSource;
|
||||
case 'canvas':
|
||||
return CanvasSource;
|
||||
}
|
||||
return registeredSources[name];
|
||||
};
|
||||
|
||||
const setSourceType = (name: string, type: SourceClass) => {
|
||||
registeredSources[name] = type;
|
||||
};
|
||||
|
||||
/**
|
||||
* Adds a custom source type, making it available for use with {@link Map.addSource}.
|
||||
* @param name - The name of the source type; source definition objects use this name in the `{type: ...}` field.
|
||||
* @param SourceType - A {@link SourceClass} - which is a constructor for the `Source` interface.
|
||||
* @returns a promise that is resolved when the source type is ready or rejected with an error.
|
||||
*/
|
||||
export const addSourceType = async (name: string, SourceType: SourceClass): Promise<void> => {
|
||||
if (getSourceType(name)) {
|
||||
throw new Error(`A source type called "${name}" already exists.`);
|
||||
}
|
||||
setSourceType(name, SourceType);
|
||||
};
|
||||
23
node_modules/maplibre-gl/src/source/source_state.test.ts
generated
vendored
Normal file
23
node_modules/maplibre-gl/src/source/source_state.test.ts
generated
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
|
||||
import {describe, test, expect, vi} from 'vitest';
|
||||
import {SourceFeatureState} from './source_state';
|
||||
import {type InViewTiles} from '../tile/tile_manager_in_view_tiles';
|
||||
|
||||
describe('SourceFeatureState', () => {
    test('coalesceChanges updates revision when changes occur', () => {
        const sourceState = new SourceFeatureState();
        expect(sourceState.revision).toBe(0);

        const inViewTilesMock = {
            setFeatureState: vi.fn()
        } as unknown as InViewTiles;
        const painterMock = {};

        // Coalescing with no pending changes must leave the revision untouched.
        sourceState.coalesceChanges(inViewTilesMock, painterMock);
        expect(sourceState.revision).toBe(0);

        // A queued state update is expected to bump the revision on the next coalesce.
        sourceState.updateState('layer1', 'feature1', {prop: true});
        sourceState.coalesceChanges(inViewTilesMock, painterMock);
        expect(sourceState.revision).toBe(1);
    });
});
|
||||
157
node_modules/maplibre-gl/src/source/source_state.ts
generated
vendored
Normal file
157
node_modules/maplibre-gl/src/source/source_state.ts
generated
vendored
Normal file
@@ -0,0 +1,157 @@
|
||||
import {extend} from '../util/util';
|
||||
import type {Tile} from '../tile/tile';
|
||||
import type {FeatureState} from '@maplibre/maplibre-gl-style-spec';
|
||||
import type {InViewTiles} from '../tile/tile_manager_in_view_tiles';
|
||||
|
||||
export type FeatureStates = {[featureId: string]: FeatureState};
|
||||
export type LayerFeatureStates = {[layer: string]: FeatureStates};
|
||||
|
||||
/**
|
||||
* @internal
|
||||
* SourceFeatureState manages the state and pending changes
|
||||
* to features in a source, separated by source layer.
|
||||
* stateChanges and deletedStates batch all changes to the tile (updates and removes, respectively)
|
||||
* between coalesce() events. addFeatureState() and removeFeatureState() also update their counterpart's
|
||||
* list of changes, such that coalesce() can apply the proper state changes while agnostic to the order of operations.
|
||||
* In deletedStates, all null's denote complete removal of state at that scope
|
||||
*/
|
||||
export class SourceFeatureState {
    // Committed feature state, as applied to tiles: {layer: {featureId: state}}.
    state: LayerFeatureStates;
    // Updates queued since the last coalesce, same shape as `state`.
    stateChanges: LayerFeatureStates;
    // Queued removals; a `null` value at any scope means "delete everything at that scope".
    deletedStates: {};
    // Change counter — presumably bumped when a coalesce applies changes; confirm against coalesceChanges.
    revision: number;

    constructor() {
        this.state = {};
        this.stateChanges = {};
        this.deletedStates = {};
        this.revision = 0;
    }
||||
|
||||
// Queues new state properties for a feature; they are merged into `state`
// by the next coalesce. Also cancels any queued deletions this update overrides.
updateState(sourceLayer: string, featureId: number | string, newState: any) {
    const feature = String(featureId);
    this.stateChanges[sourceLayer] = this.stateChanges[sourceLayer] || {};
    this.stateChanges[sourceLayer][feature] = this.stateChanges[sourceLayer][feature] || {};
    extend(this.stateChanges[sourceLayer][feature], newState);

    if (this.deletedStates[sourceLayer] === null) {
        // The whole layer was queued for deletion: narrow that to
        // "every feature except the one just updated".
        this.deletedStates[sourceLayer] = {};
        for (const ft in this.state[sourceLayer]) {
            if (ft !== feature) this.deletedStates[sourceLayer][ft] = null;
        }
    } else {
        const featureDeletionQueued = this.deletedStates[sourceLayer] && this.deletedStates[sourceLayer][feature] === null;
        if (featureDeletionQueued) {
            // The whole feature was queued for deletion: narrow that to the
            // committed keys this update does not overwrite.
            this.deletedStates[sourceLayer][feature] = {};
            for (const prop in this.state[sourceLayer][feature]) {
                if (!newState[prop]) this.deletedStates[sourceLayer][feature][prop] = null;
            }
        } else {
            // Cancel any queued per-key deletions that this update overwrites.
            for (const key in newState) {
                const deletionInQueue = this.deletedStates[sourceLayer] && this.deletedStates[sourceLayer][feature] && this.deletedStates[sourceLayer][feature][key] === null;
                if (deletionInQueue) delete this.deletedStates[sourceLayer][feature][key];
            }
        }
    }
}
|
||||
|
||||
// Queues removal of feature state: a single key (layer+feature+key), a whole
// feature (layer+feature), or an entire source layer (layer only).
removeFeatureState(sourceLayer: string, featureId?: number | string, key?: string) {
    // The entire layer is already queued for deletion — nothing narrower to record.
    const sourceLayerDeleted = this.deletedStates[sourceLayer] === null;
    if (sourceLayerDeleted) return;

    const feature = String(featureId);

    this.deletedStates[sourceLayer] = this.deletedStates[sourceLayer] || {};

    if (key && featureId !== undefined) {
        // Remove a single key — unless the whole feature is already queued for deletion.
        if (this.deletedStates[sourceLayer][feature] !== null) {
            this.deletedStates[sourceLayer][feature] = this.deletedStates[sourceLayer][feature] || {};
            this.deletedStates[sourceLayer][feature][key] = null;
        }
    } else if (featureId !== undefined) {
        // Remove a whole feature. If updates for it are still queued, record each
        // updated key individually so the pending update cannot resurrect them.
        const updateInQueue = this.stateChanges[sourceLayer] && this.stateChanges[sourceLayer][feature];
        if (updateInQueue) {
            this.deletedStates[sourceLayer][feature] = {};
            for (key in this.stateChanges[sourceLayer][feature]) this.deletedStates[sourceLayer][feature][key] = null;

        } else {
            this.deletedStates[sourceLayer][feature] = null;
        }
    } else {
        // Remove the entire source layer.
        this.deletedStates[sourceLayer] = null;
    }

}
|
||||
|
||||
// Returns the effective state of a feature: committed state merged with
// pending updates, minus anything currently queued for deletion.
getState(sourceLayer: string, featureId: number | string) {
    const feature = String(featureId);
    const base = this.state[sourceLayer] || {};
    const changes = this.stateChanges[sourceLayer] || {};

    const reconciledState = extend({}, base[feature], changes[feature]);

    //return empty object if the whole source layer is awaiting deletion
    if (this.deletedStates[sourceLayer] === null) return {};
    else if (this.deletedStates[sourceLayer]) {
        const featureDeletions = this.deletedStates[sourceLayer][featureId];
        // `null` means the whole feature is queued for deletion; otherwise
        // drop only the individually queued keys.
        if (featureDeletions === null) return {};
        for (const prop in featureDeletions) delete reconciledState[prop];
    }
    return reconciledState;
}
|
||||
|
||||
/**
 * Applies the currently committed feature state to a freshly loaded tile.
 */
initializeTileState(tile: Tile, painter: any) {
    tile.setFeatureState(this.state, painter);
}
|
||||
|
||||
/**
 * Folds all pending changes and deletions into the committed state, then
 * pushes the per-layer diff of modified features to the in-view tiles.
 * Bumps `revision` only when something actually changed.
 */
coalesceChanges(inViewTiles: InViewTiles, painter: any) {
    //track changes with full state objects, but only for features that got modified
    const featuresChanged: LayerFeatureStates = {};

    // Apply the queued updates on top of the committed state.
    for (const sourceLayer in this.stateChanges) {
        this.state[sourceLayer] = this.state[sourceLayer] || {};
        const layerStates = {};
        for (const feature in this.stateChanges[sourceLayer]) {
            if (!this.state[sourceLayer][feature]) this.state[sourceLayer][feature] = {};
            extend(this.state[sourceLayer][feature], this.stateChanges[sourceLayer][feature]);
            layerStates[feature] = this.state[sourceLayer][feature];
        }
        featuresChanged[sourceLayer] = layerStates;
    }

    // Apply the queued deletions. A null entry means "delete everything at
    // this level" (see removeFeatureState).
    for (const sourceLayer in this.deletedStates) {
        this.state[sourceLayer] = this.state[sourceLayer] || {};
        const layerStates = {};

        if (this.deletedStates[sourceLayer] === null) {
            // Whole layer queued: clear every feature's state.
            for (const ft in this.state[sourceLayer]) {
                layerStates[ft] = {};
                this.state[sourceLayer][ft] = {};
            }
        } else {
            for (const feature in this.deletedStates[sourceLayer]) {
                const deleteWholeFeatureState = this.deletedStates[sourceLayer][feature] === null;
                if (deleteWholeFeatureState) this.state[sourceLayer][feature] = {};
                else {
                    // Per-key deletions: remove just the queued keys.
                    for (const key of Object.keys(this.deletedStates[sourceLayer][feature])) {
                        delete this.state[sourceLayer][feature][key];
                    }
                }
                layerStates[feature] = this.state[sourceLayer][feature];
            }
        }

        featuresChanged[sourceLayer] = featuresChanged[sourceLayer] || {};
        extend(featuresChanged[sourceLayer], layerStates);
    }

    // Both queues are now folded into the committed state.
    this.stateChanges = {};
    this.deletedStates = {};

    if (Object.keys(featuresChanged).length === 0) return;

    this.revision++;

    inViewTiles.setFeatureState(featuresChanged, painter);
}
|
||||
}
|
||||
77
node_modules/maplibre-gl/src/source/vector_tile_mlt.ts
generated
vendored
Normal file
77
node_modules/maplibre-gl/src/source/vector_tile_mlt.ts
generated
vendored
Normal file
@@ -0,0 +1,77 @@
|
||||
import Point from '@mapbox/point-geometry';
|
||||
import {type FeatureTable, decodeTile, type Feature as MLTFeature, GEOMETRY_TYPE} from '@maplibre/mlt';
|
||||
import type {VectorTileFeatureLike, VectorTileLayerLike, VectorTileLike} from '@maplibre/vt-pbf';
|
||||
|
||||
/**
 * Adapts a decoded MLT feature to the `VectorTileFeatureLike` interface
 * consumed by the rest of the tile pipeline.
 */
class MLTVectorTileFeature implements VectorTileFeatureLike {
    // The decoded feature this wrapper delegates to.
    _featureData: MLTFeature;
    properties: {[_: string]: any};
    type: VectorTileFeatureLike['type'];
    extent: VectorTileFeatureLike['extent'];
    id: VectorTileFeatureLike['id'];

    constructor(feature: MLTFeature, extent: number) {
        this._featureData = feature;
        this.properties = this._featureData.properties || {};
        // Map MLT geometry types onto numeric MVT-style type codes:
        // 1 = point, 2 = line, 3 = polygon, 0 = unknown/absent geometry.
        switch (this._featureData.geometry?.type) {
            case GEOMETRY_TYPE.POINT:
            case GEOMETRY_TYPE.MULTIPOINT:
                this.type = 1;
                break;
            case GEOMETRY_TYPE.LINESTRING:
            case GEOMETRY_TYPE.MULTILINESTRING:
                this.type = 2;
                break;
            case GEOMETRY_TYPE.POLYGON:
            case GEOMETRY_TYPE.MULTIPOLYGON:
                this.type = 3;
                break;
            default:
                this.type = 0;
        };
        this.extent = extent;
        // NOTE(review): Number(undefined) is NaN — assumes decoded features
        // carry a numeric-convertible id; confirm against the MLT decoder.
        this.id = Number(this._featureData.id);
    }

    loadGeometry(): Point[][] {
        // Convert decoded coordinate rings into Point instances.
        // NOTE(review): unlike the constructor, this assumes `geometry` is
        // defined — confirm decoded features always carry geometry.
        const points: Point[][] = [];
        for (const ring of this._featureData.geometry.coordinates) {
            const pointRing: Point[] = [];
            for (const coord of ring) {
                pointRing.push(new Point(coord.x, coord.y));
            }
            points.push(pointRing);
        }
        return points;
    }
}
|
||||
|
||||
class MLTVectorTileLayer implements VectorTileLayerLike {
|
||||
featureTable: FeatureTable;
|
||||
name: string;
|
||||
length: number;
|
||||
version: number;
|
||||
extent: number;
|
||||
features: MLTFeature[] = [];
|
||||
|
||||
constructor(featureTable: FeatureTable) {
|
||||
this.featureTable = featureTable;
|
||||
this.name = featureTable.name;
|
||||
this.extent = featureTable.extent;
|
||||
this.version = 2;
|
||||
this.features = featureTable.getFeatures();
|
||||
this.length = this.features.length;
|
||||
}
|
||||
|
||||
feature(i: number): VectorTileFeatureLike {
|
||||
return new MLTVectorTileFeature(this.features[i], this.extent);
|
||||
}
|
||||
}
|
||||
|
||||
export class MLTVectorTile implements VectorTileLike {
|
||||
layers: Record<string, VectorTileLayerLike> = {};
|
||||
|
||||
constructor(buffer: ArrayBuffer) {
|
||||
const features = decodeTile(new Uint8Array(buffer));
|
||||
this.layers = features.reduce((acc, f) => ({...acc, [f.name]: new MLTVectorTileLayer(f)}), {});
|
||||
}
|
||||
}
|
||||
125
node_modules/maplibre-gl/src/source/vector_tile_overzoomed.ts
generated
vendored
Normal file
125
node_modules/maplibre-gl/src/source/vector_tile_overzoomed.ts
generated
vendored
Normal file
@@ -0,0 +1,125 @@
|
||||
import Point from '@mapbox/point-geometry';
|
||||
import {type VectorTileFeatureLike, type VectorTileLayerLike, type VectorTileLike, fromVectorTileJs} from '@maplibre/vt-pbf';
|
||||
import {clipGeometry} from '../symbol/clip_line';
|
||||
import type {LoadVectorTileResult} from './vector_tile_worker_source';
|
||||
import type {CanonicalTileID} from '../tile/tile_id';
|
||||
|
||||
class VectorTileFeatureOverzoomed implements VectorTileFeatureLike {
|
||||
pointsArray: Point[][];
|
||||
type: VectorTileFeatureLike['type'];
|
||||
properties: VectorTileFeatureLike['properties'];
|
||||
id: VectorTileFeatureLike['id'];
|
||||
extent: VectorTileFeatureLike['extent'];
|
||||
|
||||
constructor(
|
||||
type: VectorTileFeatureLike['type'],
|
||||
geometry: Point[][],
|
||||
properties: VectorTileFeatureLike['properties'],
|
||||
id: VectorTileFeatureLike['id'],
|
||||
extent: VectorTileFeatureLike['extent']
|
||||
) {
|
||||
this.type = type;
|
||||
this.properties = properties ? properties : {};
|
||||
this.extent = extent;
|
||||
this.pointsArray = geometry;
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
loadGeometry() {
|
||||
// Clone the geometry and ensure all points are Point instances
|
||||
return this.pointsArray.map(ring =>
|
||||
ring.map(point => new Point(point.x, point.y))
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
class VectorTileLayerOverzoomed implements VectorTileLayerLike {
|
||||
private _myFeatures: VectorTileFeatureOverzoomed[];
|
||||
name: string;
|
||||
extent: number;
|
||||
version: number = 2;
|
||||
length: number;
|
||||
|
||||
constructor(features: VectorTileFeatureOverzoomed[], layerName: string, extent: number) {
|
||||
this._myFeatures = features;
|
||||
this.name = layerName;
|
||||
this.length = features.length;
|
||||
this.extent = extent;
|
||||
}
|
||||
|
||||
feature(i: number): VectorTileFeatureLike {
|
||||
return this._myFeatures[i];
|
||||
}
|
||||
}
|
||||
|
||||
export class VectorTileOverzoomed implements VectorTileLike {
|
||||
layers: Record<string, VectorTileLayerLike> = {};
|
||||
|
||||
addLayer(layer: VectorTileLayerOverzoomed) {
|
||||
this.layers[layer.name] = layer;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Encodes the virtual tile into binary vector tile form.
 * This is a convenience that allows `FeatureIndex` to operate the same way across `VectorTileSource` and `GeoJSONSource` data.
 * @param virtualVectorTile - a synthetically created vector tile; this tile should have the relevant layer and features already added to it.
 * @returns - the encoded vector tile along with the original virtual tile binary data.
 */
export function toVirtualVectorTile(virtualVectorTile: VectorTileLike): LoadVectorTileResult {
    let pbf: Uint8Array = fromVectorTileJs(virtualVectorTile);
    // If the view does not span its whole backing buffer, copy it so that
    // `pbf.buffer` below is exactly the encoded bytes.
    if (pbf.byteOffset !== 0 || pbf.byteLength !== pbf.buffer.byteLength) {
        pbf = new Uint8Array(pbf); // Compatibility with node Buffer (https://github.com/mapbox/pbf/issues/35)
    }
    return {
        vectorTile: virtualVectorTile,
        rawData: pbf.buffer
    };
}
|
||||
|
||||
/**
 * This function slices a source tile layer into an overzoomed tile layer for a target tile ID.
 * @param sourceLayer - the source tile layer to slice
 * @param maxZoomTileID - the maximum zoom tile ID (the tile the data actually came from)
 * @param targetTileID - the target tile ID (a descendant of `maxZoomTileID`)
 * @returns - the overzoomed tile layer
 */
export function sliceVectorTileLayer(sourceLayer: VectorTileLayerLike, maxZoomTileID: CanonicalTileID, targetTileID: CanonicalTileID): VectorTileLayerOverzoomed {
    const {extent} = sourceLayer;
    // Zoom difference between target and source tiles; each level doubles
    // the coordinate scale.
    const dz = targetTileID.z - maxZoomTileID.z;
    const scale = Math.pow(2, dz);

    // Calculate the target tile's position within the source tile in target coordinate space
    // This ensures all tiles share the same coordinate system
    const offsetX = (targetTileID.x - maxZoomTileID.x * scale) * extent;
    const offsetY = (targetTileID.y - maxZoomTileID.y * scale) * extent;

    const featureWrappers: VectorTileFeatureOverzoomed[] = [];
    for (let index = 0; index < sourceLayer.length; index++) {
        const feature: VectorTileFeatureLike = sourceLayer.feature(index);
        let geometry = feature.loadGeometry();

        // Transform all coordinates to target tile space
        for (const ring of geometry) {
            for (const point of ring) {
                point.x = point.x * scale - offsetX;
                point.y = point.y * scale - offsetY;
            }
        }

        // Clip with a buffer around the tile bounds; features clipped to
        // nothing are dropped entirely.
        const buffer = 128;
        geometry = clipGeometry(geometry, feature.type, -buffer, -buffer, extent + buffer, extent + buffer);
        if (geometry.length === 0) {
            continue;
        }

        featureWrappers.push(new VectorTileFeatureOverzoomed(
            feature.type,
            geometry,
            feature.properties,
            feature.id,
            extent
        ));
    }
    return new VectorTileLayerOverzoomed(featureWrappers, sourceLayer.name, extent);
}
|
||||
542
node_modules/maplibre-gl/src/source/vector_tile_source.test.ts
generated
vendored
Normal file
542
node_modules/maplibre-gl/src/source/vector_tile_source.test.ts
generated
vendored
Normal file
@@ -0,0 +1,542 @@
|
||||
import {describe, beforeEach, afterEach, test, expect, vi} from 'vitest';
|
||||
import {fakeServer, type FakeServer} from 'nise';
|
||||
import {type Source} from './source';
|
||||
import {VectorTileSource} from './vector_tile_source';
|
||||
import {type Tile} from '../tile/tile';
|
||||
import {OverscaledTileID} from '../tile/tile_id';
|
||||
import {Evented} from '../util/evented';
|
||||
import {RequestManager} from '../util/request_manager';
|
||||
import fixturesSource from '../../test/unit/assets/source.json' with {type: 'json'};
|
||||
import {getMockDispatcher, getWrapDispatcher, sleep, waitForEvent, waitForMetadataEvent} from '../util/test/util';
|
||||
import {type Map} from '../ui/map';
|
||||
import {type WorkerTileParameters} from './worker_source';
|
||||
import {SubdivisionGranularitySetting} from '../render/subdivision_granularity_settings';
|
||||
import {type ActorMessage, MessageType} from '../util/actor_messages';
|
||||
import {type MapSourceDataEvent} from '../ui/events';
|
||||
|
||||
// Builds a VectorTileSource wired to a minimal stubbed Map for the tests
// below. `transformCallback` is handed to the RequestManager so tests can
// observe/modify requests; `clearTiles` lets tests observe cache clearing.
function createSource(options, transformCallback?, clearTiles = () => {}) {
    const source = new VectorTileSource('id', options, getMockDispatcher(), options.eventedParent);
    source.onAdd({
        transform: {showCollisionBoxes: false},
        _getMapId: () => 1,
        _requestManager: new RequestManager(transformCallback),
        style: {
            tileManagers: {id: {clearTiles}},
            projection: {
                get subdivisionGranularity() {
                    return SubdivisionGranularitySetting.noSubdivision;
                }
            }
        },
        getGlobalState: () => ({}),
        getPixelRatio() { return 1; },
    } as any as Map);

    source.on('error', () => { }); // to prevent console log of errors

    return source;
}
|
||||
|
||||
describe('VectorTileSource', () => {
|
||||
let server: FakeServer;
|
||||
beforeEach(() => {
|
||||
global.fetch = null;
|
||||
server = fakeServer.create();
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
server.restore();
|
||||
});
|
||||
|
||||
test('can be constructed from TileJSON', async () => {
|
||||
const source = createSource({
|
||||
minzoom: 1,
|
||||
maxzoom: 10,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png']
|
||||
});
|
||||
|
||||
await waitForMetadataEvent(source);
|
||||
expect(source.tiles).toEqual(['http://example.com/{z}/{x}/{y}.png']);
|
||||
expect(source.minzoom).toBe(1);
|
||||
expect(source.maxzoom).toBe(10);
|
||||
expect((source as Source).attribution).toBe('MapLibre');
|
||||
});
|
||||
|
||||
test('can be constructed from a TileJSON URL', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify(fixturesSource));
|
||||
|
||||
const source = createSource({url: '/source.json'});
|
||||
|
||||
const promise = waitForMetadataEvent(source);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
|
||||
await promise;
|
||||
expect(source.tiles).toEqual(['http://example.com/{z}/{x}/{y}.png']);
|
||||
expect(source.minzoom).toBe(1);
|
||||
expect(source.maxzoom).toBe(10);
|
||||
expect((source as Source).attribution).toBe('MapLibre');
|
||||
});
|
||||
|
||||
test('transforms the request for TileJSON URL', () => {
|
||||
server.respondWith('/source.json', JSON.stringify(fixturesSource));
|
||||
const transformSpy = vi.fn().mockImplementation((url) => {
|
||||
return {url};
|
||||
});
|
||||
|
||||
createSource({url: '/source.json'}, transformSpy);
|
||||
server.respond();
|
||||
expect(transformSpy).toHaveBeenCalledWith('/source.json', 'Source');
|
||||
});
|
||||
|
||||
test('can asynchronously transform the request for TileJSON URL', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify(fixturesSource));
|
||||
const source = createSource({url: '/source.json'}, async (url) => ({
|
||||
url,
|
||||
headers: {Authorization: 'Bearer token'}
|
||||
}));
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await waitForMetadataEvent(source);
|
||||
expect(server.requests[0].url).toBe('/source.json');
|
||||
expect(server.requests[0].requestHeaders.Authorization).toBe('Bearer token');
|
||||
});
|
||||
|
||||
test('fires event with metadata property', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify(fixturesSource));
|
||||
const source = createSource({url: '/source.json'});
|
||||
const dataEvent = waitForEvent(source, 'data', (e) => e.sourceDataType === 'content');
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await expect(dataEvent).resolves.toBeDefined();
|
||||
});
|
||||
|
||||
test('fires "dataloading" event', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify(fixturesSource));
|
||||
const evented = new Evented();
|
||||
const dataloadingSpy = vi.fn();
|
||||
evented.on('dataloading', dataloadingSpy);
|
||||
const source = createSource({url: '/source.json', eventedParent: evented});
|
||||
const promise = waitForMetadataEvent(source);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
|
||||
await promise;
|
||||
expect(dataloadingSpy).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('fires "error" event if TileJSON request fails', async () => {
|
||||
server.respondWith('/source.json', [404, {}, '']);
|
||||
|
||||
const source = createSource({url: '/source.json'});
|
||||
const errorEvent = waitForEvent(source, 'error', (e) => e.error.status === 404);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
|
||||
await expect(errorEvent).resolves.toBeDefined();
|
||||
expect(source.loaded()).toBe(true);
|
||||
});
|
||||
|
||||
test('serialize URL', () => {
|
||||
const source = createSource({
|
||||
url: 'http://localhost:2900/source.json'
|
||||
});
|
||||
expect(source.serialize()).toEqual({
|
||||
type: 'vector',
|
||||
url: 'http://localhost:2900/source.json'
|
||||
});
|
||||
});
|
||||
|
||||
test('serialize TileJSON', () => {
|
||||
const source = createSource({
|
||||
minzoom: 1,
|
||||
maxzoom: 10,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png']
|
||||
});
|
||||
expect(source.serialize()).toEqual({
|
||||
type: 'vector',
|
||||
minzoom: 1,
|
||||
maxzoom: 10,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png']
|
||||
});
|
||||
});
|
||||
|
||||
function testScheme(scheme, expectedURL) {
|
||||
test(`scheme "${scheme}"`, async () => {
|
||||
const source = createSource({
|
||||
minzoom: 1,
|
||||
maxzoom: 10,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
scheme
|
||||
});
|
||||
|
||||
let receivedMessage: ActorMessage<MessageType> = null;
|
||||
|
||||
source.dispatcher = getWrapDispatcher()({
|
||||
sendAsync(message) {
|
||||
receivedMessage = message;
|
||||
return Promise.resolve({});
|
||||
}
|
||||
});
|
||||
|
||||
await waitForMetadataEvent(source);
|
||||
await source.loadTile({
|
||||
loadVectorData() {},
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5)
|
||||
} as any as Tile);
|
||||
|
||||
expect(receivedMessage.type).toBe(MessageType.loadTile);
|
||||
expect(expectedURL).toBe((receivedMessage.data as WorkerTileParameters).request.url);
|
||||
});
|
||||
}
|
||||
|
||||
testScheme('xyz', 'http://example.com/10/5/5.png');
|
||||
testScheme('tms', 'http://example.com/10/5/1018.png');
|
||||
|
||||
test('transforms tile urls before requesting', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify(fixturesSource));
|
||||
|
||||
const source = createSource({url: '/source.json'});
|
||||
const transformSpy = vi.spyOn(source.map._requestManager, 'transformRequest');
|
||||
const promise = waitForMetadataEvent(source);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
loadVectorData() {},
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
source.loadTile(tile);
|
||||
expect(transformSpy).toHaveBeenCalledTimes(1);
|
||||
expect(transformSpy).toHaveBeenCalledWith('http://example.com/10/5/5.png', 'Tile');
|
||||
});
|
||||
|
||||
test('can asynchronously transform tile request', async () => {
|
||||
const source = createSource({
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
scheme: 'xyz'
|
||||
}, async (url) => ({
|
||||
url,
|
||||
headers: {Authorization: 'Bearer token'}
|
||||
}));
|
||||
let receivedMessage: ActorMessage<MessageType> = null;
|
||||
source.dispatcher = getWrapDispatcher()({
|
||||
sendAsync(message) {
|
||||
receivedMessage = message;
|
||||
return Promise.resolve({});
|
||||
}
|
||||
});
|
||||
await waitForMetadataEvent(source);
|
||||
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
loadVectorData() {},
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
await source.loadTile(tile);
|
||||
expect((receivedMessage.data as WorkerTileParameters).request.url).toBe('http://example.com/10/5/5.png');
|
||||
expect((receivedMessage.data as WorkerTileParameters).request.headers.Authorization).toBe('Bearer token');
|
||||
});
|
||||
|
||||
test('loads a tile even in case of 404', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify(fixturesSource));
|
||||
|
||||
const source = createSource({url: '/source.json'});
|
||||
source.dispatcher = getWrapDispatcher()({
|
||||
sendAsync(_message) {
|
||||
const error = new Error();
|
||||
(error as any).status = 404;
|
||||
return Promise.reject(error);
|
||||
}
|
||||
});
|
||||
const promise = waitForMetadataEvent(source);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
loadVectorData: vi.fn(),
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
await source.loadTile(tile);
|
||||
expect(tile.loadVectorData).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('does not load a tile in case of error', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify(fixturesSource));
|
||||
|
||||
const source = createSource({url: '/source.json'});
|
||||
source.dispatcher = getWrapDispatcher()({
|
||||
async sendAsync(_message) {
|
||||
throw new Error('Error');
|
||||
}
|
||||
});
|
||||
const promise = waitForMetadataEvent(source);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
loadVectorData: vi.fn(),
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
await expect(source.loadTile(tile)).rejects.toThrow('Error');
|
||||
expect(tile.loadVectorData).toHaveBeenCalledTimes(0);
|
||||
});
|
||||
|
||||
test('loads an empty tile received from worker', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify(fixturesSource));
|
||||
|
||||
const source = createSource({url: '/source.json'});
|
||||
source.dispatcher = getWrapDispatcher()({
|
||||
sendAsync(_message) {
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
});
|
||||
|
||||
const promise = waitForMetadataEvent(source);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
await promise;
|
||||
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
loadVectorData: vi.fn(),
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
await source.loadTile(tile);
|
||||
expect(tile.loadVectorData).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
|
||||
test('reloads a loading tile properly', async () => {
|
||||
const source = createSource({
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png']
|
||||
});
|
||||
const events = [];
|
||||
source.dispatcher = getWrapDispatcher()({
|
||||
sendAsync(message) {
|
||||
events.push(message.type);
|
||||
return Promise.resolve({});
|
||||
}
|
||||
});
|
||||
|
||||
await waitForMetadataEvent(source);
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
loadVectorData () {
|
||||
this.state = 'loaded';
|
||||
events.push('tileLoaded');
|
||||
},
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
const initialLoadPromise = source.loadTile(tile);
|
||||
|
||||
expect(tile.state).toBe('loading');
|
||||
await source.loadTile(tile);
|
||||
expect(events).toEqual([MessageType.loadTile, 'tileLoaded', MessageType.reloadTile, 'tileLoaded']);
|
||||
await expect(initialLoadPromise).resolves.toStrictEqual({});
|
||||
});
|
||||
|
||||
test('respects TileJSON.bounds', async () => {
|
||||
const source = createSource({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
});
|
||||
await waitForMetadataEvent(source);
|
||||
|
||||
expect(source.hasTile(new OverscaledTileID(8, 0, 8, 96, 132))).toBeFalsy();
|
||||
expect(source.hasTile(new OverscaledTileID(8, 0, 8, 95, 132))).toBeTruthy();
|
||||
});
|
||||
|
||||
test('does not error on invalid bounds', async () => {
|
||||
const source = createSource({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, 91]
|
||||
});
|
||||
|
||||
await waitForMetadataEvent(source);
|
||||
expect(source.tileBounds.bounds).toEqual({_sw: {lng: -47, lat: -7}, _ne: {lng: -45, lat: 90}});
|
||||
});
|
||||
|
||||
test('respects TileJSON.bounds when loaded from TileJSON', async () => {
|
||||
server.respondWith('/source.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
bounds: [-47, -7, -45, -5]
|
||||
}));
|
||||
const source = createSource({url: '/source.json'});
|
||||
|
||||
const promise = waitForMetadataEvent(source);
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
|
||||
await promise;
|
||||
expect(source.hasTile(new OverscaledTileID(8, 0, 8, 96, 132))).toBeFalsy();
|
||||
expect(source.hasTile(new OverscaledTileID(8, 0, 8, 95, 132))).toBeTruthy();
|
||||
});
|
||||
|
||||
test('respects collectResourceTiming parameter on source', async () => {
|
||||
const source = createSource({
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png'],
|
||||
collectResourceTiming: true
|
||||
});
|
||||
let receivedMessage = null;
|
||||
source.dispatcher = getWrapDispatcher()({
|
||||
sendAsync(message) {
|
||||
receivedMessage = message;
|
||||
|
||||
// do nothing for cache size check dispatch
|
||||
source.dispatcher = getMockDispatcher();
|
||||
|
||||
return Promise.resolve({});
|
||||
}
|
||||
});
|
||||
|
||||
await waitForMetadataEvent(source);
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
loadVectorData() {},
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
await source.loadTile(tile);
|
||||
|
||||
expect((receivedMessage.data as WorkerTileParameters).request.collectResourceTiming).toBeTruthy();
|
||||
});
|
||||
|
||||
test('cancels TileJSON request if removed', async () => {
|
||||
const source = createSource({url: '/source.json'});
|
||||
await sleep(0);
|
||||
source.onRemove();
|
||||
expect((server.lastRequest as any).aborted).toBe(true);
|
||||
});
|
||||
|
||||
test('supports url property updates', async () => {
|
||||
server.respondWith('http://localhost:2900/source2.json', JSON.stringify({
|
||||
minzoom: 0,
|
||||
maxzoom: 22,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.mvt'],
|
||||
}));
|
||||
|
||||
const source = createSource({
|
||||
url: 'http://localhost:2900/source.json'
|
||||
});
|
||||
await sleep(0);
|
||||
const errorHandler = vi.fn();
|
||||
source.on('error', errorHandler);
|
||||
source.setUrl('http://localhost:2900/source2.json');
|
||||
|
||||
await sleep(0);
|
||||
server.respond();
|
||||
|
||||
await waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.sourceDataType === 'metadata');
|
||||
|
||||
expect(server.requests.length).toBe(2);
|
||||
expect(server.requests[0].aborted).toBe(true);
|
||||
expect(source.serialize()).toEqual({
|
||||
type: 'vector',
|
||||
url: 'http://localhost:2900/source2.json'
|
||||
});
|
||||
expect(errorHandler).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('supports tiles property updates', () => {
|
||||
const source = createSource({
|
||||
minzoom: 1,
|
||||
maxzoom: 10,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png']
|
||||
});
|
||||
source.setTiles(['http://example2.com/{z}/{x}/{y}.png']);
|
||||
expect(source.serialize()).toEqual({
|
||||
type: 'vector',
|
||||
minzoom: 1,
|
||||
maxzoom: 10,
|
||||
attribution: 'MapLibre',
|
||||
tiles: ['http://example2.com/{z}/{x}/{y}.png']
|
||||
});
|
||||
});
|
||||
|
||||
test('setTiles updates tiles without clearing the cache', async () => {
|
||||
const clearTiles = vi.fn();
|
||||
const source = createSource({tiles: ['http://example.com/{z}/{x}/{y}.pbf']}, undefined, clearTiles);
|
||||
source.setTiles(['http://example2.com/{z}/{x}/{y}.pbf']);
|
||||
const e: MapSourceDataEvent = await waitForEvent(source, 'data', (e: MapSourceDataEvent) => e.sourceDataType === 'content');
|
||||
expect(e.sourceDataChanged).toBe(true);
|
||||
expect(clearTiles).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
test('returns early after worker response if tile was aborted', async () => {
|
||||
const source = createSource({
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png']
|
||||
});
|
||||
await waitForMetadataEvent(source);
|
||||
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
aborted: false,
|
||||
etag: undefined,
|
||||
loadVectorData: vi.fn(),
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
|
||||
source.dispatcher = getWrapDispatcher()({
|
||||
sendAsync(_message, _abortController) {
|
||||
tile.aborted = true;
|
||||
return Promise.resolve({etag: 'test'} as any);
|
||||
}
|
||||
});
|
||||
|
||||
const result = await source.loadTile(tile);
|
||||
expect(result).toBeUndefined();
|
||||
expect(tile.loadVectorData).toHaveBeenCalledTimes(0);
|
||||
expect(tile.etag).toBeUndefined();
|
||||
});
|
||||
|
||||
test('stores worker etag on tile when present', async () => {
|
||||
const source = createSource({
|
||||
tiles: ['http://example.com/{z}/{x}/{y}.png']
|
||||
});
|
||||
|
||||
source.dispatcher = getWrapDispatcher()({
|
||||
sendAsync(_message) {
|
||||
return Promise.resolve({etag: 'test'} as any);
|
||||
}
|
||||
});
|
||||
await waitForMetadataEvent(source);
|
||||
|
||||
const tile = {
|
||||
tileID: new OverscaledTileID(10, 0, 10, 5, 5),
|
||||
state: 'loading',
|
||||
aborted: false,
|
||||
etag: undefined,
|
||||
loadVectorData: vi.fn(),
|
||||
setExpiryData() {}
|
||||
} as any as Tile;
|
||||
|
||||
await source.loadTile(tile);
|
||||
expect(tile.etag).toBe('test');
|
||||
});
|
||||
});
|
||||
326
node_modules/maplibre-gl/src/source/vector_tile_source.ts
generated
vendored
Normal file
326
node_modules/maplibre-gl/src/source/vector_tile_source.ts
generated
vendored
Normal file
@@ -0,0 +1,326 @@
|
||||
import {Event, ErrorEvent, Evented} from '../util/evented';
|
||||
|
||||
import {extend, pick} from '../util/util';
|
||||
import {loadTileJson} from './load_tilejson';
|
||||
import {TileBounds} from '../tile/tile_bounds';
|
||||
import {ResourceType} from '../util/request_manager';
|
||||
import {MessageType} from '../util/actor_messages';
|
||||
import {isAbortError} from '../util/abort_error';
|
||||
|
||||
import type {Source} from './source';
|
||||
import type {OverscaledTileID} from '../tile/tile_id';
|
||||
import type {Map} from '../ui/map';
|
||||
import type {Dispatcher} from '../util/dispatcher';
|
||||
import type {Tile} from '../tile/tile';
|
||||
import type {VectorSourceSpecification, PromoteIdSpecification} from '@maplibre/maplibre-gl-style-spec';
|
||||
import type {WorkerTileParameters, OverzoomParameters, WorkerTileResult} from './worker_source';
|
||||
|
||||
/**
 * Options accepted by `VectorTileSource` on top of the style-spec vector
 * source specification.
 */
export type VectorTileSourceOptions = VectorSourceSpecification & {
    /** When true, resource-timing data is collected for tile requests. */
    collectResourceTiming?: boolean;
    /** Tile size in pixels; vector sources require 512 (the constructor throws otherwise). */
    tileSize?: number;
};
|
||||
|
||||
/**
 * Result of a `VectorTileSource.loadTile` call.
 */
export type LoadTileResult = {
    /**
     * Indicates that the tile requested was not modified.
     */
    unmodified?: boolean;
};
|
||||
|
||||
/**
|
||||
* A source containing vector tiles in [Maplibre Vector Tile format](https://maplibre.org/maplibre-tile-spec/) or [Mapbox Vector Tile format](https://docs.mapbox.com/vector-tiles/reference/).
|
||||
* (See the [Style Specification](https://maplibre.org/maplibre-style-spec/) for detailed documentation of options.)
|
||||
*
|
||||
* @group Sources
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* map.addSource('some id', {
|
||||
* type: 'vector',
|
||||
* url: 'https://demotiles.maplibre.org/tiles/tiles.json'
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* map.addSource('some id', {
|
||||
* type: 'vector',
|
||||
* tiles: ['https://d25uarhxywzl1j.cloudfront.net/v0.1/{z}/{x}/{y}.mvt'],
|
||||
* minzoom: 6,
|
||||
* maxzoom: 14
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* map.getSource('some id').setUrl("https://demotiles.maplibre.org/tiles/tiles.json");
|
||||
* ```
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* map.getSource('some id').setTiles(['https://d25uarhxywzl1j.cloudfront.net/v0.1/{z}/{x}/{y}.mvt']);
|
||||
* ```
|
||||
* @see [Add a vector tile source](https://maplibre.org/maplibre-gl-js/docs/examples/add-a-vector-tile-source/)
|
||||
*/
|
||||
export class VectorTileSource extends Evented implements Source {
    type: 'vector';
    id: string;
    minzoom: number;
    maxzoom: number;
    url: string;
    scheme: string;
    encoding: string;
    tileSize: number;
    promoteId: PromoteIdSpecification;

    // Internal copy of the source specification; used by serialize() and for TileJSON loading.
    _options: VectorSourceSpecification;
    _collectResourceTiming: boolean;
    dispatcher: Dispatcher;
    map: Map;
    bounds: [number, number, number, number];
    tiles: Array<string>;
    // Populated from TileJSON `bounds`; when set, hasTile() limits which tiles are requested.
    tileBounds: TileBounds;
    reparseOverscaled: boolean;
    isTileClipped: boolean;
    // In-flight TileJSON request; aborted on removal or when url/tiles are replaced.
    _tileJSONRequest: AbortController;
    _loaded: boolean;

    constructor(id: string, options: VectorTileSourceOptions, dispatcher: Dispatcher, eventedParent: Evented) {
        super();
        this.id = id;
        this.dispatcher = dispatcher;

        // Defaults; may be overridden by the picked option keys below or by TileJSON in load().
        this.type = 'vector';
        this.minzoom = 0;
        this.maxzoom = 22;
        this.scheme = 'xyz';
        this.tileSize = 512;
        this.reparseOverscaled = true;
        this.isTileClipped = true;
        this._loaded = false;

        extend(this, pick(options, ['url', 'scheme', 'tileSize', 'promoteId', 'encoding']));
        this._options = extend({type: 'vector'}, options);

        this._collectResourceTiming = options.collectResourceTiming;

        // Vector sources are rendered assuming 512px tiles; anything else is rejected up front.
        if (this.tileSize !== 512) {
            throw new Error('vector tile sources must have a tileSize of 512');
        }

        this.setEventedParent(eventedParent);
    }

    /**
     * Loads the TileJSON (if any), merges its fields into this source, and fires
     * the `dataloading` / `data` (or `error`) events that drive the style update cycle.
     *
     * @param sourceDataChanged - forwarded on the `content` data event so listeners can
     * distinguish a reload triggered by setTiles/setUrl from the initial load.
     */
    async load(sourceDataChanged: boolean = false) {
        this._loaded = false;
        this.fire(new Event('dataloading', {dataType: 'source'}));
        this._tileJSONRequest = new AbortController();
        try {
            const tileJSON = await loadTileJson(this._options, this.map._requestManager, this._tileJSONRequest, this.map._ownerWindow);
            this._tileJSONRequest = null;
            this._loaded = true;
            if (tileJSON) {
                extend(this, tileJSON);
                if (tileJSON.bounds) this.tileBounds = new TileBounds(tileJSON.bounds, this.minzoom, this.maxzoom);

                // `content` is included here to prevent a race condition where `Style._updateSources` is called
                // before the TileJSON arrives. this makes sure the tiles needed are loaded once TileJSON arrives
                // ref: https://github.com/mapbox/mapbox-gl-js/pull/4347#discussion_r104418088
                this.fire(new Event('data', {dataType: 'source', sourceDataType: 'metadata'}));
                this.fire(new Event('data', {dataType: 'source', sourceDataType: 'content', sourceDataChanged}));
            }
        } catch (err) {
            this._tileJSONRequest = null;
            this._loaded = true; // let's pretend it's loaded so the source will be ignored

            // only fire error event if it is not due to aborting the request
            if (!isAbortError(err)) {
                this.fire(new ErrorEvent(err));
            }
        }
    }

    /** True once load() has finished (successfully or not). */
    loaded(): boolean {
        return this._loaded;
    }

    /** Whether this source can supply the given tile (always true when no TileJSON bounds were set). */
    hasTile(tileID: OverscaledTileID) {
        return !this.tileBounds || this.tileBounds.contains(tileID.canonical);
    }

    /** Called when the source is added to a map; stores the map reference and starts loading. */
    onAdd(map: Map) {
        this.map = map;
        this.load();
    }

    /**
     * Aborts any pending TileJSON request, applies the property change via `callback`,
     * then reloads the source with `sourceDataChanged = true`.
     */
    setSourceProperty(callback: Function) {
        if (this._tileJSONRequest) {
            this._tileJSONRequest.abort();
        }

        callback();

        this.load(true);
    }

    /**
     * Sets the source `tiles` property and re-renders the map.
     *
     * @param tiles - An array of one or more tile source URLs, as in the TileJSON spec.
     */
    setTiles(tiles: Array<string>): this {
        this.setSourceProperty(() => {
            this._options.tiles = tiles;
        });

        return this;
    }

    /**
     * Sets the source `url` property and re-renders the map.
     *
     * @param url - A URL to a TileJSON resource. Supported protocols are `http:` and `https:`.
     */
    setUrl(url: string): this {
        this.setSourceProperty(() => {
            this.url = url;
            this._options.url = url;
        });

        return this;
    }

    /** Called when the source is removed from the map; cancels any in-flight TileJSON request. */
    onRemove() {
        if (this._tileJSONRequest) {
            this._tileJSONRequest.abort();
            this._tileJSONRequest = null;
        }
    }

    /** Returns a copy of the original source specification. */
    serialize(): VectorSourceSpecification {
        return extend({}, this._options);
    }

    /**
     * Loads (or reloads) a tile by sending a request to the worker thread.
     * - New or expired tiles get a fresh actor and a `loadTile` message; others get `reloadTile`.
     * - If the tile is still loading, the reload is queued on `tile.reloadPromise` and
     *   picked up by _afterTileLoadWorkerResponse once the current load finishes.
     */
    async loadTile(tile: Tile): Promise<LoadTileResult | void> {
        const url = tile.tileID.canonical.url(this.tiles, this.map.getPixelRatio(), this.scheme);
        const params: WorkerTileParameters = {
            request: await this.map._requestManager.transformRequest(url, ResourceType.Tile),
            uid: tile.uid,
            tileID: tile.tileID,
            zoom: tile.tileID.overscaledZ,
            tileSize: this.tileSize * tile.tileID.overscaleFactor(),
            type: this.type,
            source: this.id,
            pixelRatio: this.map.getPixelRatio(),
            showCollisionBoxes: this.map.showCollisionBoxes,
            promoteId: this.promoteId,
            subdivisionGranularity: this.map.style.projection.subdivisionGranularity,
            encoding: this.encoding,
            overzoomParameters: await this._getOverzoomParameters(tile),
            etag: tile.etag
        };
        params.request.collectResourceTiming = this._collectResourceTiming;
        let messageType: MessageType.loadTile | MessageType.reloadTile = MessageType.reloadTile;
        if (!tile.actor || tile.state === 'expired') {
            tile.actor = this.dispatcher.getActor();
            messageType = MessageType.loadTile;
        } else if (tile.state === 'loading') {
            // A load is already in flight; resolve/reject of this promise is deferred
            // until _afterTileLoadWorkerResponse re-issues the load.
            return new Promise<void>((resolve, reject) => {
                tile.reloadPromise = {resolve, reject};
            });
        }
        tile.abortController = new AbortController();
        try {
            const data = await tile.actor.sendAsync({type: messageType, data: params}, tile.abortController);
            delete tile.abortController;

            if (tile.aborted) {
                return;
            }
            this._afterTileLoadWorkerResponse(tile, data);

            const result: LoadTileResult = {};
            if (data?.etagUnmodified) result.unmodified = true;
            return result;
        } catch (err) {
            delete tile.abortController;

            if (tile.aborted) {
                return;
            }
            // 404s are treated as "empty tile" rather than an error.
            if (err && err.status !== 404) {
                throw err;
            }
            this._afterTileLoadWorkerResponse(tile, null);
        }
    }

    /**
     * When the requested tile has a higher canonical Z than source maxzoom, pass overzoom parameters so worker can load the
     * deepest tile at source max zoom to generate sub tiles using geojsonvt for highest performance on vector overscaling
     */
    private async _getOverzoomParameters(tile: Tile): Promise<OverzoomParameters | undefined> {
        if (tile.tileID.canonical.z <= this.maxzoom) {
            return undefined;
        }
        if (this.map._zoomLevelsToOverscale === undefined) {
            return undefined;
        }
        const maxZoomTileID = tile.tileID.scaledTo(this.maxzoom).canonical;
        const maxZoomTileUrl = maxZoomTileID.url(this.tiles, this.map.getPixelRatio(), this.scheme);

        return {
            maxZoomTileID,
            overzoomRequest: await this.map._requestManager.transformRequest(maxZoomTileUrl, ResourceType.Tile)
        };
    }

    /**
     * Applies a worker tile result to the tile (resource timing, expiry data, etag, vector data)
     * and, if a reload was queued while the load was in flight, kicks off that reload.
     * `data` may be null (e.g. after a 404) — the tile is then loaded with no vector data.
     */
    private _afterTileLoadWorkerResponse(tile: Tile, data: WorkerTileResult) {
        if (data?.resourceTiming) {
            tile.resourceTiming = data.resourceTiming;
        }

        if (data && this.map._refreshExpiredTiles) {
            tile.setExpiryData(data);
        }
        tile.etag = data?.etag;

        tile.loadVectorData(data, this.map.painter);

        if (tile.reloadPromise) {
            const reloadPromise = tile.reloadPromise;
            tile.reloadPromise = null;
            this.loadTile(tile).then(reloadPromise.resolve).catch(reloadPromise.reject);
        }
    }

    /** Aborts the tile's in-flight network request and tells the worker to abort its processing. */
    async abortTile(tile: Tile): Promise<void> {
        if (tile.abortController) {
            tile.abortController.abort();
            delete tile.abortController;
        }
        if (tile.actor) {
            await tile.actor.sendAsync({
                type: MessageType.abortTile,
                data: {uid: tile.uid, type: this.type, source: this.id}
            });
        }
    }

    /** Releases the tile's vector data and tells the worker to drop its copy. */
    async unloadTile(tile: Tile): Promise<void> {
        tile.unloadVectorData();
        if (tile.actor) {
            await tile.actor.sendAsync({
                type: MessageType.removeTile,
                data: {
                    uid: tile.uid,
                    type: this.type,
                    source: this.id}
            });
        }
    }

    /** Vector sources have no animated transitions. */
    hasTransition() {
        return false;
    }
}
|
||||
492
node_modules/maplibre-gl/src/source/vector_tile_worker_source.test.ts
generated
vendored
Normal file
492
node_modules/maplibre-gl/src/source/vector_tile_worker_source.test.ts
generated
vendored
Normal file
@@ -0,0 +1,492 @@
|
||||
import {describe, beforeEach, afterEach, test, expect, vi} from 'vitest';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import Protobuf from 'pbf';
|
||||
import {VectorTileWorkerSource} from '../source/vector_tile_worker_source';
|
||||
import {StyleLayerIndex} from '../style/style_layer_index';
|
||||
import {fakeServer, type FakeServer} from 'nise';
|
||||
import {type IActor} from '../util/actor';
|
||||
import {type TileParameters, type WorkerTileParameters, type WorkerTileResult, type WorkerTileWithData} from './worker_source';
|
||||
import {WorkerTile} from './worker_tile';
|
||||
import {setPerformance, sleep} from '../util/test/util';
|
||||
import {ABORT_ERROR} from '../util/abort_error';
|
||||
import {SubdivisionGranularitySetting} from '../render/subdivision_granularity_settings';
|
||||
import {OverscaledTileID, CanonicalTileID} from '../tile/tile_id';
|
||||
import {VectorTile} from '@mapbox/vector-tile';
|
||||
import Point from '@mapbox/point-geometry';
|
||||
|
||||
// Unit tests for VectorTileWorkerSource. Network requests are intercepted with a
// nise fake server; worker/actor communication is stubbed with plain objects or vi mocks.
describe('vector tile worker source', () => {
    // Minimal actor stub: every message succeeds with an empty payload.
    const actor = {sendAsync: () => Promise.resolve({})} as IActor;
    let server: FakeServer;

    beforeEach(() => {
        global.fetch = null;
        server = fakeServer.create();
        setPerformance();
    });

    afterEach(() => {
        server.restore();
        vi.clearAllMocks();
    });
    test('VectorTileWorkerSource.abortTile aborts pending request', async () => {
        const source = new VectorTileWorkerSource(actor, new StyleLayerIndex(), []);

        const loadPromise = source.loadTile({
            source: 'source',
            uid: 0,
            tileID: {overscaledZ: 0, wrap: 0, canonical: {x: 0, y: 0, z: 0, w: 0}},
            request: {url: 'http://localhost:2900/abort'}
        } as any as WorkerTileParameters);

        const abortPromise = source.abortTile({
            source: 'source',
            uid: 0
        } as any as TileParameters);

        expect(source.tileState.loading).toEqual({});
        await expect(abortPromise).resolves.toBeFalsy();
        await expect(loadPromise).rejects.toThrow(expect.objectContaining({name: ABORT_ERROR}));
    });

    test('VectorTileWorkerSource.removeTile removes loaded tile', async () => {
        const source = new VectorTileWorkerSource(actor, new StyleLayerIndex(), []);

        source.tileState.loaded = {
            '0': {} as WorkerTile
        };

        const res = await source.removeTile({
            source: 'source',
            uid: 0
        } as any as TileParameters);
        expect(res).toBeUndefined();

        expect(source.tileState.loaded).toEqual({});
    });

    test('VectorTileWorkerSource.reloadTile reloads a previously-loaded tile', async () => {
        const source = new VectorTileWorkerSource(actor, new StyleLayerIndex(), []);
        const parse = vi.fn().mockReturnValue(Promise.resolve({} as WorkerTileResult));

        source.tileState.loaded = {
            '0': {
                status: 'done',
                vectorTile: {},
                parse
            } as any as WorkerTile
        };

        const reloadPromise = source.reloadTile({uid: 0} as any as WorkerTileParameters);
        expect(parse).toHaveBeenCalledTimes(1);
        await expect(reloadPromise).resolves.toBeTruthy();
    });

    test('VectorTileWorkerSource.loadTile reparses tile if the reloadTile has been called during parsing', async () => {
        const rawTileData = new ArrayBuffer(0);

        const layerIndex = new StyleLayerIndex([{
            id: 'test',
            source: 'source',
            'source-layer': 'test',
            type: 'symbol',
            layout: {
                'icon-image': 'hello',
                'text-font': ['StandardFont-Bold'],
                'text-field': '{name}'
            }
        }]);

        // Custom actor that answers getImages/glyph requests after a 100ms delay,
        // keeping the first parse "in flight" while reloadTile is called; the delayed
        // response is cancelled if the request is aborted.
        const actor = {
            sendAsync: (message: {type: string; data: unknown}, abortController: AbortController) => {
                return new Promise((resolve, _reject) => {
                    const res = setTimeout(() => {
                        const response = message.type === 'getImages' ?
                            {'hello': {width: 1, height: 1, data: new Uint8Array([0])}} :
                            {'StandardFont-Bold': {width: 1, height: 1, data: new Uint8Array([0])}};
                        resolve(response);
                    }, 100);
                    abortController.signal.addEventListener('abort', () => {
                        clearTimeout(res);
                    });
                });
            }
        };

        const source = new VectorTileWorkerSource(actor, layerIndex, ['hello']);
        // Stub decoder returning a single-feature tile layer.
        source.loadVectorTile = (_params, _rawData) => {
            return {
                vectorTile: {
                    layers: {
                        test: {
                            version: 2,
                            name: 'test',
                            extent: 8192,
                            length: 1,
                            feature: (featureIndex: number) => ({
                                extent: 8192,
                                type: 1,
                                id: featureIndex,
                                properties: {
                                    name: 'test'
                                },
                                loadGeometry () {
                                    return [[new Point(0, 0)]];
                                }
                            })
                        }
                    }
                },
                rawData: rawTileData
            };
        };

        server.respondWith(request => {
            request.respond(200, {'Content-Type': 'application/pbf'}, rawTileData as any);
        });

        source.loadTile({
            source: 'source',
            uid: 0,
            tileID: {overscaledZ: 0, wrap: 0, canonical: {x: 0, y: 0, z: 0, w: 0}},
            request: {url: 'http://localhost:2900/faketile.pbf'},
            subdivisionGranularity: SubdivisionGranularitySetting.noSubdivision,
        } as any as WorkerTileParameters).then(() => expect(false).toBeTruthy());

        server.respond();

        // allow promise to run
        await sleep(0);

        const res = await source.reloadTile({
            source: 'source',
            uid: 0,
            tileID: {overscaledZ: 0, wrap: 0, canonical: {x: 0, y: 0, z: 0, w: 0}},
            subdivisionGranularity: SubdivisionGranularitySetting.noSubdivision,
        } as any as WorkerTileParameters) as WorkerTileWithData;
        expect(res).toBeDefined();
        expect(res.rawTileData).toBeDefined();
        expect(res.rawTileData).toStrictEqual(rawTileData);
    });

    test('VectorTileWorkerSource.loadTile reparses tile if reloadTile is called during reparsing', async () => {
        const rawTileData = new ArrayBuffer(0);
        const loadVectorData = (_params, _rawData) => {
            return {
                vectorTile: new VectorTile(new Protobuf(rawTileData)),
                rawData: rawTileData
            };
        };

        const layerIndex = new StyleLayerIndex([{
            id: 'test',
            source: 'source',
            'source-layer': 'test',
            type: 'fill'
        }]);

        const source = new VectorTileWorkerSource(actor, layerIndex, []);
        source.loadVectorTile = loadVectorData;

        // Slow parse (20ms) so the reload lands while status is 'parsing'.
        const parseWorkerTileMock = vi
            .spyOn(WorkerTile.prototype, 'parse')
            .mockImplementation(function(_data, _layerIndex, _availableImages, _actor) {
                this.status = 'parsing';
                return new Promise((resolve) => {
                    setTimeout(() => resolve({} as WorkerTileResult), 20);
                });
            });

        server.respondWith(request => {
            request.respond(200, {'Content-Type': 'application/pbf'}, rawTileData as any);
        });

        const loadPromise = source.loadTile({
            source: 'source',
            uid: 0,
            tileID: {overscaledZ: 0, wrap: 0, canonical: {x: 0, y: 0, z: 0, w: 0}},
            request: {url: 'http://localhost:2900/faketile.pbf'}
        } as any as WorkerTileParameters);

        server.respond();

        // let the promise start
        await sleep(0);

        const res = await source.reloadTile({
            source: 'source',
            uid: '0',
            tileID: {overscaledZ: 0, wrap: 0, canonical: {x: 0, y: 0, z: 0, w: 0}},
        } as any as WorkerTileParameters);
        expect(res).toBeDefined();
        expect(parseWorkerTileMock).toHaveBeenCalledTimes(2);
        await expect(loadPromise).resolves.toBeTruthy();
    });

    test('VectorTileWorkerSource loadTile uses _getOverzoomTile when overzoomParameters is provided', async () => {
        const source = new VectorTileWorkerSource({} as any, new StyleLayerIndex(), []);
        const mockVectorTile = {layers: {}} as any;

        source.loadVectorTile = vi.fn().mockReturnValue({
            vectorTile: mockVectorTile,
            rawData: new ArrayBuffer(0)
        });

        const getOverzoomTileSpy = vi.spyOn(source as any, '_getOverzoomTile').mockReturnValue({
            vectorTile: mockVectorTile,
            rawData: new ArrayBuffer(0)
        });

        server.respondWith(request => {
            request.respond(200, {'Content-Type': 'application/pbf'}, new ArrayBuffer(0) as any);
        });

        // Requested z (16) is deeper than maxZoomTileID's z (14), simulating overzoom.
        const params = {
            uid: '1',
            tileID: new OverscaledTileID(16, 0, 16, 100, 100),
            source: 'test',
            overzoomParameters: {
                maxZoomTileID: new CanonicalTileID(14, 25, 25),
                overzoomRequest: {url: ''}
            }
        } as WorkerTileParameters;

        const promise = source.loadTile(params);
        server.respond();
        await promise;

        expect(getOverzoomTileSpy).toHaveBeenCalledWith(params, mockVectorTile);
    });

    test('VectorTileWorkerSource.reloadTile does not reparse tiles with no vectorTile data but does call callback', async () => {
        const source = new VectorTileWorkerSource(actor, new StyleLayerIndex(), []);
        const parse = vi.fn();

        source.tileState.loaded = {
            '0': {
                status: 'done',
                parse
            } as any as WorkerTile
        };

        await source.reloadTile({uid: 0} as any as WorkerTileParameters);
        expect(parse).not.toHaveBeenCalled();
    });

    test('VectorTileWorkerSource.loadTile returns null for an empty tile', async () => {
        const source = new VectorTileWorkerSource(actor, new StyleLayerIndex(), []);
        source.loadVectorTile = (_params, _rawData) => null;
        const parse = vi.fn();

        server.respondWith(request => {
            request.respond(200, {'Content-Type': 'application/pbf'}, 'something...');
        });

        const promise = source.loadTile({
            source: 'source',
            uid: 0,
            tileID: {overscaledZ: 0, wrap: 0, canonical: {x: 0, y: 0, z: 0, w: 0}},
            request: {url: 'http://localhost:2900/faketile.pbf'}
        } as any as WorkerTileParameters);

        server.respond();

        expect(parse).not.toHaveBeenCalled();
        expect(await promise).toBeNull();
    });

    test('VectorTileWorkerSource.returns a good error message when failing to parse a tile', async () => {
        const source = new VectorTileWorkerSource(actor, new StyleLayerIndex(), []);
        const parse = vi.fn();

        server.respondWith(request => {
            request.respond(200, {'Content-Type': 'application/pbf'}, 'something...');
        });

        const loadTilePromise = source.loadTile({
            source: 'source',
            uid: 0,
            tileID: {overscaledZ: 0, wrap: 0, canonical: {x: 0, y: 0, z: 0, w: 0}},
            request: {url: 'http://localhost:2900/faketile.pbf'}
        } as any as WorkerTileParameters);

        server.respond();

        expect(parse).not.toHaveBeenCalled();
        await expect(loadTilePromise).rejects.toThrowError(/Unable to parse the tile at/);
    });

    test('VectorTileWorkerSource.returns a good error message when failing to parse a gzipped tile', async () => {
        const source = new VectorTileWorkerSource(actor, new StyleLayerIndex(), []);
        const parse = vi.fn();

        // 0x1f 0x8b is the gzip magic number; the decoder should detect it.
        server.respondWith(new Uint8Array([0x1f, 0x8b]).buffer);

        const loadTilePromise = source.loadTile({
            source: 'source',
            uid: 0,
            tileID: {overscaledZ: 0, wrap: 0, canonical: {x: 0, y: 0, z: 0, w: 0}},
            request: {url: 'http://localhost:2900/faketile.pbf'}
        } as any as WorkerTileParameters);

        server.respond();

        expect(parse).not.toHaveBeenCalled();
        await expect(loadTilePromise).rejects.toThrowError(/gzipped/);
    });

    test('VectorTileWorkerSource provides resource timing information', async () => {
        const rawTileData = fs.readFileSync(path.join(__dirname, '/../../test/unit/assets/mbsv5-6-18-23.vector.pbf')).buffer.slice(0) as ArrayBuffer;

        const loadVectorData = (_params, _rawData) => {
            return {
                vectorTile: new VectorTile(new Protobuf(rawTileData)),
                rawData: rawTileData,
                cacheControl: null,
                expires: null
            };
        };

        const exampleResourceTiming = {
            connectEnd: 473,
            connectStart: 473,
            decodedBodySize: 86494,
            domainLookupEnd: 473,
            domainLookupStart: 473,
            duration: 341,
            encodedBodySize: 52528,
            entryType: 'resource',
            fetchStart: 473.5,
            initiatorType: 'xmlhttprequest',
            name: 'http://localhost:2900/faketile.pbf',
            nextHopProtocol: 'http/1.1',
            redirectEnd: 0,
            redirectStart: 0,
            requestStart: 477,
            responseEnd: 815,
            responseStart: 672,
            secureConnectionStart: 0
        };

        const layerIndex = new StyleLayerIndex([{
            id: 'test',
            source: 'source',
            'source-layer': 'test',
            type: 'fill'
        }]);

        const source = new VectorTileWorkerSource(actor, layerIndex, []);
        source.loadVectorTile = loadVectorData;

        window.performance.getEntriesByName = vi.fn().mockReturnValue([exampleResourceTiming]);

        server.respondWith(request => {
            request.respond(200, {'Content-Type': 'application/pbf'}, 'ok');
        });

        const promise = source.loadTile({
            source: 'source',
            uid: 0,
            tileID: {overscaledZ: 0, wrap: 0, canonical: {x: 0, y: 0, z: 0, w: 0}},
            request: {url: 'http://localhost:2900/faketile.pbf', collectResourceTiming: true}
        } as any as WorkerTileParameters);

        await sleep(0);
        server.respond();
        const res = await promise;

        expect(res.resourceTiming[0]).toEqual(exampleResourceTiming);
    });

    test('VectorTileWorkerSource provides resource timing information (fallback method)', async () => {
        const rawTileData = fs.readFileSync(path.join(__dirname, '/../../test/unit/assets/mbsv5-6-18-23.vector.pbf')).buffer.slice(0) as ArrayBuffer;

        const loadVectorData = (_params, _rawData) => {
            return {
                vectorTile: new VectorTile(new Protobuf(rawTileData)),
                rawData: rawTileData,
                cacheControl: null,
                expires: null
            };
        };

        const layerIndex = new StyleLayerIndex([{
            id: 'test',
            source: 'source',
            'source-layer': 'test',
            type: 'fill'
        }]);

        const source = new VectorTileWorkerSource(actor, layerIndex, []);
        source.loadVectorTile = loadVectorData;

        // Simulate the performance.mark/measure fallback: two marks 250ms apart.
        const sampleMarks = [100, 350];
        const marks = {};
        const measures = {};
        window.performance.getEntriesByName = vi.fn().mockImplementation(name => (measures[name] || []));
        window.performance.mark = vi.fn().mockImplementation(name => {
            marks[name] = sampleMarks.shift();
            return null;
        });
        window.performance.measure = vi.fn().mockImplementation((name, start, end) => {
            measures[name] = measures[name] || [];
            measures[name].push({
                duration: marks[end] - marks[start],
                entryType: 'measure',
                name,
                startTime: marks[start]
            });
            return null;
        });

        server.respondWith(request => {
            request.respond(200, {'Content-Type': 'application/pbf'}, 'ok');
        });

        const promise = source.loadTile({
            source: 'source',
            uid: 0,
            tileID: {overscaledZ: 0, wrap: 0, canonical: {x: 0, y: 0, z: 0, w: 0}},
            request: {url: 'http://localhost:2900/faketile.pbf', collectResourceTiming: true}
        } as any as WorkerTileParameters);

        await sleep(0);
        server.respond();
        const res = await promise;

        expect(res.resourceTiming[0]).toEqual(
            {'duration': 250, 'entryType': 'measure', 'name': 'http://localhost:2900/faketile.pbf', 'startTime': 100}
        );
    });

    test('VectorTileWorkerSource.loadTile skips parsing and returns unmodified when e-tags match', async () => {
        const source = new VectorTileWorkerSource(actor, new StyleLayerIndex(), []);

        source.loadVectorTile = () => {
            throw new Error('loadVectorTile should not be called when etag matches');
        };

        const rawTileData = new ArrayBuffer(0);
        server.respondWith(request => {
            request.respond(200, {
                'Content-Type': 'application/pbf',
                'ETag': '"v1"'
            }, rawTileData as any);
        });

        const promise = source.loadTile({
            source: 'source',
            uid: 0,
            tileID: {overscaledZ: 0, wrap: 0, canonical: {x: 0, y: 0, z: 0, w: 0}},
            request: {url: 'http://localhost:2900/faketile.pbf'},
            etag: '"v1"',
            subdivisionGranularity: SubdivisionGranularitySetting.noSubdivision,
        } as any as WorkerTileParameters);

        server.respond();
        const res = await promise;

        expect(res).toBeDefined();
        expect(res.etagUnmodified).toBe(true);
    });
});
|
||||
239
node_modules/maplibre-gl/src/source/vector_tile_worker_source.ts
generated
vendored
Normal file
239
node_modules/maplibre-gl/src/source/vector_tile_worker_source.ts
generated
vendored
Normal file
@@ -0,0 +1,239 @@
|
||||
import Protobuf from 'pbf';
|
||||
import {VectorTile} from '@mapbox/vector-tile';
|
||||
import {type ExpiryData, getArrayBuffer} from '../util/ajax';
|
||||
import {WorkerTile} from './worker_tile';
|
||||
import {WorkerTileState, type ParsingState} from './worker_tile_state';
|
||||
import {BoundedLRUCache} from '../tile/tile_cache';
|
||||
import {extend} from '../util/util';
|
||||
import {RequestPerformance} from '../util/request_performance';
|
||||
import {VectorTileOverzoomed, sliceVectorTileLayer, toVirtualVectorTile} from './vector_tile_overzoomed';
|
||||
import {MLTVectorTile} from './vector_tile_mlt';
|
||||
import type {
|
||||
WorkerSource,
|
||||
WorkerTileParameters,
|
||||
TileParameters,
|
||||
WorkerTileResult
|
||||
} from '../source/worker_source';
|
||||
import type {IActor} from '../util/actor';
|
||||
import type {StyleLayer} from '../style/style_layer';
|
||||
import type {StyleLayerIndex} from '../style/style_layer_index';
|
||||
import type {VectorTileLayerLike, VectorTileLike} from '@maplibre/vt-pbf';
|
||||
|
||||
/**
 * A decoded vector tile together with the raw bytes it was parsed from.
 */
export type LoadVectorTileResult = {
    // Parsed tile (MVT via @mapbox/vector-tile, or MLT) — see loadVectorTile.
    vectorTile: VectorTileLike;
    // Original payload; a copy is transferred back to the main thread after parsing.
    rawData: ArrayBufferLike;
};
|
||||
|
||||
/**
|
||||
* The {@link WorkerSource} implementation that supports {@link VectorTileSource}. This class is
|
||||
* used by vector tile sources to perform tile processing operations in a separate worker thread.
|
||||
*/
|
||||
export class VectorTileWorkerSource implements WorkerSource {
|
||||
actor: IActor;
|
||||
layerIndex: StyleLayerIndex;
|
||||
availableImages: Array<string>;
|
||||
tileState: WorkerTileState;
|
||||
overzoomedTileResultCache: BoundedLRUCache<string, LoadVectorTileResult>;
|
||||
|
||||
/**
 * @param actor - channel back to the main thread (used to request images/glyphs during parsing)
 * @param layerIndex - index of style layers used when parsing tiles
 * @param availableImages - image names currently available in the style
 */
constructor(actor: IActor, layerIndex: StyleLayerIndex, availableImages: Array<string>) {
    // Fresh per-source bookkeeping: tile lifecycle state and an LRU cache
    // (capacity 1000) for overzoomed tile results.
    this.tileState = new WorkerTileState();
    this.overzoomedTileResultCache = new BoundedLRUCache<string, LoadVectorTileResult>(1000);
    this.actor = actor;
    this.layerIndex = layerIndex;
    this.availableImages = availableImages;
}
|
||||
|
||||
/**
 * Decodes raw tile bytes into a vector tile.
 *
 * Chooses the decoder based on `params.encoding`: MLT tiles go through
 * {@link MLTVectorTile}, everything else is treated as MVT/protobuf.
 * On decode failure, throws an Error whose message points at the tile URL and,
 * when the payload starts with the gzip magic bytes, hints at a server
 * content-encoding misconfiguration.
 */
loadVectorTile(params: WorkerTileParameters, rawData: ArrayBuffer): LoadVectorTileResult {
    try {
        let vectorTile;
        if (params.encoding === 'mlt') {
            vectorTile = new MLTVectorTile(rawData);
        } else {
            vectorTile = new VectorTile(new Protobuf(rawData));
        }
        return {vectorTile, rawData};
    } catch (ex) {
        const header = new Uint8Array(rawData);
        // 0x1f 0x8b is the gzip magic number.
        const looksGzipped = header[0] === 0x1f && header[1] === 0x8b;
        const detail = looksGzipped ?
            'please make sure the data is not gzipped and that you have configured the relevant header in the server' :
            `got error: ${ex.message}`;
        throw new Error(`Unable to parse the tile at ${params.request.url}, ${detail}`);
    }
}
|
||||
|
||||
/**
 * Implements {@link WorkerSource.loadTile}: downloads the tile, decodes it,
 * and parses it into renderable buckets.
 *
 * Flow:
 * 1. If overzoom parameters are present, the request is redirected to the
 *    deepest available tile at the source's max zoom.
 * 2. The payload is fetched; if the server's etag matches `params.etag`, parsing
 *    is skipped and an `etagUnmodified` result is returned.
 * 3. Otherwise the tile is decoded (and overzoomed if needed) and parsed;
 *    parse state is tracked in `tileState` so reloadTile can cooperate with an
 *    in-flight parse.
 * On any failure the tile is still marked 'done'/loaded before rethrowing, so
 * the bookkeeping never leaks a stuck 'loading' entry.
 */
async loadTile(params: WorkerTileParameters): Promise<WorkerTileResult | null> {
    const {uid, overzoomParameters} = params;

    if (overzoomParameters) {
        params.request = overzoomParameters.overzoomRequest;
    }

    // Timing is only collected when the request opts in (collectResourceTiming).
    const timing = this._startRequestTiming(params);
    const workerTile = new WorkerTile(params);

    this.tileState.startLoading(uid, workerTile);
    const abortController = new AbortController();
    workerTile.abort = abortController;
    try {
        // Download the tile data from the network.
        const tileResponse = await getArrayBuffer(params.request, abortController);

        // Tile data hasn't changed (etag support) - return an unmodified result
        if (params.etag && params.etag === tileResponse.etag) {
            this.tileState.finishLoading(uid);
            return this._getEtagUnmodifiedResult(tileResponse, timing);
        }

        const tileResult = this.loadVectorTile(params, tileResponse.data);
        this.tileState.finishLoading(uid);
        if (!tileResult) return null;

        let {vectorTile, rawData} = tileResult;
        if (overzoomParameters) {
            // Slice the max-zoom tile down to the requested overzoomed tile.
            ({vectorTile, rawData} = this._getOverzoomTile(params, vectorTile));
        }

        const cacheControl = this._getExpiryData(tileResponse);
        const resourceTiming = this._finishRequestTiming(timing);

        workerTile.vectorTile = vectorTile;
        this.tileState.markLoaded(uid, workerTile);

        const parseState = {rawData, cacheControl, resourceTiming}; // Keep data so reloadTile can access if parse is canceled.
        this.tileState.setParsing(uid, parseState);
        try {
            return await this._parseWorkerTile(workerTile, params, parseState);
        } finally {
            this.tileState.clearParsing(uid);
        }
    } catch (err) {
        this.tileState.finishLoading(uid);
        workerTile.status = 'done';
        this.tileState.markLoaded(uid, workerTile);
        throw err;
    }
}
|
||||
|
||||
/**
 * Builds the result returned when the server reports the tile is unchanged
 * (etag match): no tile payload, just the unmodified flag plus any fresh
 * expiry and timing data.
 */
_getEtagUnmodifiedResult(response: ExpiryData, timing: RequestPerformance): WorkerTileResult {
    const base = {etagUnmodified: true as const};
    return extend(base, this._getExpiryData(response), this._finishRequestTiming(timing));
}
|
||||
|
||||
async _parseWorkerTile(workerTile: WorkerTile, params: WorkerTileParameters, parseState?: ParsingState): Promise<WorkerTileResult> {
|
||||
let result = await workerTile.parse(workerTile.vectorTile, this.layerIndex, this.availableImages, this.actor, params.subdivisionGranularity);
|
||||
|
||||
if (parseState) {
|
||||
const {rawData, cacheControl, resourceTiming} = parseState;
|
||||
// Transferring a copy of rawTileData because the worker needs to retain its copy.
|
||||
result = extend({rawTileData: rawData.slice(0), encoding: params.encoding}, result, cacheControl, resourceTiming);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
_getExpiryData({expires, cacheControl, etag}: ExpiryData): ExpiryData {
|
||||
const data: ExpiryData = {};
|
||||
if (expires) data.expires = expires;
|
||||
if (cacheControl) data.cacheControl = cacheControl;
|
||||
if (etag) data.etag = etag;
|
||||
return data;
|
||||
}
|
||||
|
||||
_startRequestTiming(params: WorkerTileParameters): RequestPerformance | undefined {
|
||||
if (!params.request?.collectResourceTiming) return;
|
||||
return new RequestPerformance(params.request.url);
|
||||
}
|
||||
|
||||
_finishRequestTiming(timing: RequestPerformance): {resourceTiming?: any} {
|
||||
const timingData = timing?.finish();
|
||||
if (!timingData) return {};
|
||||
|
||||
// it's necessary to eval the result of getEntriesByName() here via parse/stringify
|
||||
// late evaluation in the main thread causes TypeError: illegal invocation
|
||||
return {resourceTiming: JSON.parse(JSON.stringify(timingData))};
|
||||
}
|
||||
|
||||
    /**
     * If we are seeking a tile deeper than the source's max available canonical tile, get the overzoomed tile
     * @param params - the worker tile parameters
     * @param maxZoomVectorTile - the original vector tile at the source's max available canonical zoom
     * @returns the overzoomed tile and its raw data
     */
    private _getOverzoomTile(params: WorkerTileParameters, maxZoomVectorTile: VectorTileLike): LoadVectorTileResult {
        const {tileID, source, overzoomParameters} = params;
        const {maxZoomTileID} = overzoomParameters;

        // Cache key pairs the parent (max-zoom) tile with the requested deeper tile.
        const cacheKey = `${maxZoomTileID.key}_${tileID.key}`;
        const cachedOverzoomTile = this.overzoomedTileResultCache.get(cacheKey);

        if (cachedOverzoomTile) {
            return cachedOverzoomTile;
        }

        const overzoomedVectorTile = new VectorTileOverzoomed();
        // Only consider source layers that style layers of this source actually reference.
        const layerFamilies: Record<string, StyleLayer[][]> = this.layerIndex.familiesBySource[source];

        for (const sourceLayerId in layerFamilies) {
            const sourceLayer: VectorTileLayerLike = maxZoomVectorTile.layers[sourceLayerId];
            if (!sourceLayer) {
                continue;
            }

            // Slice the parent layer's geometry down to the requested child tile's extent;
            // layers that end up with no features are dropped.
            const slicedTileLayer = sliceVectorTileLayer(sourceLayer, maxZoomTileID, tileID.canonical);
            if (slicedTileLayer.length > 0) {
                overzoomedVectorTile.addLayer(slicedTileLayer);
            }
        }
        const overzoomedVectorTileResult = toVirtualVectorTile(overzoomedVectorTile);
        this.overzoomedTileResultCache.set(cacheKey, overzoomedVectorTileResult);

        return overzoomedVectorTileResult;
    }
|
||||
|
||||
    /**
     * Implements {@link WorkerSource.reloadTile}.
     *
     * Re-parses an already-loaded tile (e.g. after a style change). If the initial
     * parse is still in flight, its pending state (raw bytes + expiry/timing) is
     * consumed so this reparse returns that data instead.
     */
    async reloadTile(params: WorkerTileParameters): Promise<WorkerTileResult> {
        const uid = params.uid;

        const workerTile = this.tileState.getLoaded(uid);
        if (!workerTile) throw new Error('Should not be trying to reload a tile that was never loaded or has been removed');

        workerTile.showCollisionBoxes = params.showCollisionBoxes;

        if (workerTile.status === 'parsing') {
            // if we are cancelling the original parse, make sure to pass the rawTileData from the original parse
            const parseState = this.tileState.consumeParsing(uid);
            return await this._parseWorkerTile(workerTile, params, parseState);
        }

        // If there was no vector tile data on the initial load, don't try and reparse the tile.
        // this seems like a missing case where cache control is lost? see #3309
        if (workerTile.status === 'done' && workerTile.vectorTile) {
            return await this._parseWorkerTile(workerTile, params);
        }
        // NOTE(review): falls through (resolving undefined) when status is 'done' with no
        // vectorTile — callers appear to tolerate this; confirm against the WorkerSource contract.
    }
|
||||
|
||||
    /**
     * Implements {@link WorkerSource.abortTile}.
     * Cancels any in-flight load for the given tile uid via the shared tile-state tracker.
     */
    async abortTile(params: TileParameters): Promise<void> {
        this.tileState.abort(params.uid);
    }
|
||||
|
||||
    /**
     * Implements {@link WorkerSource.removeTile}.
     * Drops the cached loaded worker tile for the given uid, releasing its data.
     */
    async removeTile(params: TileParameters): Promise<void> {
        this.tileState.removeLoaded(params.uid);
    }
|
||||
}
|
||||
137
node_modules/maplibre-gl/src/source/video_source.test.ts
generated
vendored
Normal file
137
node_modules/maplibre-gl/src/source/video_source.test.ts
generated
vendored
Normal file
@@ -0,0 +1,137 @@
|
||||
import {describe, test, expect, vi} from 'vitest';
|
||||
import {VideoSource} from './video_source';
|
||||
import {extend} from '../util/util';
|
||||
import {getMockDispatcher, waitForEvent} from '../util/test/util';
|
||||
|
||||
import type {Coordinates} from './image_source';
|
||||
import {Tile} from '../tile/tile';
|
||||
import {OverscaledTileID} from '../tile/tile_id';
|
||||
import {Evented} from '../util/evented';
|
||||
import {type IReadonlyTransform} from '../geo/transform_interface';
|
||||
import {MercatorTransform} from '../geo/projection/mercator_transform';
|
||||
|
||||
// Minimal stand-in for the Map object: provides only the transform, style and
// painter/gl surface that VideoSource touches during onAdd()/prepare().
class StubMap extends Evented {
    transform: IReadonlyTransform;
    style: any;
    painter: any;

    constructor() {
        super();
        this.transform = new MercatorTransform();
        this.style = {};
        this.painter = {
            context: {
                gl: {
                    // prepare() calls gl.texSubImage2D when refreshing an existing texture; no-op here.
                    texSubImage2D: () => {}
                }
            }
        };
    }
}
|
||||
|
||||
function createSource(options) {
|
||||
const c = options && options.video || window.document.createElement('video');
|
||||
|
||||
options = extend({coordinates: [[0, 0], [1, 0], [1, 1], [0, 1]]}, options);
|
||||
|
||||
const source = new VideoSource('id', options, getMockDispatcher(), options.eventedParent);
|
||||
|
||||
source.video = c;
|
||||
return source;
|
||||
}
|
||||
|
||||
describe('VideoSource', () => {
    // Shared source used by the constructor/coordinate tests below.
    // Attribution File:Volcano Lava Sample.webm: U.S. Geological Survey (USGS), Public domain, via Wikimedia Commons
    const source = createSource({
        type: 'video',
        urls: ['cropped.mp4', 'https://upload.wikimedia.org/wikipedia/commons/2/22/Volcano_Lava_Sample.webm'],
        coordinates: [
            [-76.54, 39.18],
            [-76.52, 39.18],
            [-76.52, 39.17],
            [-76.54, 39.17]
        ]
    });

    test('constructor', () => {
        // Defaults inherited from ImageSource.
        expect(source.minzoom).toBe(0);
        expect(source.maxzoom).toBe(22);
        expect(source.tileSize).toBe(512);
    });

    test('sets coordinates', () => {
        const newCoordinates = [[0, 0], [-1, 0], [-1, -1], [0, -1]] as Coordinates;
        source.setCoordinates(newCoordinates);
        const serialized = source.serialize();

        // serialize() must reflect the updated corner coordinates.
        expect(serialized.coordinates).toEqual(newCoordinates);

    });

    //test video retrieval by first supplying the video element directly
    test('gets video', () => {
        const el = window.document.createElement('video');
        // Attribution File:Volcano Lava Sample.webm: U.S. Geological Survey (USGS), Public domain, via Wikimedia Commons
        const source = createSource({
            type: 'video',
            video: el,
            urls: ['cropped.mp4', 'https://upload.wikimedia.org/wikipedia/commons/2/22/Volcano_Lava_Sample.webm'],
            coordinates: [
                [-76.54, 39.18],
                [-76.52, 39.18],
                [-76.52, 39.17],
                [-76.54, 39.17]
            ]
        });

        expect(source.getVideo()).toBe(el);
    });

    test('fires idle event on prepare call when there is at least one not loaded tile', async () => {
        // readyState 2 satisfies prepare()'s "enough data" guard without a real video.
        const source = createSource({
            type: 'video',
            urls: [],
            video: {
                readyState: 2,
                play: () => {}
            },
            coordinates: [
                [-76.54, 39.18],
                [-76.52, 39.18],
                [-76.52, 39.17],
                [-76.54, 39.17]
            ]
        });
        const tile = new Tile(new OverscaledTileID(1, 0, 1, 0, 0), 512);
        const dataEvent = waitForEvent(source, 'data', (e) => e.dataType === 'source' && e.sourceDataType === 'idle');
        source.onAdd(new StubMap() as any);

        source.tiles[String(tile.tileID.wrap)] = tile;
        // assign dummies directly so we don't need to stub the gl things
        source.texture = {
            update: () => {},
            bind: () => {}
        } as any;
        source.prepare();
        await dataEvent;
        expect(tile.state).toBe('loaded');
    });

    test('onRemove removes playing listener and pauses video', () => {
        const video = window.document.createElement('video');
        const removeListenerSpy = vi.spyOn(video, 'removeEventListener');
        const pauseSpy = vi.spyOn(video, 'pause');

        const source = createSource({
            type: 'video',
            urls: [],
            coordinates: [[-76.54, 39.18], [-76.52, 39.18], [-76.52, 39.17], [-76.54, 39.17]]
        });
        source.video = video;

        source.onRemove();

        // onRemove must detach the exact 'playing' handler and stop playback.
        expect(removeListenerSpy).toHaveBeenCalledWith('playing', expect.any(Function));
        expect(pauseSpy).toHaveBeenCalled();
    });
});
|
||||
204
node_modules/maplibre-gl/src/source/video_source.ts
generated
vendored
Normal file
204
node_modules/maplibre-gl/src/source/video_source.ts
generated
vendored
Normal file
@@ -0,0 +1,204 @@
|
||||
import {getVideo} from '../util/ajax';
|
||||
import {ResourceType} from '../util/request_manager';
|
||||
|
||||
import {ImageSource} from './image_source';
|
||||
import {Texture} from '../render/texture';
|
||||
import {Event, ErrorEvent} from '../util/evented';
|
||||
import {ValidationError} from '@maplibre/maplibre-gl-style-spec';
|
||||
|
||||
import type {Map} from '../ui/map';
|
||||
import type {Dispatcher} from '../util/dispatcher';
|
||||
import type {Evented} from '../util/evented';
|
||||
import type {VideoSourceSpecification} from '@maplibre/maplibre-gl-style-spec';
|
||||
|
||||
/**
|
||||
* A data source containing video.
|
||||
* (See the [Style Specification](https://maplibre.org/maplibre-style-spec/#sources-video) for detailed documentation of options.)
|
||||
*
|
||||
* @group Sources
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* // add to map
|
||||
* map.addSource('some id', {
|
||||
* type: 'video',
|
||||
* url: [
|
||||
* 'https://www.mapbox.com/blog/assets/baltimore-smoke.mp4',
|
||||
* 'https://www.mapbox.com/blog/assets/baltimore-smoke.webm'
|
||||
* ],
|
||||
* coordinates: [
|
||||
* [-76.54, 39.18],
|
||||
* [-76.52, 39.18],
|
||||
* [-76.52, 39.17],
|
||||
* [-76.54, 39.17]
|
||||
* ]
|
||||
* });
|
||||
*
|
||||
* // update
|
||||
* let mySource = map.getSource('some id');
|
||||
* mySource.setCoordinates([
|
||||
* [-76.54335737228394, 39.18579907229748],
|
||||
* [-76.52803659439087, 39.1838364847587],
|
||||
* [-76.5295386314392, 39.17683392507606],
|
||||
* [-76.54520273208618, 39.17876344106642]
|
||||
* ]);
|
||||
*
|
||||
* map.removeSource('some id'); // remove
|
||||
* ```
|
||||
* @see [Add a video](https://maplibre.org/maplibre-gl-js/docs/examples/video-on-a-map/)
|
||||
*
|
||||
* Note that when rendered as a raster layer, the layer's `raster-fade-duration` property will cause the video to fade in.
|
||||
* This happens when playback is started, paused and resumed, or when the video's coordinates are updated. To avoid this behavior,
|
||||
* set the layer's `raster-fade-duration` property to `0`.
|
||||
*/
|
||||
export class VideoSource extends ImageSource {
    options: VideoSourceSpecification;
    // Resolved (request-transformed) URLs of the candidate video sources.
    urls: Array<string>;
    video: HTMLVideoElement;
    roundZoom: boolean;

    // Repaint callback attached to the video's 'playing' event; kept as a bound field
    // so onRemove() can unregister the exact same listener instance.
    private _onPlayingHandler = () => {
        this.map?.triggerRepaint();
    };

    constructor(id: string, options: VideoSourceSpecification, dispatcher: Dispatcher, eventedParent: Evented) {
        super(id, options, dispatcher, eventedParent);
        this.roundZoom = true;
        this.type = 'video';
        this.options = options;
    }

    /**
     * Resolves the configured URLs through the map's request manager, downloads the
     * video element, and starts playback when already attached to a map. Fires an
     * `error` event if the video cannot be fetched.
     */
    async load() {
        this._loaded = false;
        const options = this.options;

        this.urls = [];
        for (const url of options.urls) {
            this.urls.push((await this.map._requestManager.transformRequest(url, ResourceType.Source)).url);
        }
        try {
            const video = await getVideo(this.urls);
            this._loaded = true;
            if (!video) {
                return;
            }
            this.video = video;
            this.video.loop = true;

            // Start repainting when video starts playing. hasTransition() will then return
            // true to trigger additional frames as long as the videos continues playing.
            this.video.addEventListener('playing', this._onPlayingHandler);

            if (this.map) {
                this.video.play();
            }

            this._finishLoading();
        } catch (err) {
            this.fire(new ErrorEvent(err));
        }
    }

    /**
     * Pauses the video.
     */
    pause() {
        if (this.video) {
            this.video.pause();
        }
    }

    /**
     * Plays the video.
     */
    play() {
        if (this.video) {
            this.video.play();
        }
    }

    /**
     * Sets playback to a timestamp, in seconds. Fires an `error` event (instead of
     * seeking) when the timestamp falls outside the video's seekable range.
     */
    seek(seconds: number) {
        if (this.video) {
            const seekableRange = this.video.seekable;
            // NOTE(review): assumes at least one seekable range exists; if seekable.length
            // is 0, start(0)/end(0) would throw — confirm callers only seek after load.
            if (seconds < seekableRange.start(0) || seconds > seekableRange.end(0)) {
                this.fire(new ErrorEvent(new ValidationError(`sources.${this.id}`, null, `Playback for this video can be set only between the ${seekableRange.start(0)} and ${seekableRange.end(0)}-second mark.`)));
            } else this.video.currentTime = seconds;
        }
    }

    /**
     * Returns the HTML `video` element.
     *
     * @returns The HTML `video` element.
     */
    getVideo(): HTMLVideoElement {
        return this.video;
    }

    // Attaches the source to a map (idempotent) and kicks off loading; if the video
    // is already present, resumes playback and re-applies the corner coordinates.
    onAdd(map: Map) {
        if (this.map) return;
        this.map = map;
        this.load();
        if (this.video) {
            this.video.play();
            this.setCoordinates(this.coordinates);
        }
    }

    /**
     * Detaches from the map: unregisters the 'playing' repaint listener and pauses
     * playback so the detached video stops consuming resources.
     */
    onRemove() {
        super.onRemove();
        if (this.video) {
            this.video.removeEventListener('playing', this._onPlayingHandler);
            this.video.pause();
        }
    }

    /**
     * Uploads the current video frame to the GL texture and marks covering tiles as
     * loaded, firing a source `idle` data event the first time new tiles become ready.
     * No-op until tiles exist and the video has enough data (readyState >= 2).
     */
    prepare(): this {
        // NOTE(review): declared to return `this` but no value is ever returned —
        // callers must not chain on prepare(); confirm whether the signature should be void.
        if (Object.keys(this.tiles).length === 0 || this.video.readyState < 2) {
            return; // not enough data for current position
        }

        const context = this.map.painter.context;
        const gl = context.gl;

        if (!this.texture) {
            this.texture = new Texture(context, this.video, gl.RGBA);
            this.texture.bind(gl.LINEAR, gl.CLAMP_TO_EDGE);
        } else if (!this.video.paused) {
            // Re-upload the current frame only while the video is actually advancing.
            this.texture.bind(gl.LINEAR, gl.CLAMP_TO_EDGE);
            gl.texSubImage2D(gl.TEXTURE_2D, 0, 0, 0, gl.RGBA, gl.UNSIGNED_BYTE, this.video);
        }

        let newTilesLoaded = false;
        for (const w in this.tiles) {
            const tile = this.tiles[w];
            if (tile.state !== 'loaded') {
                tile.state = 'loaded';
                tile.texture = this.texture;
                newTilesLoaded = true;
            }
        }

        if (newTilesLoaded) {
            this.fire(new Event('data', {dataType: 'source', sourceDataType: 'idle', sourceId: this.id}));
        }
    }

    serialize(): VideoSourceSpecification {
        return {
            type: 'video',
            urls: this.urls,
            coordinates: this.coordinates
        };
    }

    // True while the video is playing; keeps the map repainting between frames.
    hasTransition() {
        return this.video && !this.video.paused;
    }
}
|
||||
150
node_modules/maplibre-gl/src/source/worker.test.ts
generated
vendored
Normal file
150
node_modules/maplibre-gl/src/source/worker.test.ts
generated
vendored
Normal file
@@ -0,0 +1,150 @@
|
||||
import {describe, beforeEach, test, expect, vi} from 'vitest';
|
||||
import {fakeServer} from 'nise';
|
||||
import Worker from './worker';
|
||||
import {type LayerSpecification} from '@maplibre/maplibre-gl-style-spec';
|
||||
import {type WorkerGlobalScopeInterface} from '../util/web_worker';
|
||||
import {type CanonicalTileID, type OverscaledTileID} from '../tile/tile_id';
|
||||
import {type WorkerSource, type WorkerTileParameters, type WorkerTileResult} from './worker_source';
|
||||
import {rtlWorkerPlugin} from './rtl_text_plugin_worker';
|
||||
import {type ActorTarget, type IActor} from '../util/actor';
|
||||
import {MessageType} from '../util/actor_messages';
|
||||
|
||||
// Minimal WorkerSource implementation used to verify that externally-registered
// worker sources receive a map-scoped actor: loadTile just forwards a message
// through that actor; the remaining contract methods are intentionally unimplemented.
class WorkerSourceMock implements WorkerSource {
    availableImages: string[];
    constructor(private actor: IActor) {}
    loadTile(_: WorkerTileParameters): Promise<WorkerTileResult> {
        return this.actor.sendAsync({type: MessageType.loadTile, data: {} as any}, new AbortController());
    }
    reloadTile(_: WorkerTileParameters): Promise<WorkerTileResult> {
        throw new Error('Method not implemented.');
    }
    abortTile(_: WorkerTileParameters): Promise<void> {
        throw new Error('Method not implemented.');
    }
    removeTile(_: WorkerTileParameters): Promise<void> {
        throw new Error('Method not implemented.');
    }
}
|
||||
|
||||
describe('Worker RTLTextPlugin', () => {
    let worker: Worker;
    let _self: WorkerGlobalScopeInterface & ActorTarget;

    beforeEach(() => {
        // Fake worker global scope: only the hooks the Worker constructor touches.
        _self = {
            addEventListener() {},
            importScripts() {}
        } as any;
        worker = new Worker(_self);
        // Disable real network access inside the worker under test.
        global.fetch = null;
    });

    test('should call setMethods in plugin', () => {
        const spy = vi.spyOn(rtlWorkerPlugin, 'setMethods').mockImplementation(() => {});

        _self.registerRTLTextPlugin({} as any);

        expect(spy).toHaveBeenCalled();
    });

    test('should call syncState when rtl message is received', async () => {
        const syncStateSpy = vi.spyOn(rtlWorkerPlugin, 'syncState').mockImplementation((_, __) => Promise.resolve({} as any));

        await worker.actor.messageHandlers[MessageType.syncRTLPluginState]('', {} as any) as any;

        expect(syncStateSpy).toHaveBeenCalled();
    });
});
|
||||
|
||||
describe('Worker generic testing', () => {
    let worker: Worker;
    let _self: WorkerGlobalScopeInterface & ActorTarget;

    beforeEach(() => {
        _self = {
            addEventListener() {}
        } as any;
        worker = new Worker(_self);
        // Disable real network access inside the worker under test.
        global.fetch = null;
    });

    test('should validate handlers execution in worker for load tile', async () => {
        const server = fakeServer.create();
        const messagePromise = worker.actor.messageHandlers[MessageType.loadTile]('0', {
            type: 'vector',
            source: 'source',
            uid: '0',
            tileID: {overscaledZ: 0, wrap: 0, canonical: {x: 0, y: 0, z: 0} as CanonicalTileID} as any as OverscaledTileID,
            request: {url: '/error'}// Sinon fake server gives 404 responses by default
        } as WorkerTileParameters);
        server.respond();
        await expect(messagePromise).rejects.toBeDefined();
        server.restore();
    });

    test('isolates different instances\' data', () => {
        // Two map ids must each get their own layer index.
        worker.actor.messageHandlers[MessageType.setLayers]('0', [
            {id: 'one', type: 'circle'} as LayerSpecification
        ]);

        worker.actor.messageHandlers[MessageType.setLayers]('1', [
            {id: 'one', type: 'circle'} as LayerSpecification,
            {id: 'two', type: 'circle'} as LayerSpecification
        ]);

        expect(worker.layerIndexes[0]).not.toBe(worker.layerIndexes[1]);
    });

    test('worker source messages dispatched to the correct map instance', () => {
        const externalSourceName = 'test';

        const sendAsyncSpy = vi.fn().mockReturnValue(Promise.resolve({} as any));
        worker.actor.sendAsync = sendAsyncSpy;

        _self.registerWorkerSource(externalSourceName, WorkerSourceMock);

        // Registering the same source name twice must be rejected.
        expect(() => {
            _self.registerWorkerSource(externalSourceName, WorkerSourceMock);
        }).toThrow(`Worker source with name "${externalSourceName}" already registered.`);

        worker.actor.messageHandlers[MessageType.loadTile]('999', {type: externalSourceName} as WorkerTileParameters);

        // The wrapped actor must stamp the originating mapId onto outgoing messages.
        expect(sendAsyncSpy).toHaveBeenCalled();
        expect(sendAsyncSpy.mock.calls[0][0].type).toBe(MessageType.loadTile);
        expect(sendAsyncSpy.mock.calls[0][0].targetMapId).toBe('999');
        expect(sendAsyncSpy.mock.calls[0][1]).toBeDefined();
    });

    test('Referrer is set', () => {
        worker.actor.messageHandlers[MessageType.setReferrer]('fakeId', 'myMap');
        expect(worker.referrer).toBe('myMap');
    });

    test('calls callback on error', async () => {
        const server = fakeServer.create();
        const messagePromise = worker.actor.messageHandlers[MessageType.importScript]('0', '/error');
        server.respond();
        await expect(messagePromise).rejects.toBeDefined();
        server.restore();
    });

    test('set images', () => {
        expect(worker.availableImages['0']).toBeUndefined();
        worker.actor.messageHandlers[MessageType.setImages]('0', ['availableImages']);
        expect(worker.availableImages['0']).toEqual(['availableImages']);
    });

    test('clears resources when map is removed', () => {
        worker.actor.messageHandlers[MessageType.setLayers]('0', []);
        expect(worker.layerIndexes['0']).toBeDefined();
        worker.actor.messageHandlers[MessageType.removeMap]('0', undefined);
        expect(worker.layerIndexes['0']).toBeUndefined();
    });

    test('propagates global state', () => {
        const globalState = {key: 'value'};
        worker.actor.messageHandlers[MessageType.updateGlobalState]('0', globalState);
        // The worker keeps its own copy rather than aliasing the incoming object.
        expect(worker.globalStates.get('0')).not.toBe(globalState);
        expect(worker.globalStates.get('0')).toEqual(globalState);
    });
});
|
||||
288
node_modules/maplibre-gl/src/source/worker.ts
generated
vendored
Normal file
288
node_modules/maplibre-gl/src/source/worker.ts
generated
vendored
Normal file
@@ -0,0 +1,288 @@
|
||||
import {Actor, type ActorTarget, type IActor} from '../util/actor';
|
||||
import {StyleLayerIndex} from '../style/style_layer_index';
|
||||
import {VectorTileWorkerSource} from './vector_tile_worker_source';
|
||||
import {RasterDEMTileWorkerSource} from './raster_dem_tile_worker_source';
|
||||
import {rtlWorkerPlugin, type RTLTextPlugin} from './rtl_text_plugin_worker';
|
||||
import {GeoJSONWorkerSource, type LoadGeoJSONParameters} from './geojson_worker_source';
|
||||
import {isWorker} from '../util/util';
|
||||
import {addProtocol, removeProtocol} from './protocol_crud';
|
||||
import {type PluginState} from './rtl_text_plugin_status';
|
||||
import type {
|
||||
WorkerSource,
|
||||
WorkerSourceConstructor,
|
||||
WorkerTileParameters,
|
||||
WorkerDEMTileParameters,
|
||||
TileParameters
|
||||
} from '../source/worker_source';
|
||||
|
||||
import type {WorkerGlobalScopeInterface} from '../util/web_worker';
|
||||
import type {LayerSpecification} from '@maplibre/maplibre-gl-style-spec';
|
||||
import {
|
||||
MessageType,
|
||||
type ClusterIDAndSource,
|
||||
type GetClusterLeavesParams,
|
||||
type RemoveSourceParams,
|
||||
type UpdateLayersParameters
|
||||
} from '../util/actor_messages';
|
||||
|
||||
/**
|
||||
* The Worker class responsible for background thread related execution
|
||||
*/
|
||||
export default class Worker {
|
||||
self: WorkerGlobalScopeInterface & ActorTarget;
|
||||
actor: Actor;
|
||||
layerIndexes: {[_: string]: StyleLayerIndex};
|
||||
availableImages: {[_: string]: Array<string>};
|
||||
externalWorkerSourceTypes: { [_: string]: WorkerSourceConstructor };
|
||||
/**
|
||||
* This holds a cache for the already created worker source instances.
|
||||
* The cache is build with the following hierarchy:
|
||||
* [mapId][sourceType][sourceName]: worker source instance
|
||||
* sourceType can be 'vector' for example
|
||||
*/
|
||||
workerSources: {
|
||||
[_: string]: {
|
||||
[_: string]: {
|
||||
[_: string]: WorkerSource;
|
||||
};
|
||||
};
|
||||
};
|
||||
/**
|
||||
* This holds a cache for the already created DEM worker source instances.
|
||||
* The cache is build with the following hierarchy:
|
||||
* [mapId][sourceType]: DEM worker source instance
|
||||
* sourceType can be 'raster-dem' for example
|
||||
*/
|
||||
demWorkerSources: {
|
||||
[_: string]: {
|
||||
[_: string]: RasterDEMTileWorkerSource;
|
||||
};
|
||||
};
|
||||
referrer: string;
|
||||
globalStates: Map<string, Record<string, any>>;
|
||||
|
||||
    // Wires the worker global scope: exposes registration hooks on `self` and
    // registers one message handler per actor message type, routing each to the
    // per-map worker-source / DEM-source / layer-index caches.
    constructor(self: WorkerGlobalScopeInterface & ActorTarget) {
        this.self = self;
        this.actor = new Actor(self);

        this.layerIndexes = {};
        this.availableImages = {};

        this.workerSources = {};
        this.demWorkerSources = {};
        this.externalWorkerSourceTypes = {};

        this.globalStates = new Map<string, Record<string, any>>();

        // Hook for plugins to register custom worker source types; names must be unique.
        this.self.registerWorkerSource = (name: string, WorkerSource: WorkerSourceConstructor) => {
            if (this.externalWorkerSourceTypes[name]) {
                throw new Error(`Worker source with name "${name}" already registered.`);
            }
            this.externalWorkerSourceTypes[name] = WorkerSource;
        };

        this.self.addProtocol = addProtocol;
        this.self.removeProtocol = removeProtocol;

        // This is invoked by the RTL text plugin when the download via the `importScripts` call has finished, and the code has been parsed.
        this.self.registerRTLTextPlugin = (rtlTextPlugin: RTLTextPlugin) => {

            rtlWorkerPlugin.setMethods(rtlTextPlugin);
        };

        // --- DEM tile handlers ---
        this.actor.registerMessageHandler(MessageType.loadDEMTile, (mapId: string, params: WorkerDEMTileParameters) => {
            return this._getDEMWorkerSource(mapId, params.source).loadTile(params);
        });

        this.actor.registerMessageHandler(MessageType.removeDEMTile, async (mapId: string, params: TileParameters) => {
            this._getDEMWorkerSource(mapId, params.source).removeTile(params);
        });

        // --- GeoJSON cluster queries and data loading ---
        this.actor.registerMessageHandler(MessageType.getClusterExpansionZoom, async (mapId: string, params: ClusterIDAndSource) => {
            return (this._getWorkerSource(mapId, params.type, params.source) as GeoJSONWorkerSource).getClusterExpansionZoom(params);
        });

        this.actor.registerMessageHandler(MessageType.getClusterChildren, async (mapId: string, params: ClusterIDAndSource) => {
            return (this._getWorkerSource(mapId, params.type, params.source) as GeoJSONWorkerSource).getClusterChildren(params);
        });

        this.actor.registerMessageHandler(MessageType.getClusterLeaves, async (mapId: string, params: GetClusterLeavesParams) => {
            return (this._getWorkerSource(mapId, params.type, params.source) as GeoJSONWorkerSource).getClusterLeaves(params);
        });

        this.actor.registerMessageHandler(MessageType.loadData, (mapId: string, params: LoadGeoJSONParameters) => {
            return (this._getWorkerSource(mapId, params.type, params.source) as GeoJSONWorkerSource).loadData(params);
        });

        // --- Generic tile lifecycle: load / reload / abort / remove ---
        this.actor.registerMessageHandler(MessageType.loadTile, (mapId: string, params: WorkerTileParameters) => {
            return this._getWorkerSource(mapId, params.type, params.source).loadTile(params);
        });

        this.actor.registerMessageHandler(MessageType.reloadTile, (mapId: string, params: WorkerTileParameters) => {
            return this._getWorkerSource(mapId, params.type, params.source).reloadTile(params);
        });

        this.actor.registerMessageHandler(MessageType.abortTile, (mapId: string, params: TileParameters) => {
            return this._getWorkerSource(mapId, params.type, params.source).abortTile(params);
        });

        this.actor.registerMessageHandler(MessageType.removeTile, (mapId: string, params: TileParameters) => {
            return this._getWorkerSource(mapId, params.type, params.source).removeTile(params);
        });

        // --- Source/map teardown ---
        this.actor.registerMessageHandler(MessageType.removeSource, async (mapId: string, params: RemoveSourceParams) => {
            if (!this.workerSources[mapId] ||
                !this.workerSources[mapId][params.type] ||
                !this.workerSources[mapId][params.type][params.source]) {
                return;
            }

            const worker = this.workerSources[mapId][params.type][params.source];
            delete this.workerSources[mapId][params.type][params.source];

            // removeSource is optional on the WorkerSource interface.
            if (worker.removeSource !== undefined) {
                worker.removeSource(params);
            }
        });

        this.actor.registerMessageHandler(MessageType.removeMap, async (mapId: string) => {
            delete this.layerIndexes[mapId];
            delete this.availableImages[mapId];
            delete this.workerSources[mapId];
            delete this.demWorkerSources[mapId];
            this.globalStates.delete(mapId);
        });

        // --- Misc worker configuration ---
        this.actor.registerMessageHandler(MessageType.setReferrer, async (_mapId: string, params: string) => {
            this.referrer = params;
        });

        this.actor.registerMessageHandler(MessageType.syncRTLPluginState, (mapId: string, params: PluginState) => {
            return this._syncRTLPluginState(mapId, params);
        });

        this.actor.registerMessageHandler(MessageType.importScript, async (_mapId: string, params: string) => {
            this.self.importScripts(params);
        });

        this.actor.registerMessageHandler(MessageType.setImages, (mapId: string, params: string[]) => {
            return this._setImages(mapId, params);
        });

        // --- Style layer and global-state updates ---
        this.actor.registerMessageHandler(MessageType.updateLayers, async (mapId: string, params: UpdateLayersParameters) => {
            this._getLayerIndex(mapId).update(params.layers, params.removedIds, this._getGlobalState(mapId));
        });

        this.actor.registerMessageHandler(MessageType.updateGlobalState, async (mapId: string, params: Record<string, any>) => {
            // Merge into the existing per-map state object (keeps references held elsewhere valid).
            const globalState = this._getGlobalState(mapId);
            for (const key in params) {
                globalState[key] = params[key];
            }
        });

        this.actor.registerMessageHandler(MessageType.setLayers, async (mapId: string, params: Array<LayerSpecification>) => {
            this._getLayerIndex(mapId).replace(params, this._getGlobalState(mapId));
        });
    }
|
||||
|
||||
private _getGlobalState(mapId: string): Record<string, any> {
|
||||
let state = this.globalStates.get(mapId);
|
||||
if (!state) {
|
||||
state = {};
|
||||
this.globalStates.set(mapId, state);
|
||||
}
|
||||
return state;
|
||||
}
|
||||
|
||||
private async _setImages(mapId: string, images: Array<string>): Promise<void> {
|
||||
this.availableImages[mapId] = images;
|
||||
for (const workerSource in this.workerSources[mapId]) {
|
||||
const ws = this.workerSources[mapId][workerSource];
|
||||
for (const source in ws) {
|
||||
ws[source].availableImages = images;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private async _syncRTLPluginState(mapId: string, incomingState: PluginState): Promise<PluginState> {
|
||||
const state = await rtlWorkerPlugin.syncState(incomingState, this.self.importScripts);
|
||||
return state;
|
||||
}
|
||||
|
||||
/**
 * Returns the image ids known for a map, or an empty list if none were
 * registered yet via setImages.
 */
private _getAvailableImages(mapId: string) {
    return this.availableImages[mapId] ?? [];
}
|
||||
|
||||
private _getLayerIndex(mapId: string) {
|
||||
let layerIndexes = this.layerIndexes[mapId];
|
||||
if (!layerIndexes) {
|
||||
layerIndexes = this.layerIndexes[mapId] = new StyleLayerIndex();
|
||||
}
|
||||
return layerIndexes;
|
||||
}
|
||||
|
||||
/**
 * Lazily creates (and caches) a WorkerSource instance per mapId, sourceType and sourceName
 * @param mapId - the mapId
 * @param sourceType - the source type - 'vector' for example
 * @param sourceName - the source name - 'osm' for example
 * @returns a new instance or a cached one
 */
private _getWorkerSource(mapId: string, sourceType: string, sourceName: string): WorkerSource {
    const byType = this.workerSources[mapId] = this.workerSources[mapId] || {};
    const byName = byType[sourceType] = byType[sourceType] || {};

    if (!byName[sourceName]) {
        // Wrap the actor so that every message sent by this WorkerSource carries
        // the target mapId - essential when multiple maps share this worker.
        const actor: IActor = {
            sendAsync: (message, abortController) => {
                message.targetMapId = mapId;
                return this.actor.sendAsync(message, abortController);
            }
        };
        const layerIndex = this._getLayerIndex(mapId);
        const images = this._getAvailableImages(mapId);
        switch (sourceType) {
            case 'vector':
                byName[sourceName] = new VectorTileWorkerSource(actor, layerIndex, images);
                break;
            case 'geojson':
                byName[sourceName] = new GeoJSONWorkerSource(actor, layerIndex, images);
                break;
            default:
                // Externally registered source types (Map.addSourceType).
                byName[sourceName] = new (this.externalWorkerSourceTypes[sourceType])(actor, layerIndex, images);
                break;
        }
    }

    return byName[sourceName];
}
|
||||
|
||||
/**
|
||||
* This is basically a lazy initialization of a worker per mapId and source
|
||||
* @param mapId - the mapId
|
||||
* @param sourceType - the source type - 'raster-dem' for example
|
||||
* @returns a new instance or a cached one
|
||||
*/
|
||||
private _getDEMWorkerSource(mapId: string, sourceType: string) {
|
||||
if (!this.demWorkerSources[mapId])
|
||||
this.demWorkerSources[mapId] = {};
|
||||
|
||||
if (!this.demWorkerSources[mapId][sourceType]) {
|
||||
this.demWorkerSources[mapId][sourceType] = new RasterDEMTileWorkerSource();
|
||||
}
|
||||
|
||||
return this.demWorkerSources[mapId][sourceType];
|
||||
}
|
||||
}
|
||||
|
||||
// When this script is executed inside a WebWorker context, instantiate the
// worker entry point and expose it on the worker global scope.
if (isWorker(self)) {
    self.worker = new Worker(self);
}
|
||||
145
node_modules/maplibre-gl/src/source/worker_source.ts
generated
vendored
Normal file
145
node_modules/maplibre-gl/src/source/worker_source.ts
generated
vendored
Normal file
@@ -0,0 +1,145 @@
|
||||
import type {ExpiryData, RequestParameters} from '../util/ajax';
|
||||
import type {RGBAImage, AlphaImage} from '../util/image';
|
||||
import type {GlyphPositions} from '../render/glyph_atlas';
|
||||
import type {ImageAtlas} from '../render/image_atlas';
|
||||
import type {CanonicalTileID, OverscaledTileID} from '../tile/tile_id';
|
||||
import type {Bucket} from '../data/bucket';
|
||||
import type {FeatureIndex} from '../data/feature_index';
|
||||
import type {CollisionBoxArray} from '../data/array_types.g';
|
||||
import type {DEMEncoding} from '../data/dem_data';
|
||||
import type {StyleGlyph} from '../style/style_glyph';
|
||||
import type {StyleImage} from '../style/style_image';
|
||||
import type {PromoteIdSpecification} from '@maplibre/maplibre-gl-style-spec';
|
||||
import type {RemoveSourceParams} from '../util/actor_messages';
|
||||
import type {IActor} from '../util/actor';
|
||||
import type {StyleLayerIndex} from '../style/style_layer_index';
|
||||
import type {SubdivisionGranularitySetting} from '../render/subdivision_granularity_settings';
|
||||
import type {DashEntry} from '../render/line_atlas';
|
||||
|
||||
/**
 * Parameters to identify a tile
 */
export type TileParameters = {
    /** The type of the source this tile belongs to */
    type: string;
    /** The id of the source this tile belongs to */
    source: string;
    /** A unique identifier for this tile within its source */
    uid: string | number;
};
|
||||
|
||||
/**
 * Parameters that are sent when requesting the worker to load a tile
 */
export type WorkerTileParameters = TileParameters & {
    tileID: OverscaledTileID;
    /** The request to use when fetching the tile data; absent when data is supplied another way */
    request?: RequestParameters;
    zoom: number;
    maxZoom?: number;
    tileSize: number;
    promoteId: PromoteIdSpecification;
    pixelRatio: number;
    showCollisionBoxes: boolean;
    collectResourceTiming?: boolean;
    /** When true, the parse result also includes the raw glyph/icon dependency maps (used for benchmarking) */
    returnDependencies?: boolean;
    subdivisionGranularity: SubdivisionGranularitySetting;
    encoding?: string;
    /**
     * Provide this property when the requested tile has a higher canonical Z than source maxzoom.
     * This allows the worker to know that it needs to overzoom from a source tile.
     */
    overzoomParameters?: OverzoomParameters;
    /** HTTP ETag of a previously loaded version of this tile, if any */
    etag?: string;
};
|
||||
|
||||
/**
 * Parameters needed in order to load a tile that is overzoomed from a source tile
 */
export type OverzoomParameters = {
    /** The source tile (at the source's max zoom) to overzoom from */
    maxZoomTileID: CanonicalTileID;
    /** The request to use when fetching that max-zoom source tile */
    overzoomRequest: RequestParameters;
};
|
||||
|
||||
/**
 * The parameters needed in order to load a DEM tile
 */
export type WorkerDEMTileParameters = TileParameters & {
    rawImageData: RGBAImage | ImageBitmap | ImageData;
    encoding: DEMEncoding;
    // Per-channel coefficients used when decoding elevation from the image —
    // presumably fed to the DEM decoder; confirm against DEMData usage.
    redFactor: number;
    greenFactor: number;
    blueFactor: number;
    baseShift: number;
};
|
||||
|
||||
/**
 * The worker tile's result type when the tile's data was (re)parsed
 */
export type WorkerTileWithData = ExpiryData & {
    buckets: Array<Bucket>;
    imageAtlas: ImageAtlas;
    dashPositions: Record<string, DashEntry>;
    glyphAtlasImage: AlphaImage;
    featureIndex: FeatureIndex;
    collisionBoxArray: CollisionBoxArray;
    rawTileData?: ArrayBuffer;
    encoding?: string;
    resourceTiming?: Array<PerformanceResourceTiming>;
    // Only used for benchmarking:
    glyphMap?: {
        [_: string]: {
            [_: number]: StyleGlyph;
        };
    } | null;
    iconMap?: {
        [_: string]: StyleImage;
    } | null;
    glyphPositions?: GlyphPositions | null;
    // Literal `false` so the WorkerTileResult union can be discriminated
    // against WorkerTileWithoutData's literal `true`.
    etagUnmodified?: false;
};
|
||||
|
||||
/**
 * The worker tile's result type when the server reported the tile unchanged
 * (ETag matched), so no parsed data is included.
 */
export type WorkerTileWithoutData = ExpiryData & {
    etagUnmodified: true; // Strict for type narrowing
    resourceTiming?: Array<PerformanceResourceTiming>;
};

/** Discriminated union of the two possible tile results, narrowed via `etagUnmodified`. */
export type WorkerTileResult = WorkerTileWithData | WorkerTileWithoutData;
|
||||
|
||||
/**
 * This is how the @see {@link WorkerSource} constructor should look like.
 */
export interface WorkerSourceConstructor {
    new (actor: IActor, layerIndex: StyleLayerIndex, availableImages: Array<string>): WorkerSource;
}
|
||||
|
||||
/**
 * `WorkerSource` should be implemented by custom source types to provide code that can be run on the WebWorkers.
 * Each of the methods has a relevant event that triggers it from the main thread with the relevant parameters.
 * @see {@link Map.addSourceType}
 */
export interface WorkerSource {
    /** Image ids currently available on the main thread; kept in sync by the worker's setImages handler */
    availableImages: Array<string>;

    /**
     * Loads a tile from the given params and parses it into buckets ready to send
     * back to the main thread for rendering. Resolves with the parsed tile result
     * (buckets, feature index, collision data, raw tile data).
     */
    loadTile(params: WorkerTileParameters): Promise<WorkerTileResult>;
    /**
     * Re-parses a tile that has already been loaded. Yields the same data as
     * {@link WorkerSource.loadTile}.
     */
    reloadTile(params: WorkerTileParameters): Promise<WorkerTileResult>;
    /**
     * Aborts loading a tile that is in progress.
     */
    abortTile(params: TileParameters): Promise<void>;
    /**
     * Removes this tile from any local caches.
     */
    removeTile(params: TileParameters): Promise<void>;
    /**
     * Tells the WorkerSource to abort in-progress tasks and release resources.
     * The foreground Source is responsible for ensuring that 'removeSource' is
     * the last message sent to the WorkerSource.
     */
    removeSource?: (params: RemoveSourceParams) => Promise<void>;
}
|
||||
356
node_modules/maplibre-gl/src/source/worker_tile.test.ts
generated
vendored
Normal file
356
node_modules/maplibre-gl/src/source/worker_tile.test.ts
generated
vendored
Normal file
@@ -0,0 +1,356 @@
|
||||
import {describe, test, expect, vi} from 'vitest';
|
||||
import {WorkerTile} from '../source/worker_tile';
|
||||
import {type Feature, GeoJSONWrapper, type VectorTileLike} from '@maplibre/vt-pbf';
|
||||
import {OverscaledTileID} from '../tile/tile_id';
|
||||
import {StyleLayerIndex} from '../style/style_layer_index';
|
||||
import {type WorkerTileParameters, type WorkerTileWithData} from './worker_source';
|
||||
import {SubdivisionGranularitySetting} from '../render/subdivision_granularity_settings';
|
||||
import {type EvaluationParameters} from '../style/evaluation_parameters';
|
||||
import {type PossiblyEvaluated} from '../style/properties';
|
||||
import {Color} from '@maplibre/maplibre-gl-style-spec';
|
||||
import {type CirclePaintProps, type CirclePaintPropsPossiblyEvaluated} from '../style/style_layer/circle_style_layer_properties.g';
|
||||
import {type SymbolLayoutProps, type SymbolLayoutPropsPossiblyEvaluated} from '../style/style_layer/symbol_style_layer_properties.g';
|
||||
import {MessageType} from '../util/actor_messages';
|
||||
|
||||
function createWorkerTile(params?: {globalState?: Record<string, any>}): WorkerTile {
|
||||
return new WorkerTile({
|
||||
uid: '',
|
||||
zoom: 0,
|
||||
maxZoom: 20,
|
||||
tileSize: 512,
|
||||
source: 'source',
|
||||
tileID: new OverscaledTileID(1, 0, 1, 1, 1),
|
||||
overscaling: 1,
|
||||
globalState: params?.globalState
|
||||
} as any as WorkerTileParameters);
|
||||
}
|
||||
|
||||
function createWrapper() {
|
||||
return new GeoJSONWrapper([{
|
||||
type: 1,
|
||||
geometry: [0, 0],
|
||||
tags: {}
|
||||
} as any as Feature]);
|
||||
}
|
||||
|
||||
function createLineWrapper() {
|
||||
return new GeoJSONWrapper([{
|
||||
type: 2,
|
||||
geometry: [[0, 0], [1, 1]],
|
||||
tags: {}
|
||||
} as any as Feature]);
|
||||
}
|
||||
|
||||
describe('worker tile', () => {
    // Basic smoke test: a circle layer over a point feature produces a bucket.
    test('WorkerTile.parse', async () => {
        const originalWarn = console.warn;
        console.warn = vi.fn();
        const layerIndex = new StyleLayerIndex([{
            id: 'test',
            source: 'source',
            type: 'circle'
        }]);

        const tile = createWorkerTile();
        const result = await tile.parse(createWrapper(), layerIndex, [], {} as any, SubdivisionGranularitySetting.noSubdivision) as WorkerTileWithData;
        expect(result.buckets[0]).toBeTruthy();
        console.warn = originalWarn;
    });

    // A literal layout value must survive parsing into the bucket's layer.
    test('WorkerTile.parse layer with layout property', async () => {
        const layerIndex = new StyleLayerIndex([{
            id: 'test',
            source: 'source',
            type: 'line',
            layout: {
                'line-join': 'bevel'
            }
        }]);

        const tile = createWorkerTile();
        const result = await tile.parse(createLineWrapper(), layerIndex, [], {} as any, SubdivisionGranularitySetting.noSubdivision) as WorkerTileWithData;
        expect(result.buckets[0]).toBeTruthy();
        expect(result.buckets[0].layers[0].layout._values['line-join'].value.value).toBe('bevel');
    });

    // A ['global-state', key] layout expression resolves against the tile's global state.
    test('WorkerTile.parse layer with layout property using global-state', async () => {
        const layerIndex = new StyleLayerIndex([{
            id: 'test',
            source: 'source',
            type: 'line',
            layout: {
                'line-join': ['global-state', 'test']
            }
        }], {test: 'bevel'});

        const tile = createWorkerTile({
            globalState: {test: 'bevel'}
        });
        const result = await tile.parse(createLineWrapper(), layerIndex, [], {} as any, SubdivisionGranularitySetting.noSubdivision) as WorkerTileWithData;
        expect(result.buckets[0]).toBeTruthy();
        expect(result.buckets[0].layers[0].layout._values['line-join'].value.value).toBe('bevel');
    });

    // Same for a paint property expression.
    test('WorkerTile.parse layer with paint property using global-state', async () => {
        const layerIndex = new StyleLayerIndex([{
            id: 'test',
            source: 'source',
            type: 'fill-extrusion',
            paint: {
                'fill-extrusion-height': ['global-state', 'test']
            }
        }], {test: 1});

        const tile = createWorkerTile({
            globalState: {test: 1}
        });
        const result = await tile.parse(createLineWrapper(), layerIndex, [], {} as any, SubdivisionGranularitySetting.noSubdivision) as WorkerTileWithData;
        expect(result.buckets[0]).toBeTruthy();
        expect(result.buckets[0].layers[0].paint._values['fill-extrusion-height'].value.value).toBe(1);
    });

    // Layers with visibility 'none' must not produce buckets.
    test('WorkerTile.parse skips hidden layers', async () => {
        const layerIndex = new StyleLayerIndex([{
            id: 'test-hidden',
            source: 'source',
            type: 'fill',
            layout: {visibility: 'none'}
        }]);

        const tile = createWorkerTile();
        const result = await tile.parse(createWrapper(), layerIndex, [], {} as any, SubdivisionGranularitySetting.noSubdivision) as WorkerTileWithData;
        expect(result.buckets).toHaveLength(0);
    });

    // A style layer whose source-layer is absent from the tile is skipped.
    test('WorkerTile.parse skips layers without a corresponding source layer', async () => {
        const layerIndex = new StyleLayerIndex([{
            id: 'test',
            source: 'source',
            'source-layer': 'nonesuch',
            type: 'fill'
        }]);

        const tile = createWorkerTile();
        const result = await tile.parse({layers: {}}, layerIndex, [], {} as any, SubdivisionGranularitySetting.noSubdivision) as WorkerTileWithData;
        expect(result.buckets).toHaveLength(0);
    });

    // Spec-v1 source layers trigger a console warning during parse.
    test('WorkerTile.parse warns once when encountering a v1 vector tile layer', async () => {
        const layerIndex = new StyleLayerIndex([{
            id: 'test',
            source: 'source',
            'source-layer': 'test',
            type: 'fill'
        }]);

        const data = {
            layers: {
                test: {
                    version: 1
                }
            }
        } as any as VectorTileLike;

        const spy = vi.spyOn(console, 'warn').mockImplementation(() => {});

        const tile = createWorkerTile();
        await tile.parse(data, layerIndex, [], {} as any, SubdivisionGranularitySetting.noSubdivision);
        expect(spy.mock.calls[0][0]).toMatch(/does not use vector tile spec v2/);
    });

    // Layers referencing patterns, icons, glyphs and dash arrays should yield
    // exactly one actor request per dependency kind.
    test('WorkerTile.parse would request all types of dependencies', async () => {
        const tile = createWorkerTile();
        const layerIndex = new StyleLayerIndex([{
            id: '1',
            type: 'fill',
            source: 'source',
            'source-layer': 'test',
            paint: {
                'fill-pattern': 'hello'
            }
        }, {
            id: 'test',
            source: 'source',
            'source-layer': 'test',
            type: 'symbol',
            layout: {
                'icon-image': 'hello',
                'text-font': ['StandardFont-Bold'],
                'text-field': '{name}'
            }
        }, {
            id: 'line-layer',
            type: 'line',
            source: 'source',
            'source-layer': 'test',
            paint: {
                'line-dasharray': ['case', ['has', 'road_type'], ['literal', [2, 1]], ['literal', [1, 2]]]
            }
        }]);

        // Minimal hand-rolled vector tile with a single point feature.
        const data = {
            layers: {
                test: {
                    version: 2,
                    name: 'test',
                    extent: 8192,
                    length: 1,
                    feature: (featureIndex: number) => ({
                        extent: 8192,
                        type: 1,
                        id: featureIndex,
                        properties: {
                            name: 'test'
                        },
                        loadGeometry () {
                            return [[{x: 0, y: 0}]];
                        }
                    })
                }
            }
        } as any as VectorTileLike;

        // Stub actor answering each dependency request type with canned data.
        const sendAsync = vi.fn().mockImplementation((message: {type: string; data: any}) => {
            if (message.type === MessageType.getImages) {
                return Promise.resolve({'hello': {width: 1, height: 1, data: new Uint8Array([0])}});
            } else if (message.type === MessageType.getGlyphs) {
                return Promise.resolve({'StandardFont-Bold': {width: 1, height: 1, data: new Uint8Array([0])}});
            } else if (message.type === MessageType.getDashes) {
                return Promise.resolve({
                    '2,1,false': {y: 0, height: 16, width: 256},
                    '1,2,false': {y: 16, height: 16, width: 256}
                });
            }
        });

        const actorMock = {
            sendAsync
        };
        const result = await tile.parse(data, layerIndex, ['hello'], actorMock, SubdivisionGranularitySetting.noSubdivision);
        expect(result).toBeDefined();
        expect(sendAsync).toHaveBeenCalledTimes(4); // icons, patterns, glyphs, dashes
        expect(sendAsync).toHaveBeenCalledWith(expect.objectContaining({type: 'GI', data: expect.objectContaining({'icons': ['hello'], 'type': 'icons'})}), expect.any(Object));
        expect(sendAsync).toHaveBeenCalledWith(expect.objectContaining({type: 'GI', data: expect.objectContaining({'icons': ['hello'], 'type': 'patterns'})}), expect.any(Object));
        expect(sendAsync).toHaveBeenCalledWith(expect.objectContaining({type: 'GG', data: expect.objectContaining({'source': 'source', 'type': 'glyphs', 'stacks': {'StandardFont-Bold': [101, 115, 116]}})}), expect.any(Object));
        expect(sendAsync).toHaveBeenCalledWith(expect.objectContaining({type: 'GDA', data: expect.objectContaining({'dashes': expect.any(Object)})}), expect.any(Object));
    });

    // Re-parsing the same tile must abort the previous parse's in-flight
    // dependency requests; only the last parse resolves.
    test('WorkerTile.parse would cancel and only event once on repeated reparsing', async () => {
        const tile = createWorkerTile();
        const layerIndex = new StyleLayerIndex([{
            id: '1',
            type: 'fill',
            source: 'source',
            'source-layer': 'test',
            paint: {
                'fill-pattern': 'hello'
            }
        }, {
            id: 'test',
            source: 'source',
            'source-layer': 'test',
            type: 'symbol',
            layout: {
                'icon-image': 'hello',
                'text-font': ['StandardFont-Bold'],
                'text-field': '{name}'
            }
        }]);

        const data = {
            layers: {
                test: {
                    version: 2,
                    name: 'test',
                    extent: 8192,
                    length: 1,
                    feature: (featureIndex: number) => ({
                        extent: 8192,
                        type: 1,
                        id: featureIndex,
                        properties: {
                            name: 'test'
                        },
                        loadGeometry () {
                            return [[{x: 0, y: 0}]];
                        }
                    })
                }
            }
        } as any as VectorTileLike;

        // Stub actor that resolves asynchronously and counts abort signals.
        let cancelCount = 0;
        const sendAsync = vi.fn().mockImplementation((message: {type: string; data: unknown}, abortController: AbortController) => {
            return new Promise((resolve, _reject) => {
                const res = setTimeout(() => {
                    const response = message.type === 'getImages' ?
                        {'hello': {width: 1, height: 1, data: new Uint8Array([0])}} :
                        {'StandardFont-Bold': {width: 1, height: 1, data: new Uint8Array([0])}};
                    resolve(response);
                }
                );
                abortController.signal.addEventListener('abort', () => {
                    cancelCount += 1;
                    clearTimeout(res);
                });
            });
        });

        const actorMock = {
            sendAsync
        };
        // The first two parses should never resolve - their then() would fail the test.
        tile.parse(data, layerIndex, ['hello'], actorMock, SubdivisionGranularitySetting.noSubdivision).then(() => expect(false).toBeTruthy());
        tile.parse(data, layerIndex, ['hello'], actorMock, SubdivisionGranularitySetting.noSubdivision).then(() => expect(false).toBeTruthy());
        const result = await tile.parse(data, layerIndex, ['hello'], actorMock, SubdivisionGranularitySetting.noSubdivision);
        expect(result).toBeDefined();
        expect(cancelCount).toBe(6);
        expect(sendAsync).toHaveBeenCalledTimes(9);
        expect(sendAsync).toHaveBeenCalledWith(expect.objectContaining({data: expect.objectContaining({'icons': ['hello'], 'type': 'icons'})}), expect.any(Object));
        expect(sendAsync).toHaveBeenCalledWith(expect.objectContaining({data: expect.objectContaining({'icons': ['hello'], 'type': 'patterns'})}), expect.any(Object));
        expect(sendAsync).toHaveBeenCalledWith(expect.objectContaining({data: expect.objectContaining({'source': 'source', 'type': 'glyphs', 'stacks': {'StandardFont-Bold': [101, 115, 116]}})}), expect.any(Object));
    });

    // Global state mutated after construction is still visible when the
    // layer's layout is recalculated.
    test('WorkerTile.parse passes global-state to layout properties', async () => {
        const globalState = {} as any;
        const layerIndex = new StyleLayerIndex([
            {
                id: 'layer-id',
                type: 'symbol',
                source: 'source',
                layout: {
                    'text-size': ['global-state', 'size']
                }
            }
        ], globalState);

        const tile = createWorkerTile({globalState});
        globalState.size = 12;
        await tile.parse(createLineWrapper(), layerIndex, [], {} as any, SubdivisionGranularitySetting.noSubdivision);
        const layer = layerIndex._layers['layer-id'];
        layer.recalculate({} as EvaluationParameters, []);
        const layout = layer.layout as PossiblyEvaluated<SymbolLayoutProps, SymbolLayoutPropsPossiblyEvaluated>;
        expect(layout.get('text-size').evaluate({} as any, {})).toBe(12);
    });

    // Same check for paint properties (color and radius).
    test('WorkerTile.parse passes global-state to paint properties', async () => {
        const layerIndex = new StyleLayerIndex([
            {
                id: 'circle',
                type: 'circle',
                source: 'source',
                paint: {
                    'circle-color': ['global-state', 'color'],
                    'circle-radius': ['global-state', 'radius']
                }
            }
        ], {radius: 15, color: '#FF0000'});

        const tile = createWorkerTile({});
        await tile.parse(createLineWrapper(), layerIndex, [], {} as any, SubdivisionGranularitySetting.noSubdivision);
        const layer = layerIndex._layers['circle'];
        layer.recalculate({zoom: 0} as EvaluationParameters, []);
        const paint = layer.paint as PossiblyEvaluated<CirclePaintProps, CirclePaintPropsPossiblyEvaluated>;
        expect(paint.get('circle-color').evaluate({} as any, {})).toEqual(new Color(1, 0, 0, 1));
        expect(paint.get('circle-radius').evaluate({} as any, {})).toBe(15);
    });
});
|
||||
215
node_modules/maplibre-gl/src/source/worker_tile.ts
generated
vendored
Normal file
215
node_modules/maplibre-gl/src/source/worker_tile.ts
generated
vendored
Normal file
@@ -0,0 +1,215 @@
|
||||
import {FeatureIndex} from '../data/feature_index';
|
||||
import {performSymbolLayout} from '../symbol/symbol_layout';
|
||||
import {CollisionBoxArray} from '../data/array_types.g';
|
||||
import {DictionaryCoder} from '../util/dictionary_coder';
|
||||
import {SymbolBucket} from '../data/bucket/symbol_bucket';
|
||||
import {LineBucket} from '../data/bucket/line_bucket';
|
||||
import {FillBucket} from '../data/bucket/fill_bucket';
|
||||
import {FillExtrusionBucket} from '../data/bucket/fill_extrusion_bucket';
|
||||
import {warnOnce, mapObject} from '../util/util';
|
||||
import {ImageAtlas} from '../render/image_atlas';
|
||||
import {GlyphAtlas} from '../render/glyph_atlas';
|
||||
import {EvaluationParameters} from '../style/evaluation_parameters';
|
||||
import {OverscaledTileID} from '../tile/tile_id';
|
||||
|
||||
import type {Bucket} from '../data/bucket';
|
||||
import type {IActor} from '../util/actor';
|
||||
import type {StyleLayer} from '../style/style_layer';
|
||||
import type {StyleLayerIndex} from '../style/style_layer_index';
|
||||
import type {
|
||||
WorkerTileParameters,
|
||||
WorkerTileResult,
|
||||
} from '../source/worker_source';
|
||||
import type {PromoteIdSpecification} from '@maplibre/maplibre-gl-style-spec';
|
||||
import type {VectorTileLike} from '@maplibre/vt-pbf';
|
||||
import {type GetDashesResponse, MessageType, type GetGlyphsResponse, type GetImagesResponse} from '../util/actor_messages';
|
||||
import type {SubdivisionGranularitySetting} from '../render/subdivision_granularity_settings';
|
||||
/**
 * A tile being processed on a web worker: turns raw vector tile data into
 * render-ready buckets plus the feature index, collision data and dependency
 * atlases the main thread needs.
 */
export class WorkerTile {
    tileID: OverscaledTileID;
    uid: string | number;
    zoom: number;
    pixelRatio: number;
    tileSize: number;
    source: string;
    promoteId: PromoteIdSpecification;
    // Derived from tileID.overscaleFactor() in the constructor, not taken from params.
    overscaling: number;
    showCollisionBoxes: boolean;
    collectResourceTiming: boolean;
    returnDependencies: boolean;

    // 'parsing' while parse() runs; 'done' once it has resolved.
    status: 'parsing' | 'done';
    data: VectorTileLike;
    collisionBoxArray: CollisionBoxArray;

    abort: AbortController;
    vectorTile: VectorTileLike;
    // Abort controllers for dependency requests (glyphs/images/dashes) still in
    // flight; a re-parse aborts all of them before issuing new requests.
    inFlightDependencies: AbortController[];

    /**
     * Copies the tile identity and parse options out of the request parameters.
     */
    constructor(params: WorkerTileParameters) {
        this.tileID = new OverscaledTileID(params.tileID.overscaledZ, params.tileID.wrap, params.tileID.canonical.z, params.tileID.canonical.x, params.tileID.canonical.y);
        this.uid = params.uid;
        this.zoom = params.zoom;
        this.pixelRatio = params.pixelRatio;
        this.tileSize = params.tileSize;
        this.source = params.source;
        this.overscaling = this.tileID.overscaleFactor();
        this.showCollisionBoxes = params.showCollisionBoxes;
        this.collectResourceTiming = !!params.collectResourceTiming;
        this.returnDependencies = !!params.returnDependencies;
        this.promoteId = params.promoteId;
        this.inFlightDependencies = [];
    }

    /**
     * Parses the tile data: populates one bucket per applicable style layer
     * family, fetches glyph/icon/pattern/dash dependencies via the actor, runs
     * symbol layout, and returns the render-ready result.
     *
     * Re-entrant: calling parse again aborts the previous call's outstanding
     * dependency requests (see inFlightDependencies).
     */
    async parse(data: VectorTileLike, layerIndex: StyleLayerIndex, availableImages: Array<string>, actor: IActor, subdivisionGranularity: SubdivisionGranularitySetting): Promise<WorkerTileResult> {
        this.status = 'parsing';
        this.data = data;

        this.collisionBoxArray = new CollisionBoxArray();
        // Sorted so that source-layer ids get stable small-integer codes.
        const sourceLayerCoder = new DictionaryCoder(Object.keys(data.layers).sort());

        const featureIndex = new FeatureIndex(this.tileID, this.promoteId);
        featureIndex.bucketLayerIDs = [];

        const buckets: {[_: string]: Bucket} = {};

        // Shared accumulator: buckets record their glyph/icon/pattern/dash needs here.
        const options = {
            featureIndex,
            iconDependencies: {},
            patternDependencies: {},
            glyphDependencies: {},
            dashDependencies: {},
            availableImages,
            subdivisionGranularity
        };

        // Stage 1: populate a bucket per visible style layer family.
        const layerFamilies = layerIndex.familiesBySource[this.source];
        for (const sourceLayerId in layerFamilies) {
            const sourceLayer = data.layers[sourceLayerId];
            if (!sourceLayer) {
                continue;
            }

            if (sourceLayer.version === 1) {
                warnOnce(`Vector tile source "${this.source}" layer "${sourceLayerId}" ` +
                    'does not use vector tile spec v2 and therefore may have some rendering errors.');
            }

            const sourceLayerIndex = sourceLayerCoder.encode(sourceLayerId);
            const features = [];
            for (let index = 0; index < sourceLayer.length; index++) {
                const feature = sourceLayer.feature(index);
                const id = featureIndex.getId(feature, sourceLayerId);
                features.push({feature, id, index, sourceLayerIndex});
            }

            for (const family of layerFamilies[sourceLayerId]) {
                const layer = family[0];

                if (layer.source !== this.source) {
                    warnOnce(`layer.source = ${layer.source} does not equal this.source = ${this.source}`);
                }
                if (layer.isHidden(this.zoom, true)) continue;
                recalculateLayers(family, this.zoom, availableImages);

                const bucket = buckets[layer.id] = layer.createBucket({
                    index: featureIndex.bucketLayerIDs.length,
                    layers: family,
                    zoom: this.zoom,
                    pixelRatio: this.pixelRatio,
                    overscaling: this.overscaling,
                    collisionBoxArray: this.collisionBoxArray,
                    sourceLayerIndex,
                    sourceID: this.source
                });

                bucket.populate(features, options, this.tileID.canonical);
                featureIndex.bucketLayerIDs.push(family.map((l) => l.id));
            }
        }

        // options.glyphDependencies looks like: {"SomeFontName":{"10":true,"32":true}}
        // this line makes an object like: {"SomeFontName":[10,32]}
        const stacks: {[_: string]: Array<number>} = mapObject(options.glyphDependencies, (glyphs) => Object.keys(glyphs).map(Number));

        // Stage 2: cancel dependency fetches from a previous parse of this tile,
        // then request everything the populated buckets need, in parallel.
        this.inFlightDependencies.forEach((request) => request?.abort());
        this.inFlightDependencies = [];

        let getGlyphsPromise = Promise.resolve<GetGlyphsResponse>({});
        if (Object.keys(stacks).length) {
            const abortController = new AbortController();
            this.inFlightDependencies.push(abortController);
            getGlyphsPromise = actor.sendAsync({type: MessageType.getGlyphs, data: {stacks, source: this.source, tileID: this.tileID, type: 'glyphs'}}, abortController);
        }

        const icons = Object.keys(options.iconDependencies);
        let getIconsPromise = Promise.resolve<GetImagesResponse>({});
        if (icons.length) {
            const abortController = new AbortController();
            this.inFlightDependencies.push(abortController);
            getIconsPromise = actor.sendAsync({type: MessageType.getImages, data: {icons, source: this.source, tileID: this.tileID, type: 'icons'}}, abortController);
        }

        const patterns = Object.keys(options.patternDependencies);
        let getPatternsPromise = Promise.resolve<GetImagesResponse>({});
        if (patterns.length) {
            const abortController = new AbortController();
            this.inFlightDependencies.push(abortController);
            getPatternsPromise = actor.sendAsync({type: MessageType.getImages, data: {icons: patterns, source: this.source, tileID: this.tileID, type: 'patterns'}}, abortController);
        }

        const dashes = options.dashDependencies;
        let getDashesPromise = Promise.resolve<GetDashesResponse>({} as GetDashesResponse);
        if (Object.keys(dashes).length) {
            const abortController = new AbortController();
            this.inFlightDependencies.push(abortController);
            getDashesPromise = actor.sendAsync({type: MessageType.getDashes, data: {dashes}}, abortController);
        }

        const [glyphMap, iconMap, patternMap, dashPositions] = await Promise.all([getGlyphsPromise, getIconsPromise, getPatternsPromise, getDashesPromise]);

        const glyphAtlas = new GlyphAtlas(glyphMap);
        const imageAtlas = new ImageAtlas(iconMap, patternMap);

        // Stage 3: finish buckets that needed the fetched dependencies - symbol
        // layout for SymbolBuckets, feature addition for pattern/dash buckets.
        for (const key in buckets) {
            const bucket = buckets[key];
            if (bucket instanceof SymbolBucket) {
                recalculateLayers(bucket.layers, this.zoom, availableImages);
                performSymbolLayout({
                    bucket,
                    glyphMap,
                    glyphPositions: glyphAtlas.positions,
                    imageMap: iconMap,
                    imagePositions: imageAtlas.iconPositions,
                    showCollisionBoxes: this.showCollisionBoxes,
                    canonical: this.tileID.canonical,
                    subdivisionGranularity: options.subdivisionGranularity
                });
            } else if (bucket.hasDependencies && (bucket instanceof FillBucket || bucket instanceof FillExtrusionBucket || bucket instanceof LineBucket)) {
                recalculateLayers(bucket.layers, this.zoom, availableImages);
                bucket.addFeatures(options, this.tileID.canonical, imageAtlas.patternPositions, dashPositions);
            }
        }

        this.status = 'done';
        return {
            buckets: Object.values(buckets).filter(b => !b.isEmpty()),
            featureIndex,
            collisionBoxArray: this.collisionBoxArray,
            glyphAtlasImage: glyphAtlas.image,
            imageAtlas,
            dashPositions,
            // Only used for benchmarking:
            glyphMap: this.returnDependencies ? glyphMap : null,
            iconMap: this.returnDependencies ? iconMap : null,
            glyphPositions: this.returnDependencies ? glyphAtlas.positions : null
        };
    }
}
|
||||
|
||||
function recalculateLayers(layers: ReadonlyArray<StyleLayer>, zoom: number, availableImages: Array<string>) {
|
||||
// Layers are shared and may have been used by a WorkerTile with a different zoom.
|
||||
const parameters = new EvaluationParameters(zoom);
|
||||
for (const layer of layers) {
|
||||
layer.recalculate(parameters, availableImages);
|
||||
}
|
||||
}
|
||||
62
node_modules/maplibre-gl/src/source/worker_tile_state.ts
generated
vendored
Normal file
62
node_modules/maplibre-gl/src/source/worker_tile_state.ts
generated
vendored
Normal file
@@ -0,0 +1,62 @@
|
||||
import type {WorkerTile} from './worker_tile';
|
||||
import {type ExpiryData} from '../util/ajax';
|
||||
|
||||
/**
 * Raw tile data held between load and a deferred parse pass
 * (stored per-uid in {@link WorkerTileState.parsing}).
 */
export type ParsingState = {
    /** The unparsed tile payload. */
    rawData: ArrayBufferLike;
    /** Optional cache/expiry metadata from the network response. */
    cacheControl?: ExpiryData;
    /** Optional resource-timing info — shape not constrained here; presumably PerformanceResourceTiming entries, verify against callers. */
    resourceTiming?: any;
};
|
||||
|
||||
export class WorkerTileState {
|
||||
loading: Record<string, WorkerTile> = {};
|
||||
loaded: Record<string, WorkerTile> = {};
|
||||
parsing: Record<string, ParsingState> = {};
|
||||
|
||||
startLoading(uid: string | number, tile: WorkerTile): void {
|
||||
this.loading[uid] = tile;
|
||||
}
|
||||
|
||||
finishLoading(uid: string | number): void {
|
||||
delete this.loading[uid];
|
||||
}
|
||||
|
||||
abort(uid: string | number): void {
|
||||
const tile = this.loading[uid];
|
||||
if (!tile?.abort) return;
|
||||
tile.abort.abort();
|
||||
delete this.loading[uid];
|
||||
}
|
||||
|
||||
setParsing(uid: string | number, state: ParsingState): void {
|
||||
this.parsing[uid] = state;
|
||||
}
|
||||
|
||||
consumeParsing(uid: string | number): ParsingState | undefined {
|
||||
const state = this.parsing[uid];
|
||||
if (!state) return undefined;
|
||||
delete this.parsing[uid];
|
||||
return state;
|
||||
}
|
||||
|
||||
clearParsing(uid: string | number): void {
|
||||
delete this.parsing[uid];
|
||||
}
|
||||
|
||||
markLoaded(uid: string | number, tile: WorkerTile): void {
|
||||
this.loaded[uid] = tile;
|
||||
}
|
||||
|
||||
getLoaded(uid: string | number): WorkerTile | undefined {
|
||||
const tile = this.loaded[uid];
|
||||
if (!tile) return undefined;
|
||||
return tile;
|
||||
}
|
||||
|
||||
removeLoaded(uid: string | number): void {
|
||||
delete this.loaded[uid];
|
||||
}
|
||||
|
||||
clearLoaded(): void {
|
||||
this.loaded = {};
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user