diff --git a/docs/docs-sidebar.json b/docs/docs-sidebar.json index 6abb9ad9da..21d756e129 100644 --- a/docs/docs-sidebar.json +++ b/docs/docs-sidebar.json @@ -9,100 +9,6 @@ "upgrade-guide" ] }, - { - "type": "category", - "label": "Developer Guide", - "items": [ - { - "type": "category", - "label": "Basics", - "items": [ - "developer-guide/using-loaders", - "developer-guide/using-writers", - "developer-guide/loader-categories", - "developer-guide/using-worker-loaders", - "developer-guide/using-streaming-loaders", - "developer-guide/node" - ] - }, - { - "type": "category", - "label": "Advanced Topics", - "items": [ - "developer-guide/dependencies", - "developer-guide/creating-loaders-and-writers", - "developer-guide/composite-loaders", - "developer-guide/error-handling", - "developer-guide/dev-env" - ] - }, - { - "type": "category", - "label": "Concepts", - "items": [ - "developer-guide/concepts/javascript-apis", - "developer-guide/concepts/binary-data", - "developer-guide/concepts/streaming", - "developer-guide/concepts/async-iterators", - "developer-guide/concepts/worker-threads" - ] - } - ] - }, - { - "type": "category", - "label": "Tutorials", - "items": [ - { - "type": "category", - "label": "I3S", - "items": [ - "modules/i3s/recipes/building-scene-layer", - "modules/i3s/recipes/attribute-driven-colorization" - ] - } - ] - }, - { - "type": "category", - "label": "Formats", - "items": [ - "formats/README", - "modules/arrow/formats/arrow", - "modules/arrow/formats/geoarrow", - "modules/bson/formats/bson", - "modules/csv/formats/csv", - "modules/pcd/formats/pcd", - "modules/ply/formats/ply", - "modules/json/formats/geojson", - "modules/json/formats/geojson-geometry", - "modules/gltf/formats/glb", - "modules/gltf/formats/gltf", - "modules/geopackage/formats/geopackage", - "modules/kml/formats/kml", - "modules/kml/formats/gpx", - "modules/kml/formats/tcx", - "modules/las/formats/las", - "modules/mvt/formats/mvt", - "modules/mvt/formats/tilejson", - 
"modules/parquet/formats/parquet", - "modules/parquet/formats/geoparquet", - "modules/pmtiles/formats/pmtiles", - "modules/shapefile/formats/shapefile", - "modules/textures/formats/compressed-textures", - "modules/wms/formats/csw", - "modules/wms/formats/wms", - "modules/wms/formats/wmts", - "modules/wms/formats/wfs", - "modules/wms/formats/gml", - "modules/lerc/formats/lerc", - "modules/wkt/formats/wkt", - "modules/wkt/formats/wkb", - "modules/wkt/formats/wkt-crs", - "modules/xml/formats/xml", - "modules/zip/formats/zip" - ] - }, { "type": "category", "label": "Loader Catalog", @@ -131,6 +37,7 @@ "modules/3d-tiles/api-reference/cesium-ion-loader", "modules/arrow/api-reference/arrow-loader", "modules/arrow/api-reference/arrow-writer", + "modules/arrow/api-reference/geoarrow-loader", "modules/bson/api-reference/bson-loader", "modules/bson/api-reference/bson-writer", "modules/csv/api-reference/csv-loader", @@ -184,6 +91,101 @@ "modules/zip/api-reference/zip-writer" ] }, + { + "type": "category", + "label": "Formats", + "items": [ + "formats/README", + "modules/arrow/formats/arrow", + "modules/arrow/formats/geoarrow", + "modules/bson/formats/bson", + "modules/csv/formats/csv", + "modules/pcd/formats/pcd", + "modules/ply/formats/ply", + "modules/flatgeobuf/formats/flatgeobuf", + "modules/json/formats/geojson", + "modules/json/formats/geojson-geometry", + "modules/gltf/formats/glb", + "modules/gltf/formats/gltf", + "modules/geopackage/formats/geopackage", + "modules/kml/formats/kml", + "modules/kml/formats/gpx", + "modules/kml/formats/tcx", + "modules/las/formats/las", + "modules/mvt/formats/mvt", + "modules/mvt/formats/tilejson", + "modules/parquet/formats/parquet", + "modules/parquet/formats/geoparquet", + "modules/pmtiles/formats/pmtiles", + "modules/shapefile/formats/shapefile", + "modules/textures/formats/compressed-textures", + "modules/wms/formats/csw", + "modules/wms/formats/wms", + "modules/wms/formats/wmts", + "modules/wms/formats/wfs", + 
"modules/wms/formats/gml", + "modules/lerc/formats/lerc", + "modules/wkt/formats/wkt", + "modules/wkt/formats/wkb", + "modules/wkt/formats/wkt-crs", + "modules/xml/formats/xml", + "modules/zip/formats/zip" + ] + }, + { + "type": "category", + "label": "Developer Guide", + "items": [ + { + "type": "category", + "label": "Basics", + "items": [ + "developer-guide/using-loaders", + "developer-guide/using-writers", + "developer-guide/loader-categories", + "developer-guide/using-worker-loaders", + "developer-guide/using-streaming-loaders", + "developer-guide/node" + ] + }, + { + "type": "category", + "label": "Advanced Topics", + "items": [ + "developer-guide/dependencies", + "developer-guide/creating-loaders-and-writers", + "developer-guide/composite-loaders", + "developer-guide/error-handling", + "developer-guide/dev-env" + ] + }, + { + "type": "category", + "label": "Concepts", + "items": [ + "developer-guide/concepts/javascript-apis", + "developer-guide/concepts/binary-data", + "developer-guide/concepts/streaming", + "developer-guide/concepts/async-iterators", + "developer-guide/concepts/worker-threads" + ] + } + ] + }, + { + "type": "category", + "label": "Tutorials", + "items": [ + { + "type": "category", + "label": "I3S", + "items": [ + "modules/i3s/recipes/building-scene-layer", + "modules/i3s/recipes/attribute-driven-colorization" + ] + } + ] + }, { "type": "category", "label": "Submodule API Reference", diff --git a/docs/modules/arrow/README.md b/docs/modules/arrow/README.md index e2d9afe6b9..f6b65a6de1 100644 --- a/docs/modules/arrow/README.md +++ b/docs/modules/arrow/README.md @@ -18,6 +18,7 @@ npm install @loaders.gl/core @loaders.gl/arrow | -------------------------------------------------------------------- | | [`ArrowLoader`](/docs/modules/arrow/api-reference/arrow-loader) | | [`ArrowWorkerLoader`](/docs/modules/arrow/api-reference/arrow-loader) | +| [`GeoArrowLoader`](/docs/modules/arrow/api-reference/geoarrow-loader) | | Writer | | 
-------------------------------------------------------------- | diff --git a/docs/modules/arrow/api-reference/arrow-loader.md b/docs/modules/arrow/api-reference/arrow-loader.md index 076fd3d198..99e8498ae0 100644 --- a/docs/modules/arrow/api-reference/arrow-loader.md +++ b/docs/modules/arrow/api-reference/arrow-loader.md @@ -2,8 +2,6 @@ ![arrow-logo](../images/apache-arrow-small.png) -> The Arrow loaders are still under development. - The `ArrowLoader` parses the Apache Arrow columnar table format. | Loader | Characteristic | diff --git a/docs/modules/arrow/api-reference/geoarrow-loader.md b/docs/modules/arrow/api-reference/geoarrow-loader.md new file mode 100644 index 0000000000..0a98b6dab8 --- /dev/null +++ b/docs/modules/arrow/api-reference/geoarrow-loader.md @@ -0,0 +1,33 @@ +# GeoArrowLoader + +![arrow-logo](../images/apache-arrow-small.png) + +

+ From-v4.1 +

+ +The `GeoArrowLoader` parses Apache Arrow columnar table format files, and looks for `GeoArrow` type extensions to parse geometries from the table. + +| Loader | Characteristic | +| --------------------- | ------------------------------------------------------------------------- | +| File Format | [IPC: Encapsulated Message Format](https://arrow.apache.org/docs/format/Columnar.html#serialization-and-interprocess-communication-ipc) | +| Data Format | [Geometry Table](/docs/specifications/category-table) | +| File Extension | `.arrow` | +| File Type | Binary | +| Decoder Type | `load`, `parse`, `parseSync`, `parseInBatches` | +| Worker Thread Support | Yes | +| Streaming Support | Yes | + +## Usage + +```typescript +import {GeoArrowLoader} from '@loaders.gl/arrow'; +import {load} from '@loaders.gl/core'; + +const data = await load(url, GeoArrowLoader, options); +``` + +## Options + +| Option | Type | Default | Description | +| ------ | ---- | ------- | ----------- | diff --git a/docs/modules/arrow/formats/geoarrow.md b/docs/modules/arrow/formats/geoarrow.md index 38ed71f5a7..b79835d986 100644 --- a/docs/modules/arrow/formats/geoarrow.md +++ b/docs/modules/arrow/formats/geoarrow.md @@ -22,22 +22,22 @@ Geospatial tabular data where one or more columns contains feature geometries an Note that GeoArrow is not a separate format from Apache Arrow rather, the GeoArrow specification simply describes additional conventions for metadata and layout of geospatial data. This means that a valid GeoArrow file is always a valid Arrow file. This is done through [Arrow extension type](https://arrow.apache.org/docs/format/Columnar.html#extension-types) definitions that ensure type-level metadata (e.g., CRS) is propagated when used in Arrow implementations. 
+## Geometry Types + +| Geometry type | Read | Write | Description | +| -------------------------- | ---- | ----- | -------------------- | +| `geoarrow.point` | ✅ | ❌ | | +| `geoarrow.multipoint` | ✅ | ❌ | | +| `geoarrow.linestring` | ✅ | ❌ | | +| `geoarrow.multilinestring` | ✅ | ❌ | | +| `geoarrow.polygon` | ✅ | ❌ | | +| `geoarrow.multipolygon` | ✅ | ❌ | | +| `geoarrow.wkb` | ✅ | ❌ | `WKB` also supported | +| `geoarrow.wkt` | ✅ | ❌ | `WKT` also supported | + ## Relationship with GeoParquet -The [GeoParquet specification](https://github.com/opengeospatial/geoparquet) is closely related to GeoArrow. Notable differences: +The [GeoParquet](/docs/modules/parquet/formats/geoparquet) [specification](https://github.com/opengeospatial/geoparquet) is closely related to GeoArrow. Notable differences: - GeoParquet is a file-level metadata specification - GeoArrow is a field-level metadata and memory layout specification - -## Geometry Types - -| Geometry type | Read | Write | Description | -| -------------------------- | ---- | ----- | ----------- | -| `geoarrow.multipolygon` | ✅ | ❌ | | -| `geoarrow.polygon` | ✅ | ❌ | | -| `geoarrow.multipoint` | ✅ | ❌ | | -| `geoarrow.point` | ✅ | ❌ | | -| `geoarrow.multilinestring` | ✅ | ❌ | | -| `geoarrow.linestring` | ✅ | ❌ | | -| `geoarrow.wkb` | ❌ | ❌ | | -| `geoarrow.wkt` | ❌ | ❌ | | \ No newline at end of file diff --git a/docs/modules/flatgeobuf/api-reference/flatgeobuf-loader.md b/docs/modules/flatgeobuf/api-reference/flatgeobuf-loader.md index 571f9633fe..c67e79d2de 100644 --- a/docs/modules/flatgeobuf/api-reference/flatgeobuf-loader.md +++ b/docs/modules/flatgeobuf/api-reference/flatgeobuf-loader.md @@ -1,4 +1,4 @@ -# FlatGeobufLoader 🚧 +# FlatGeobufLoader

From-v3.1 @@ -6,13 +6,13 @@ BETA

-Loader for the [FlatGeobuf](http://flatgeobuf.org/) format, a binary FlatBuffers-encoded format that defines geospatial geometries. +Loader for the [FlatGeobuf](/docs/modules/flatgeobuf/formats/flatgeobuf) format, a binary FlatBuffers-encoded format that defines geospatial geometries. | Loader | Characteristic | | -------------- | --------------------------------------------------------------- | | File Extension | `.fgb`, | | File Type | Binary | -| File Format | [FlatGeobuf](http://flatgeobuf.org/) | +| File Format | [FlatGeobuf](/docs/modules/flatgeobuf/formats/flatgeobuf) | | Data Format | [Geometry](/docs/specifications/category-gis) | | Supported APIs | `load`, `loadInBatches`, `parse`, `parseSync`, `parseInBatches` | @@ -39,4 +39,4 @@ The parser will return an array of [GeoJSON `features`](https://tools.ietf.org/h ## Attribution -The `FlatGeobufLoader` wraps the [`flatgeobuf`](https://github.com/bjornharrtell/flatgeobuf) NPM module under the ISC license. +The `FlatGeobufLoader` wraps the [`flatgeobuf`](https://github.com/bjornharrtell/flatgeobuf) NPM module which is published under the ISC license. diff --git a/docs/modules/flatgeobuf/formats/flatgeobuf.md b/docs/modules/flatgeobuf/formats/flatgeobuf.md new file mode 100644 index 0000000000..9c1b3ce43f --- /dev/null +++ b/docs/modules/flatgeobuf/formats/flatgeobuf.md @@ -0,0 +1,17 @@ +# FlatGeobuf + +- *[`@loaders.gl/flatgeobuf`](/docs/modules/flatgeobuf)* +- *[FlatGeobuf](http://flatgeobuf.org/)* + +FlatGeobuf is a binary (FlatBuffers-encoded) format that defines geospatial geometries. It is row-oriented rather than columnar (like GeoParquet and GeoArrow) and offers a different set of trade-offs. + +FlatGeobuf was inspired by [geobuf](https://github.com/mapbox/geobuf) and [flatbush](https://github.com/mourner/flatbush). 
+ +## Characteristics + +- binary +- row oriented +- supports appends, but no random writes + +Goals are to be suitable for large volumes of static data, significantly faster than legacy formats without size limitations for contents or metainformation and to be suitable for streaming/random access. + diff --git a/docs/modules/geopackage/README.md b/docs/modules/geopackage/README.md index 18598c78d7..ddf31b3af7 100644 --- a/docs/modules/geopackage/README.md +++ b/docs/modules/geopackage/README.md @@ -1,4 +1,4 @@ -# Overview 🚧 +# Overview ![ogc-logo](../../images/logos/ogc-logo-60.png) diff --git a/docs/modules/geopackage/api-reference/geopackage-loader.md b/docs/modules/geopackage/api-reference/geopackage-loader.md index dff6dae211..65bef30d5f 100644 --- a/docs/modules/geopackage/api-reference/geopackage-loader.md +++ b/docs/modules/geopackage/api-reference/geopackage-loader.md @@ -1,4 +1,4 @@ -# GeoPackageLoader 🚧 +# GeoPackageLoader ![ogc-logo](../../../images/logos/ogc-logo-60.png) @@ -6,6 +6,10 @@ From-v3.0

+:::caution +The `GeoPackageLoader` depends on the [`sql.js`](https://github.com/sql-js/sql.js) npm module which has caused issues with certain JavaScript bundlers. It is recommended that you do your own tests before using the `GeoPackageLoader` in your project. +::: + GeoPackage loader | Loader | Characteristic | diff --git a/docs/modules/las/README.md b/docs/modules/las/README.md index e783d40f06..1b1f461c55 100644 --- a/docs/modules/las/README.md +++ b/docs/modules/las/README.md @@ -1,6 +1,11 @@ # Overview -The `@loaders.gl/las` module handles the [LASER file format](https://www.asprs.org/divisions-committees/lidar-division/laser-las-file-format-exchange-activities) (LAS) or its compressed version (LAZ), a public format for the interchange of 3-dimensional point cloud data data, developed for LIDAR mapping purposes. +The `@loaders.gl/las` module supports the [LASER file format](/docs/modules/las/formats/las) (LAS) and its compressed version (LAZ). + +:::caution +The `@loaders.gl/las` module only supports LAS/LAZ files up to LAS v1.3. It does not support LAS v1.4 files. +For more detail, see the discussion in [GitHub Issues](https://github.com/visgl/loaders.gl/issues/591). +::: ## Installation diff --git a/docs/modules/las/api-reference/las-loader.md b/docs/modules/las/api-reference/las-loader.md index 59ed17ee78..557b7f38f5 100644 --- a/docs/modules/las/api-reference/las-loader.md +++ b/docs/modules/las/api-reference/las-loader.md @@ -1,5 +1,10 @@ # LASLoader +:::caution +The `@loaders.gl/las` module only supports LAS/LAZ files up to LAS v1.3. It does not support LAS v1.4 files. +For more detail, see the discussion in [GitHub Issues](https://github.com/visgl/loaders.gl/issues/591). +::: + The `LASLoader` parses a point cloud in the LASER file format. 
| Loader | Characteristic | diff --git a/docs/modules/las/formats/las.md b/docs/modules/las/formats/las.md index 1347004b3c..eb3376f018 100644 --- a/docs/modules/las/formats/las.md +++ b/docs/modules/las/formats/las.md @@ -1,3 +1,47 @@ # LAS / LAZ -The `@loaders.gl/las` module handles the [LASER file format](https://www.asprs.org/divisions-committees/lidar-division/laser-las-file-format-exchange-activities) (LAS) or its compressed version (LAZ), a public format for the interchange of 3-dimensional point cloud data data, developed for LIDAR mapping purposes. +- *[`@loaders.gl/las`](/docs/modules/las)* +- *[Wikipedia](https://en.wikipedia.org/wiki/LAS_file_format)* - *[LAS Spec](https://www.loc.gov/preservation/digital/formats/fdd/fdd000418.shtml)* - *[LASER file format](https://www.asprs.org/divisions-committees/lidar-division/laser-las-file-format-exchange-activities)* - *[LASzip project](https://github.com/LASzip/LASzip)* - *[LAZ spec](https://www.cs.unc.edu/~isenburg/lastools/download/laszip.pdf)* + +The *LASER file format* (LAS) and its compressed version (LAZ) are public formats for the interchange of 3-dimensional point cloud data, developed for LIDAR mapping purposes. + +## Variants + +LAS file format is not compressed. However, there is an open source project (LASzip) which defined and implemented the open file format LAZ to losslessly compress LAS data. + +| Variant | Description | +| ------- | --------------------- | +| LAS | Uncompressed | +| LAZ | Lossless compression. | + +## Version History + +| **Version** | **Date** | **loaders.gl
Support** | **Description** | +| ----------- | -------- | --------------------------- | --------------------------------------------------------------- | +| 1.4 | | ❌ | 64 bit support | +| 1.3 | | ✅ | Extended variable length records (EVLR) to hold longer metadata | +| 1.2 | | ✅ | | +| 1.1 | | ✅ | | +| 1.0 | | ✅ | | + +Notes: +- Work on LAS 2.0 was started but was suspended indefinitely. + +## File Structure + +A LAS file consists of sections: + +| Section | Description | +| --------------------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | +| Public header block | Describes format, number of points, extent of the point cloud and other generic data. | +| Variable length records (VLR) | Any number of optional records (up to 64K bytes) to provide various data such as the spatial reference system used, metadata, waveform packet information and user application data. | +| Point data records | Data for each of the individual points in the point cloud, including coordinates, classification (e.g. terrain or building), flight and scan data, etc. | +| Extended variable length records (EVLR) | From v1.3. Similar to VLRs but located after the point data records and allow a much larger data payload per record due to the use of 8-byte size descriptors. | +| Point data records | A LAS file contains point records in one of the point data record formats defined by the LAS specification | + +Notes: +- As of LAS 1.4, there are 11 point data record formats (0 through 10) available. All point data records must be of the same format within the file. The various formats differ in the data fields available, such as GPS time, RGB and NIR color and wave packet information. 
+- The 3D point coordinates are represented within the point data records by 32-bit integers, to which a scaling and offset defined in the public header must be applied in order to obtain the actual coordinates. +- As the number of bytes used per point data record is explicitly given in the public header block, it is possible to add user-defined fields in "extra bytes" to the fields given by the specification-defined point data record formats. A standardized way of interpreting such extra bytes was introduced in the LAS 1.4 specification, in the form of a specific EVLR. + + diff --git a/docs/modules/mvt/api-reference/tilejson-loader.md b/docs/modules/mvt/api-reference/tilejson-loader.md index 928525e262..7c270088b9 100644 --- a/docs/modules/mvt/api-reference/tilejson-loader.md +++ b/docs/modules/mvt/api-reference/tilejson-loader.md @@ -4,7 +4,8 @@ From-v4.0

-The `TileJSONLoader` parses header/metadata from a pmtiles archive +The `TileJSONLoader` parses metadata from a TileJSON / tilestats file. It merges layer and field information from both tilestats and TileJSON and returns a strongly typed data structure. + | Loader | Characteristic | | --------------------- | -------------------------------------------------- | @@ -25,7 +26,12 @@ import {load} from '@loaders.gl/core'; const tileJSON = await load(url, TileJSONLoader, options); ``` +## Data Format + +See [TileJSON format](/docs/modules/mvt/formats/tilejson). + ## Options | Option | Type | Default | Description | | ------ | ---- | ------- | ----------- | +| \ No newline at end of file diff --git a/docs/modules/mvt/formats/mvt.md b/docs/modules/mvt/formats/mvt.md index 665589668b..cf782b2312 100644 --- a/docs/modules/mvt/formats/mvt.md +++ b/docs/modules/mvt/formats/mvt.md @@ -1,12 +1,14 @@ # Mapbox Vector Tile +- *[`@loaders.gl/mvt`](/docs/modules/mvt)* - *[Mapbox Vector Tile Specification](https://github.com/mapbox/vector-tile-spec)* - A specification for encoding tiled vector data. MVT is a protobuf-encoded format that defines geospatial geometries. +Tiles contain layers with features; the features can have geometries and properties. + ## Metadata It is often useful to have global metadata about a tileset. A common complementary format for encoding tileset metadata is [TileJSON](./tilejson). 
diff --git a/docs/modules/mvt/formats/tilejson.md b/docs/modules/mvt/formats/tilejson.md index 5c29814728..574fba8428 100644 --- a/docs/modules/mvt/formats/tilejson.md +++ b/docs/modules/mvt/formats/tilejson.md @@ -1,30 +1,87 @@ # TileJSON / Tilestats -- *[TileJSON specification](https://github.com/mapbox/tilejson-spec/blob/master/3.0.0/README.md)* -- *[Tilestats information](https://github.com/mapbox/mapbox-geostats) -- *[Tilestats generation](https://github.com/mapbox/mapbox-geostats#output-the-stats) +- *[`@loaders.gl/mvt`](/docs/modules/mvt)* +- *[TileJSON specification](https://github.com/mapbox/tilejson-spec/blob/master/3.0.0/README.md)* - *[Tilestats information](https://github.com/mapbox/mapbox-geostats) - *[Tilestats generation](https://github.com/mapbox/mapbox-geostats#output-the-stats) ## TileJSON -Metadata about a tileset. +A TileJSON file contains JSON-encoded metadata about a vector tileset including which layers and fields (columns) can be found in the tiles. -for representing metadata about multiple types of web-based map layers, to aid clients in configuration and browsing. +The information in the TileJSON enables clients (such as web viewers) to understand the structure of data in the tileset up front, instead of piecing it together as tiles are loaded. -As the name suggests, TileJSON is JSON encoded. -## Tilestats +While not all vector tilesets have a TileJSON file, when it is present there is normally a single TileJSON file per tileset, and this file is typically found at the root in TMS or XYZ based URL schemas, so that applications can make a speculative attempt to fetch it from a known place. -Tilestats is a valuable inofficial "extension" to TileJSON. It provides column statistics, notably: +## tilestats + +`tilestats` is an unofficial "extension" to TileJSON. It provides column statistics, notably: - the data type of each column - min/max values for numeric columns (enabling e.g. global color scale calculations). 
- a sample of values for each column -Tilestats are not always available so applications must be prepared to work in their absence. -However, tilestats is output by major tilers such as [tippecanoe](https://github.com/mapbox/mapbox-geostats#output-the-stats). +tilestats provide "global" information about the data in the tileset that allows for e.g. + the creation of correct color maps that do not depend on the view (which requires knowing a priori the min and max values of each field / column). + +Note that tilestats are not always available for a given tileset, so applications must be prepared to work in their absence. +However, tilestats are output by major tilers such as [tippecanoe](https://github.com/mapbox/mapbox-geostats#output-the-stats). + +## Format Description +loaders.gl returns a typed `TileJSON` object, with merged tilestats information (if present). + +```typescript +export type TileJSON +``` ## Fields -| Data | TileJSON | tilestats | Description | -| --- | --- | --- | --- | -| \ No newline at end of file +| Data | Type | TileJSON | tilestats | Description | +| ------------------- | ------------------ | -------- | --------- | --------------------------------------------------------------------- | +| `name?` | `string` | | | Name of the tileset. | +| `description?` | `string` | | | Short description of the tileset. | +| `version?` | `string` | | | Version of the tileset. | +| `tileFormat?` | `string` | | | Format of the tiles in the tileset. | +| `tilesetType?` | `string` | | | Type of tileset. | +| `generator?` | `string` | | | Generating application. (e.g. tippecanoe adds this). | +| `generatorOptions?` | `string` | | | Generating application options. (e.g. tippecanoe adds this). | +| `scheme?` | `'xyz'` \| `'tms'` | | | Tile indexing scheme. Typically `tms`, i.e. `z/x/y` coordinates. 
| +| `tiles?` | `string[]` | | | Sharded URLs (can increase loading speed on HTTP 1 connections) | +| `boundingBox?` | `[[w, s], [e, n]]` | | | limits of bounding box using axis units and order of specified CRS. | +| `maxZoom?` | `number` | | | May be set to the maxZoom of the first layer | +| `minZoom?` | `number` | | | May be set to the minZoom of the first layer | +| `center?` | `number[]` | | | Center point of the data in the tileset | +| `htmlAttribution?` | `string` | | | Attribution (can contain HTML syntax) | +| `htmlLegend?` | `string` | | | Legend (can contain HTML syntax) | +| `layers?` | `TileJSONLayer[]` | | | Layer information (combines tilestats (if present) and tilejson info) | +| `metaJson?` | `any` | | | Any unparsed, nested JSON metadata | + +- `boundingBox` typing: `[min: [w: number, s: number], max: [e: number, n: number]]` + +```ts +export type TileJSONLayer; +``` + +| Data | Type | TileJSON | tilestats | Description | +| ------------------- | ----------------- | -------- | --------- | ------------------------------------------------------------------------------------ | +| `name` | `string` | | | The name (id) of this layer (tilejson.vector_layers[].id / tilestats.layers[].layer) | +| `description?` | `string` | | | The description of this layer (tilejson.layer.description) | +| `featureCount?` | `number` | | | The number of features in this layer (tilestats.layer.count) | +| `dominantGeometry?` | `string` | | | The dominant geometry type in this layer (tilestats.layer.geometry) | +| `minZoom?` | `number` | | | An array of details about the first 100 attributes in this layer | +| `maxZoom?` | `number` | | | | +| `fields` | `TileJSONField[]` | | | | + + +```ts +export type TileJSONField; +``` + +| Data | Type | TileJSON | tilestats | Description | +| ------------------- | ----------- | -------- | --------- | ---------------------------------------------------- | +| `name` | `string` | | | The name of this attribute | +| `description?` | 
`string` | | | | +| `type` | `string` | | | | +| `min?` | `number` | | | min value (if there are *any* numbers in the values) | +| `max?` | `number` | | | max value (if there are *any* numbers in the values) | +| `uniqueValueCount?` | `number` | | | Number of unique values across the tileset | +| `values?` | `unknown[]` | | | An array of this attribute's first 100 unique values | diff --git a/docs/modules/pmtiles/formats/pmtiles.md b/docs/modules/pmtiles/formats/pmtiles.md index a71c97e939..f1d40c3c97 100644 --- a/docs/modules/pmtiles/formats/pmtiles.md +++ b/docs/modules/pmtiles/formats/pmtiles.md @@ -6,159 +6,38 @@ PMTiles is a single-file archive format for tiled data designed to enable indivi ## Overview -TBA +PMTiles is a general format for storing tiled data addressed by Z/X/Y coordinates in a single big archive file. This can be used to store cartographic basemap vector tiles, remote sensing observations, JPEG images, or more. -## Versions +PMTiles readers use HTTP Range Requests to fetch only the relevant tile or metadata inside a PMTiles archive on-demand. -## Version 3 +## Tile types -- File Structure -97% smaller overhead - Spec version 2 would always issue a 512 kilobyte initial request; version 3 reduces this to 16 kilobytes. What remains the same is that nearly any map tile can be retrieved in at most two additional requests. -- Unlimited metadata - version 2 had a hard cap on the amount of JSON metadata of about 300 kilobytes; version 3 removes this limit. This is essential for tools like tippecanoe to store detailed column statistics. Essential archive information, such as tile type and compression methods, are stored in a binary header separate from application metadata. -- Hilbert tile IDs - tiles internally are addressed by a single 64-bit Hilbert tile ID instead of Z/X/Y. See the blog post on Tile IDs for details. -- Archive ordering - An optional clustered mode enforces that tile contents are laid out in Tile ID order. 
-- Compressed directories and metadata - Directories used to fetch offsets of tile data consume about 10% the space of those in version 2. See the blog post on compressed directories for details. -- JavaScript -Compression - The TypeScript pmtiles library now includes a decompressor - fflate - to allow reading compressed vector tile archives directly in the browser. This reduces the size and latency of vector tiles by as much as 70%. -- Tile Cancellation - All JavaScript plugins now support tile cancellation, meaning quick zooming across many levels will interrupt the loading of tiles that are never shown. This has a significant effect on the perceived user experience, as tiles at the end of a animation will appear earlier. -- ETag support - clients can detect when files change on static storage by reading the ETag HTTP header. This means that PMTiles-based map applications can update datasets in place at low frequency without running into caching problems. - -## Version 3 Specification - -### File structure - -A PMTiles archive is a single-file archive of square tiles with five main sections: - -1. A fixed-size, 127-byte **Header** starting with `PMTiles` and then the spec version - currently `3` - that contains offsets to the next sections. -2. A root **Directory**, described below. The Header and Root combined must be less than 16,384 bytes. -3. JSON metadata. -4. Optionally, a section of **Leaf Directories**, encoded the same way as the root. -5. The tile data. - -### Entries - -A Directory is a list of `Entries`, in ascending order by `TileId`: - - Entry = (TileId uint64, Offset uint64, Length uint32, RunLength uint32) - -* `TileId` starts at 0 and corresponds to a cumulative position on the series of square Hilbert curves starting at z=0. -* `Offset` is the position of the tile in the file relative to the start of the data section. -* `Length` is the size of the tile in bytes. 
-* `RunLength` is how many times this tile is repeated: the `TileId=5,RunLength=2` means that tile is present at IDs 5 and 6. -* If `RunLength=0`, the offset/length points to a Leaf Directory where `TileId` is the first entry. - -### Directory Serialization - -Entries are stored in memory as integers, but serialized to disk using these compression steps: - -1. A little-endian varint indicating the # of entries -2. Delta encoding of `TileId` -3. Zeroing of `Offset`: - * `0` if it is equal to the `Offset` + `Length` of the previous entry - * `Offset+1` otherwise -4. Varint encoding of all numbers -5. Columnar ordering: all `TileId`s, all `RunLength`s, all `Length`s, then all `Offset`s -6. Finally, general purpose compression as described by the `Header`'s `InternalCompression` field - -##3 Directory Hierarchy - -* The number of entries in the root directory and leaf directories is up to the implementation. -* However, the compressed size of the header plus root directory is required in v3 to be under **16,384 bytes**. This is to allow latency-optimized clients to prefetch the root directory and guarantee it is complete. A sophisticated writer might need several attempts to optimize this. -* Root size, leaf sizes and depth should be configurable by the user to optimize for different trade-offs: cost, bandwidth, latency. +PMTiles is a container format and can in principle contain any type of quadtree tiles. A number of vector and image tile types are predefined. 
-### Header Design +| Type | MIME type | pmtiles | Description | +| ---- | -------------------------------------- | ------- | --------------------------------------------------- | +| MVT | `'application/vnd.mapbox-vector-tile'` | `1` | [Mapbox Vector Tile](/docs/modules/mvt/formats/mvt) | +| PNG | `'image/png'` | `2` | | +| JPEG | `'image/jpeg'` | `3` | | +| WEBP | `'image/webp'` | `4` | | +| AVIF | `'image/avif'` | `5` | | +| - | `'application/octet-stream'` | - | Undefined / Custom types | -*Certain fields belonging to metadata in v2 are promoted to fixed-size header fields. This allows a map container to be initialized to the desired extent or center without blocking on the JSON metadata, and allows proxies to return well-defined HTTP headers.* -The `Header` is 127 bytes, with little-endian integer values: +## Metadata -| offset | description | width | -| ------ | ----------------------------------------------------------------------------------------- | ----- | -| 0 | magic number `PMTiles` | 7 | -| 7 | spec version, currently `3` | 1 | -| 8 | offset of root directory | 8 | -| 16 | length of root directory | 8 | -| 24 | offset of JSON metadata, possibly compressed by `InternalCompression` | 8 | -| 32 | length of JSON metadata | 8 | -| 40 | offset of leaf directories | 8 | -| 48 | length of leaf directories | 8 | -| 56 | offset of tile data | 8 | -| 64 | length of tile data | 8 | -| 72 | # of addressed tiles, 0 if unknown | 8 | -| 80 | # of tile entries, 0 if unknown | 8 | -| 88 | # of tile contents, 0 if unknown | 8 | -| 96 | boolean clustered flag, `0x1` if true | 1 | -| 97 | `InternalCompression` enum (0 = Unknown, 1 = None, 2 = Gzip, 3 = Brotli, 4 = Zstd) | 1 | -| 98 | `TileCompression` enum | 1 | -| 99 | tile type enum (0 = Unknown/Other, 1 = MVT (PBF Vector Tile), 2 = PNG, 3 = JPEG, 4 = WEBP | 1 | -| 100 | min zoom | 1 | -| 101 | max zoom | 1 | -| 102 | min longitude (signed 32-bit integer: longitude * 10,000,000) | 4 | -| 106 | min latitude | 4 | -| 110 | 
max longitude | 4 | -| 114 | max latitude | 4 | -| 118 | center zoom | 1 | -| 119 | center longitude | 4 | -| 123 | center latitude | 4 | +The pmtiles header has a field that can store JSON metadata. This means that for MVT pmtiles, [TileJSON](/docs/modules/mvt/formats/tilejson) is typically available, stored in the PMTiles header metadata field. -### Notes -* **# of addressed tiles**: the total number of tiles before run-length encoding, i.e. `Sum(RunLength)` over all entries. -* **# of tile entries**: the total number of entries across all directories where `RunLength > 0`. -* **# # of tile contents**: the number of referenced blobs in the tile section, or the unique # of offsets. If the archive is completely deduplicated, this is equal to the # of unique tile contents. If there is no deduplication, this is equal to the number of tile entries above. -* **boolean clustered flag**: if true, blobs in the data section are ordered by Hilbert `TileId`. When writing with deduplication, this means that offsets are either contiguous with the previous offset+length, or refer to a lesser offset. -* **compression enum**: Mandatory, tells the client how to decompress contents as well as provide correct `Content-Encoding` headers to browsers. -* **tile type**: A hint as to the tile contents. Clients and proxies may use this to: - * Automatically determine a visualization method - * provide a conventional MIME type `Content-Type` HTTP header - * Enforce a canonical extension e.g. `.mvt`, `png`, `jpeg`, `.webp` to prevent duplication in caches +## Version History -### Organization +**Version 3** -In most cases, the archive should be in the order `Header`, Root Directory, JSON Metadata, Leaf Directories, Tile Data. It is possible to relocate sections other than `Header` arbitrarily, but no current writers/readers take advantage of this. A future design may allow for reverse-ordered archives to enable single-pass writing. 
- - -## Version 2 - -*Note: this is deprecated in favor of spec version 3.* - -PMTiles is a binary serialization format designed for two main access patterns: over the network, via HTTP 1.1 Byte Serving (`Range:` requests), or via memory-mapped files on disk. **All integer values are little-endian.** - -A PMTiles archive is composed of: -* a fixed-size 512,000 byte header section -* Followed by any number of tiles in arbitrary format -* Optionally followed by any number of *leaf directories* - -### Header -* The header begins with a 2-byte magic number, "PM" -* Followed by 2 bytes, the PMTiles specification version (currently 2). -* Followed by 4 bytes, the length of metadata (M bytes) -* Followed by 2 bytes, the number of entries in the *root directory* (N entries) -* Followed by M bytes of metadata, which **must be a JSON string with bounds, minzoom and maxzoom properties (new in v2)** -* Followed by N * 17 bytes, the root directory. - -### Directory structure - -A directory is a contiguous sequence of 17 byte entries. A directory can have at most 21,845 entries. **A directory must be sorted by Z, X and then Y order (new in v2).** - -An entry consists of: -* 1 byte: the zoom level (Z) of the entry, with the top bit set to 1 instead of 0 to indicate the offset/length points to a leaf directory and not a tile. -* 3 bytes: the X (column) of the entry. -* 3 bytes: the Y (row) of the entry. -* 6 bytes: the offset of where the tile begins in the archive. -* 4 bytes: the length of the tile, in bytes. - -**All leaf directory entries follow non-leaf entries. All leaf directories in a single directory must have the same Z value. (new in v2).** - -### Notes -* A full directory of 21,845 entries holds exactly a complete pyramid with 8 levels, or 1+4+16+64+256+1024+4096+16384. -* A PMTiles archive with less than 21,845 tiles should have a root directory and no leaf directories. 
-* Multiple tile entries can point to the same offset; this is useful for de-duplicating certain tiles, such as an empty "ocean" tile. -* Analogously, multiple leaf directory entries can point to the same offset; this can avoid inefficiently-packed small leaf directories. -* The tentative media type for PMTiles archives is `application/vnd.pmtiles`. - -### Implementation suggestions - -* PMTiles is designed to make implementing a writer simple. Reserve 512KB, then write all tiles, recording their entry information; then write all leaf directories; finally, rewind to 0 and write the header. -* The order of tile data in the archive is unspecified; an optimized implementation should arrange tiles on a 2D space-filling curve. -* PMTiles readers should cache directory entries by byte offset, not by Z/X/Y. This means that deduplicated leaf directories result in cache hits. \ No newline at end of file +- File Structure - smaller overhead +- Unlimited metadata - version 2 had a hard cap on the amount of JSON metadata of about 300 kilobytes. Allows tools like tippecanoe to store detailed column statistics. Essential archive information, such as tile type and compression methods, is stored in a binary header separate from application metadata. +- Hilbert tile IDs - tiles internally are addressed by a single 64-bit Hilbert tile ID instead of Z/X/Y. See the blog post on Tile IDs for details. +- Archive ordering - An optional clustered mode enforces that tile contents are laid out in Tile ID order. +- Compressed directories and metadata - Directories used to fetch offsets of tile data consume about 10% the space of those in version 2. +- JavaScript Compression - The TypeScript pmtiles library now includes a decompressor - fflate - to allow reading compressed vector tile archives directly in the browser. This reduces the size and latency of vector tiles by as much as 70%. 
+- Tile Cancellation - All JavaScript plugins now support tile cancellation, meaning quick zooming across many levels will interrupt the loading of tiles that are never shown. This has a significant effect on the perceived user experience, as tiles at the end of an animation will appear earlier. +- ETag support - clients can detect when files change on static storage by reading the ETag HTTP header. This means that PMTiles-based map applications can update datasets in place at low frequency without running into caching problems. diff --git a/docs/upgrade-guide.md b/docs/upgrade-guide.md index 3dcfa274c3..688ec0e999 100644 --- a/docs/upgrade-guide.md +++ b/docs/upgrade-guide.md @@ -1,6 +1,13 @@ # Upgrade Guide -## Upgrading to loaders.gl v4.0 +## Upgrading to v4.1 + +**@loaders.gl/wkt** + +- `WKBLoader`/`TWKBLoader`/`HexWKBLoader` - The default `shape` is now `geojson-geometry` rather than `binary-geometry`. If you were relying on `binary-geometry`, just add a `shape: 'binary-geometry'` option, as in `load(..., WKBLoader, {wkb: {shape: 'binary-geometry'}})`. +- The `geometry` shape is deprecated, and now called `geojson-geometry`. + +## Upgrading to v4.0 **Node.js v18+** diff --git a/docs/whats-new.mdx b/docs/whats-new.mdx index 9fedf6a692..bef27771d4 100644 --- a/docs/whats-new.mdx +++ b/docs/whats-new.mdx @@ -1,5 +1,20 @@ # What's New +## v4.1 (In development) + +Target Release Date: Early 2024 + +- Tooling upgrades: Typescript 5.3, vite 5.0, docusaurus 3.0 + +**@loaders.gl/arrow** + +- New [`GeoArrowLoader`](/docs/modules/arrow/api-reference/geoarrow-loader) supports loading [GeoArrow](/docs/modules/arrow/formats/geoarrow) files. +- New documentation for [Arrow](/docs/modules/arrow/formats/arrow) and [GeoArrow](/docs/modules/arrow/formats/geoarrow) formats. + +**@loaders.gl/flatgeobuf** + +- Loading flatgeobuf into a `GeoJSONTable` now extracts a Schema object from the flatgeobuf header, and uses it to infer the types of the columns in the table. 
+ ## v4.0 Release Date: Oct 30, 2023 diff --git a/examples/get-started/bundle-with-nextjs/package.json b/examples/get-started/bundle-with-nextjs/package.json index f38135f187..757b4a0960 100644 --- a/examples/get-started/bundle-with-nextjs/package.json +++ b/examples/get-started/bundle-with-nextjs/package.json @@ -23,6 +23,6 @@ "react": "18.2.0", "react-dom": "18.2.0", "sass": "^1.58.0", - "typescript": "^5.0.4" + "typescript": "^5.3.0" } } diff --git a/examples/get-started/bundle-with-rollup/package.json b/examples/get-started/bundle-with-rollup/package.json index a2187c1721..bf5821b610 100644 --- a/examples/get-started/bundle-with-rollup/package.json +++ b/examples/get-started/bundle-with-rollup/package.json @@ -24,6 +24,6 @@ "@rollup/plugin-typescript": "^11.0.0", "rollup": "^3.17.0", "serve": "^14.2.0", - "typescript": "^5.0.4" + "typescript": "^5.3.0" } } diff --git a/examples/get-started/bundle-with-vite/package.json b/examples/get-started/bundle-with-vite/package.json index 45c9af22c4..13d29a2226 100644 --- a/examples/get-started/bundle-with-vite/package.json +++ b/examples/get-started/bundle-with-vite/package.json @@ -13,7 +13,7 @@ "@loaders.gl/gltf": "^4.0.0" }, "devDependencies": { - "typescript": "^5.0.4", - "vite": "^4.4.9" + "typescript": "^5.3.0", + "vite": "^5.0.0" } } diff --git a/examples/get-started/bundle-with-webpack-5/package.json b/examples/get-started/bundle-with-webpack-5/package.json index bbc9278720..b2cf00f737 100644 --- a/examples/get-started/bundle-with-webpack-5/package.json +++ b/examples/get-started/bundle-with-webpack-5/package.json @@ -15,7 +15,7 @@ "devDependencies": { "html-webpack-plugin": "^5.5.0", "ts-loader": "^9.4.2", - "typescript": "^5.0.4", + "typescript": "^5.3.0", "webpack": "^5.75.0", "webpack-cli": "^5.0.1", "webpack-dev-server": "^4.11.1" diff --git a/examples/webpack.config.local.js b/examples/webpack.config.local.js deleted file mode 100644 index 04b97ed7a3..0000000000 --- a/examples/webpack.config.local.js +++ 
/dev/null @@ -1,275 +0,0 @@ -// This file contains webpack configuration settings that allow -// examples to be built against the source code in this repo instead -// of building against their installed version. -// -// This enables using the examples to debug the main library source -// without publishing or npm linking, with conveniences such hot reloading etc. -const webpack = require('webpack'); -const resolve = require('path').resolve; -const BundleAnalyzerPlugin = require('webpack-bundle-analyzer').BundleAnalyzerPlugin; -const {getOcularConfig} = require('ocular-dev-tools'); -const ALIASES = getOcularConfig({root: resolve(__dirname, '..')}).aliases; -const ROOT_DIR = resolve(__dirname, '..'); -const LERNA_INFO = require(resolve(ROOT_DIR, 'lerna.json')); - -const BABEL_CONFIG = { - presets: ['@babel/env'], - plugins: [ - [ - 'babel-plugin-inline-import', - { - extensions: ['.worker.js'] - } - ], - ['@babel/plugin-transform-runtime', {useESModules: true}], - 'version-inline' - ], - ignore: ['**/*.worker.js'] -}; - -const DECK_LINK_ALIASES = { - // TODO - add all aliases - '@deck.gl/core': resolve(ROOT_DIR, '../deck.gl/modules/core/src'), - '@deck.gl/layers': resolve(ROOT_DIR, '../deck.gl/modules/layers/src'), - '@deck.gl/mesh-layers': resolve(ROOT_DIR, '../deck.gl/modules/mesh-layers/src'), - '@deck.gl/geo-layers': resolve(ROOT_DIR, '../deck.gl/modules/geo-layers/src'), - '@deck.gl/react': resolve(ROOT_DIR, '../deck.gl/modules/react/src') -}; - -const MATH_LINK_ALIASES = { - '@math.gl/core': resolve(ROOT_DIR, '../math.gl/modules/core/src'), - '@math.gl/culling': resolve(ROOT_DIR, '../math.gl/modules/culling/src'), - '@math.gl/geospatial': resolve(ROOT_DIR, '../math.gl/modules/geospatial/src') -}; - -const LUMA_LINK_ALIASES = { - '@luma.gl/experimental': resolve(ROOT_DIR, '../luma.gl/modules/experimental/src'), - '@luma.gl/constants': resolve(ROOT_DIR, '../luma.gl/modules/constants/src'), - '@luma.gl/core': resolve(ROOT_DIR, '../luma.gl/modules/core/src'), - 
'@luma.gl/debug': resolve(ROOT_DIR, '../luma.gl/modules/debug/src'), - '@luma.gl/engine': resolve(ROOT_DIR, '../luma.gl/modules/engine/src'), - '@luma.gl/gltools': resolve(ROOT_DIR, '../luma.gl/modules/gltools/src'), - '@luma.gl/shadertools': resolve(ROOT_DIR, '../luma.gl/modules/shadertools/src'), - '@luma.gl/webgl': resolve(ROOT_DIR, '../luma.gl/modules/webgl/src') -}; - -// Support for hot reloading changes to the library: -const LOCAL_DEVELOPMENT_CONFIG = { - mode: 'development', - - devtool: 'source-map', - - // suppress warnings about bundle size - devServer: { - stats: { - warnings: false - }, - contentBase: [resolve('.'), resolve('../../'), resolve('../../../')] - }, - - // this is required by draco - node: { - fs: 'empty' - }, - - resolve: { - extensions: ['.ts', '.tsx', '.js', '.json'], - // mainFields: ['esnext', 'browser', 'module', 'main'], - // Imports the library from its src directory in this repo - alias: Object.assign({}, ALIASES) - }, - - module: { - rules: [ - { - test: /\.ts$/, - loader: 'babel-loader', - exclude: [/node_modules/], - options: { - presets: ['@babel/preset-typescript', [ - '@babel/preset-env', - { - exclude: [/transform-async-to-generator/, /transform-regenerator/] - } - ], '@babel/preset-react'] - } - }, - { - test: /\.js$/, - loader: 'babel-loader', - exclude: [/node_modules/], - options: { - presets: [ - [ - '@babel/preset-env', - { - exclude: [/transform-async-to-generator/, /transform-regenerator/] - } - ], '@babel/preset-react'] - } - }, - { - // Unfortunately, webpack doesn't import library sourcemaps on its own... - test: /\.js$/, - use: ['source-map-loader'], - enforce: 'pre' - }, - { - // This is required to handle inline worker! 
- test: /worker.*\.js$/, - exclude: /node_modules/, - use: [ - { - loader: 'babel-loader', - options: BABEL_CONFIG - } - ] - }, - // workers need to be completely transpiled - { - // Load worker tests - test: /\.worker\.js$/, - use: { - loader: 'worker-loader' - } - } - ] - }, - - plugins: [ - new webpack.DefinePlugin({ - __VERSION__: JSON.stringify(LERNA_INFO.version) - }) - ] -}; - -function addLocalDependency(config, dependency) { - config.resolve = config.resolve || {}; - config.resolve.alias = config.resolve.alias || {}; - - switch (dependency) { - case 'deck': - Object.assign(config.resolve.alias, DECK_LINK_ALIASES); - break; - case 'luma': - Object.assign(config.resolve.alias, LUMA_LINK_ALIASES); - break; - case 'math': - Object.assign(config.resolve.alias, MATH_LINK_ALIASES); - break; - default: - } - // We need to have 1 `React` instance when running `yarn start-local-deck` - Object.assign(config.resolve.alias, { - react: resolve(ROOT_DIR, 'node_modules/react') - }); - - return config; -} - -function addLocalDevSettings(config, opts) { - // Merge local config with module config - config = Object.assign({}, LOCAL_DEVELOPMENT_CONFIG, config); - // Generate initial resolve object. 
- config.resolve = config.resolve || {}; - // Generate initial alias object - config.resolve.alias = config.resolve.alias || {}; - // Merge aliases from config with loacal config aliases - Object.assign(config.resolve.alias, LOCAL_DEVELOPMENT_CONFIG.resolve.alias); - // Use extensions from local config - config.resolve.extensions = LOCAL_DEVELOPMENT_CONFIG.resolve.extensions; - // Generate initial config mudule - config.module = config.module || {}; - // Get module config rules - const configRules = config.module.rules || []; - // Merge local rules with module config rules - config.module.rules = LOCAL_DEVELOPMENT_CONFIG.module.rules.concat(configRules); - // Use initial config plugins - config.plugins = config.plugins || []; - // Do concatenation of local and module config plugins - config.plugins = config.plugins.concat(LOCAL_DEVELOPMENT_CONFIG.plugins); - // Uncomment to validate generated config - // console.log(config); - - return config; -} - -function addAnalyzerSettings(config) { - config.mode = 'production'; - - config.resolve = config.resolve || {}; - // 'esnext' picks up ES6 dist for smaller bundles - - config.plugins = config.plugins || []; - config.plugins.push(new BundleAnalyzerPlugin()); - return config; -} - -// Use non-transpiled vis.gl dependencies and disable regenerator transforms -function addESNextSettings(config) { - // Add 'esnext' to make sure vis.gl frameworks are imported with minimal transpilation - config.resolve = config.resolve || {}; - config.resolve.mainFields = config.resolve.mainFields || ['browser', 'module', 'main']; - config.resolve.mainFields.shift('esnext'); - - // Look for babel plugin - config.module = config.module || {}; - config.module.rules = config.module.rules || []; - const babelRule = config.module.rules.find((rule) => rule.loader === 'babel-loader'); - - // If found, inject excludes in @babel/present-env to prevent transpile - if (babelRule && babelRule.options && babelRule.options.presets) { - 
babelRule.options.presets = babelRule.options.presets.map((preset) => { - if (preset === '@babel/preset-env') { - return [ - '@babel/preset-env', - { - exclude: [/transform-async-to-generator/, /transform-regenerator/] - } - ]; - } - return preset; - }); - } - return config; -} - -module.exports = - (baseConfig, opts = {}) => - (env) => { - let config = baseConfig; - - /* eslint-disable no-console */ - if (env && env.help) { - console.log( - '--env.esnext: Use non-transpiled vis.gl dependencies and disable regenerator transforms' - ); - console.log('--env.local: Build against local src for modules in this repo'); - console.log('--env.math,luma,deck: Build against local src for external repos'); - console.log('--env.analyze: Add bundle size analyzer plugin'); - } - - console.log('For documentation on build options, run: "yarn start --env.help"'); - - /* eslint-enable no-console */ - if (env && env.esnext) { - config = addESNextSettings(config); - } - - if (env && env.local) { - config = addLocalDevSettings(config, opts); - } - - // Iterate over env keys and see if they match a local dependency - for (const key in env || {}) { - config = addLocalDependency(config, key); - } - - if (env && env.analyze) { - config = addAnalyzerSettings(config); - } - - // uncomment to debug - // console.warn(JSON.stringify(config, null, 2)); - return config; - }; diff --git a/examples/website/3d-tiles/package.json b/examples/website/3d-tiles/package.json index d4bb035d02..5695639c47 100644 --- a/examples/website/3d-tiles/package.json +++ b/examples/website/3d-tiles/package.json @@ -31,7 +31,7 @@ "styled-components": "^4.2.0" }, "devDependencies": { - "typescript": "^5.0.4", - "vite": "^4.4.9" + "typescript": "^5.3.0", + "vite": "^5.0.0" } } diff --git a/examples/website/geospatial/app.tsx b/examples/website/geospatial/app.tsx index 3f3f3daa21..d390dd92de 100644 --- a/examples/website/geospatial/app.tsx +++ b/examples/website/geospatial/app.tsx @@ -5,32 +5,40 @@ import React, {useState, 
useEffect} from 'react'; import {createRoot} from 'react-dom/client'; import {Map} from 'react-map-gl'; -import maplibregl from 'maplibre-gl'; +import maplibregl, {Properties} from 'maplibre-gl'; import {DeckGL} from '@deck.gl/react/typed'; import {MapController} from '@deck.gl/core/typed'; import {GeoJsonLayer} from '@deck.gl/layers/typed'; import {ControlPanel} from './components/control-panel'; -import {FileUploader} from './components/file-uploader'; +// import {FileUploader} from './components/file-uploader'; import type {Example} from './examples'; import {INITIAL_LOADER_NAME, INITIAL_EXAMPLE_NAME, INITIAL_MAP_STYLE, EXAMPLES} from './examples'; import {Table, GeoJSON} from '@loaders.gl/schema'; import {Loader, load /* registerLoaders */} from '@loaders.gl/core'; -import {ParquetLoader, installBufferPolyfill} from '@loaders.gl/parquet'; +import {GeoArrowLoader} from '@loaders.gl/arrow'; +import {GeoParquetLoader, installBufferPolyfill} from '@loaders.gl/parquet'; import {FlatGeobufLoader} from '@loaders.gl/flatgeobuf'; +import {ShapefileLoader} from '@loaders.gl/shapefile'; +import {KMLLoader, GPXLoader, TCXLoader} from '@loaders.gl/kml'; + +// GeoPackage depends on sql.js which has bundling issues in docusuarus. 
// import {GeoPackageLoader} from '@loaders.gl/geopackage'; -import {ArrowLoader} from '@loaders.gl/arrow'; installBufferPolyfill(); const LOADERS: Loader[] = [ - ArrowLoader, - ParquetLoader, - FlatGeobufLoader + GeoArrowLoader, + GeoParquetLoader, + FlatGeobufLoader, // GeoPackageLoader + ShapefileLoader, + KMLLoader, + GPXLoader, + TCXLoader ]; const LOADER_OPTIONS = { worker: false, @@ -48,6 +56,18 @@ const LOADER_OPTIONS = { geopackage: { shape: 'geojson-table' // table: 'FEATURESriversds' + }, + shapefile: { + shape: 'geojson-table' + }, + kml: { + shape: 'geojson-table' + }, + gpx: { + shape: 'geojson-table' + }, + tcx: { + shape: 'geojson-table' } }; @@ -76,15 +96,14 @@ type AppState = { }; /** - * + * A Geospatial table map viewer */ export default function App(props: AppProps) { - const [state, setState] = useState({ // EXAMPLE STATE examples: EXAMPLES, - selectedExample: INITIAL_EXAMPLE_NAME, - selectedLoader: INITIAL_LOADER_NAME, + selectedExample: null, + selectedLoader: null, // CURRENT VIEW POINT / CAMERA POSITION viewState: INITIAL_VIEW_STATE, @@ -96,74 +115,82 @@ export default function App(props: AppProps) { useEffect(() => { let examples: Record> = {...EXAMPLES}; if (props.format) { - // Move the preferred format examples to the "top" - examples = {[props.format]: EXAMPLES[props.format], ...EXAMPLES}; - // Remove any keys - for (const key of Object.keys(examples)) { - if (key.endsWith('Test')) { - delete examples[key]; - } - } + // Keep only the preferred format examples + examples = {[props.format]: EXAMPLES[props.format]}; } const selectedLoader = props.format || INITIAL_LOADER_NAME; + let selectedExample = props.format + ? 
Object.keys(examples[selectedLoader])[0] + : INITIAL_EXAMPLE_NAME; - let selectedExample = INITIAL_EXAMPLE_NAME; - if (props.format) { - for (const exampleName of Object.keys(examples[selectedLoader])) { - selectedExample = exampleName; - break; - } - } - setState({...state, examples, selectedExample, selectedLoader}); + onExampleChange({ + selectedLoader, + selectedExample, + example: examples[selectedLoader][selectedExample], + state, + setState + }); + setState((state) => ({...state, examples, selectedExample, selectedLoader})); }, [props.format]); + let schema = state.loadedTable?.schema + ? {metadata: state.loadedTable?.schema.metadata, ...state.loadedTable?.schema} + : null; + return (
- onExampleChange({...props, state, setState})} + onExampleChange={(props) => onExampleChange({...props, state, setState})} > {state.error ?
{state.error}
: ''}
center long/lat: {state.viewState.longitude.toFixed(3)}, - {state.viewState.latitude.toFixed(3)}, - zoom: {state.viewState.zoom.toFixed(2)} + {state.viewState.latitude.toFixed(3)}, zoom: {state.viewState.zoom.toFixed(2)}
- setState({...state, loadedTable: null})} - onFileSelected={async (uploadedFile: File) => { + {/* TODO -restore drag and drop + setState(state => ({...state, loadedTable: null}))} + onFileSelected={async (uploadedFile: File) => { // TODO - error handling const data = (await load(uploadedFile, LOADERS, LOADER_OPTIONS)) as Table; - setState({ + setState(state => ({ ...state, selectedExample: uploadedFile.name, loadedTable: data - }); - }} + })); + }} /> + */}
setState({...state, viewState})} + onViewStateChange={({viewState}) => setState((state) => ({...state, viewState}))} controller={{type: MapController, maxPitch: 85}} - getTooltip={({object}) => - object && { - html: `\ -

${object.properties?.name}

-
${object.geometry?.coordinates?.[0]}
-
${object.geometry?.coordinates?.[1]}
`, - style: { - backgroundColor: '#ddd', - fontSize: '0.8em' + getTooltip={({object}) => { + const {name, ...properties} = object?.properties || {}; + const props = Object.entries(properties) + .map(([key, value]) => `
${key}: ${value}
`) + .join('\n'); + return ( + object && { + html: `\ +

${name}

+${props} +
Coords: ${object.geometry?.coordinates?.[0]};${object.geometry?.coordinates?.[1]}
`, + style: { + backgroundColor: '#ddd', + fontSize: '0.8em' + } } - } - } + ); + }} >
@@ -181,22 +208,26 @@ async function onExampleChange(args: { const {selectedLoader, selectedExample, example, state, setState} = args; const url = example.data; - console.log('Loading', url); try { const data = (await load(url, LOADERS, LOADER_OPTIONS)) as Table; - console.log('Loaded data', data); + console.log('Loaded data', url, data); const viewState = {...state.viewState, ...example.viewState}; - setState({...state, selectedLoader, selectedExample, viewState, loadedTable: data}); + setState((state) => ({ + ...state, + selectedLoader, + selectedExample, + viewState, + loadedTable: data + })); } catch (error) { - console.log('Failed to load data', url, error); - setState({...state, error: `Could not load ${selectedExample}: ${error.message}`}); + console.error('Failed to load data', url, error); + setState((state) => ({...state, error: `Could not load ${selectedExample}: ${error.message}`})); } } -function renderLayer({selectedExample, selectedLoader, loadedTable}) { - +function renderLayer({selectedExample, selectedLoader, examples, loadedTable}) { const geojson = loadedTable as GeoJSON; - console.warn('Rendering layer with', geojson); + const layerProps = examples[selectedLoader]?.[selectedExample]?.layerProps || {}; return [ new GeoJsonLayer({ id: `geojson-${selectedExample}(${selectedLoader})`, @@ -220,8 +251,9 @@ function renderLayer({selectedExample, selectedLoader, loadedTable}) { // point fills getFillColor: [255, 0, 0], getPointRadius: 100, - pointRadiusScale: 500 + pointRadiusScale: 500, // pointRadiusUnits: 'pixels', + ...layerProps }) ]; } diff --git a/examples/website/geospatial/components/control-panel.tsx b/examples/website/geospatial/components/control-panel.tsx index 98d8898f40..311ddc4979 100644 --- a/examples/website/geospatial/components/control-panel.tsx +++ b/examples/website/geospatial/components/control-panel.tsx @@ -1,7 +1,7 @@ import styled from 'styled-components'; -import React, {PureComponent} from 'react'; -import PropTypes from 
'prop-types'; -import {Example, INITIAL_EXAMPLE_NAME, INITIAL_LOADER_NAME} from '../examples'; +import React, {useState, useEffect} from 'react'; +import MonacoEditor from '@monaco-editor/react'; +import type {Example} from '../examples'; const Container = styled.div` display: flex; @@ -19,6 +19,19 @@ const Container = styled.div` line-height: 2; outline: none; z-index: 100; + height: calc(100vh - 105px); + width: 500px; + + .loading-indicator { + margin: 0; + text-align: center; + transition: opacity 300ms ease-out; + } + + > .monaco-editor { + height: calc(100vh - 200px) !important; + width: 700px !important; + } `; const DropDown = styled.select` @@ -37,102 +50,94 @@ type PropTypes = React.PropsWithChildren<{ }) => void; }>; -export class ControlPanel extends PureComponent { - static defaultProps: PropTypes = { +/** + * Shows the example selection dropdown and some additional information + */ +export const ControlPanel: React.FC = (props: PropTypes) => { + props = { examples: {}, droppedFile: null, selectedExample: null, selectedLoader: null, - onExampleChange: () => {} + onExampleChange: () => {}, + ...props }; - _autoSelected: boolean; - - constructor(props) { - super(props); - this._autoSelected = false; + return ( + + + + {props.children} +

Table Schema

+ +
+ ); +}; + +/** + * Shows the selected example in bold font + */ +const ExampleHeader: React.FC = ({selectedLoader, selectedExample}) => { + if (!selectedLoader || !selectedExample) { + return null; } - componentDidMount() { - const {examples = {}, onExampleChange} = this.props; - - let selectedLoader = this.props.selectedLoader; - let selectedExample = this.props.selectedExample; - - if ((!selectedLoader || !selectedExample) && !this._autoSelected) { - selectedLoader = INITIAL_LOADER_NAME; - selectedExample = examples[selectedLoader][INITIAL_EXAMPLE_NAME]; - this._autoSelected = true; - } - - if (selectedLoader && selectedExample) { - const example = examples[selectedLoader][selectedExample]; - onExampleChange({selectedLoader, selectedExample, example}); - } + return ( +
+

+ {selectedExample} {selectedLoader}{' '} +

+
+ ); +}; + +/** + * Dropdown that lets user select a new example + */ +const ExampleDropDown: React.FC = ({ + examples = {}, + selectedLoader, + selectedExample, + onExampleChange +}) => { + if (!selectedLoader || !selectedExample) { + return false; } - _renderDropDown() { - const {examples = {}, selectedLoader, selectedExample, onExampleChange} = this.props; - - if (!selectedLoader || !selectedExample) { - return false; - } - - const selectedValue = `${selectedLoader}.${selectedExample}`; - - return ( - { - const loaderExample = evt.target.value as string; - const value = loaderExample.split('.'); - const loaderName = value[0]; - const exampleName = value[1]; - const example = examples[loaderName][exampleName]; - onExampleChange({selectedLoader: loaderName, selectedExample: exampleName, example}); - }} - > - {Object.keys(examples).map((loaderName, loaderIndex) => { - const loaderExamples = examples[loaderName]; - return ( - - {Object.keys(loaderExamples).map((exampleName, exampleIndex) => { - const value = `${loaderName}.${exampleName}`; - return ( - - ); - })} - - ); - })} - - ); - } - - _renderHeader() { - const {selectedLoader, selectedExample} = this.props; - if (!selectedLoader || !selectedExample) { - return null; - } - - return ( -
-

- {selectedExample} {selectedLoader}{' '} -

-
- ); - } - - render() { - return ( - - {this._renderHeader()} - {this._renderDropDown()} - {this.props.children} - - ); - } -} + const selectedValue = `${selectedLoader}.${selectedExample}`; + + return ( + { + const loaderExample = evt.target.value as string; + const value = loaderExample.split('.'); + const loaderName = value[0]; + const exampleName = value[1]; + const example = examples[loaderName][exampleName]; + onExampleChange({selectedLoader: loaderName, selectedExample: exampleName, example}); + }} + > + {Object.keys(examples).map((loaderName, loaderIndex) => { + const loaderExamples = examples[loaderName]; + return ( + + {Object.keys(loaderExamples).map((exampleName, exampleIndex) => { + const value = `${loaderName}.${exampleName}`; + return ( + + ); + })} + + ); + })} + + ); +}; diff --git a/examples/website/geospatial/examples.ts b/examples/website/geospatial/examples.ts index fa485c8f3e..6696014cc2 100644 --- a/examples/website/geospatial/examples.ts +++ b/examples/website/geospatial/examples.ts @@ -3,6 +3,7 @@ export const INITIAL_LOADER_NAME = 'GeoParquet'; export const INITIAL_EXAMPLE_NAME = 'Airports'; + // export const INITIAL_LOADER_NAME = 'GeoJSON'; // export const INITIAL_EXAMPLE_NAME = 'Vancouver'; @@ -24,84 +25,96 @@ export type Example = { format: string; data: string; viewState?: Record; + layerProps?: Record; }; export const LOADERS_URL = 'https://raw.githubusercontent.com/visgl/loaders.gl/master'; const DECKGL_DATA_URL = 'https://raw.githubusercontent.com/visgl/deck.gl-data/master'; const PARQUET_PATH = '/formats/geoparquet'; -const GEOARROW_TEST_DATA = `${LOADERS_URL}/modules/arrow/test/data`; // geoarrow +const GEOARROW_TEST_DATA = `${LOADERS_URL}/modules/arrow/test/data/geoarrow`; export const EXAMPLES: Record> = { + GeoArrow: { + multipolygon_hole: { + format: 'geoarrow', + data: `${GEOARROW_TEST_DATA}/multipolygon_hole.arrow`, + viewState: {...VIEW_STATE, longitude: 10.388, latitude: 1.447, zoom: 4} + } + }, GeoParquet: { Airports: { 
format: 'geoparquet', data: `${LOADERS_URL}/modules/parquet/test/data/geoparquet/airports.parquet`, - viewState: { - ...VIEW_STATE, - longitude: -4.65, - latitude: -29.76, - zoom: 1.76 - } - }, - NZBuildingFootprints: { - format: 'geoparquet', - data: 'https://storage.googleapis.com/open-geodata/linz-examples/nz-building-outlines.parquet', - viewState: { - latitude: 47.65, - longitude: 7, - zoom: 4.5, - maxZoom: 20, - maxPitch: 89, - bearing: 0 - } + viewState: {...VIEW_STATE, longitude: -4.65, latitude: -29.76, zoom: 1.76} } }, GeoJSON: { Vancouver: { format: 'geojson', data: `${DECKGL_DATA_URL}/examples/geojson/vancouver-blocks.json`, - viewState: { - ...VIEW_STATE, - latitude: 49.254, - longitude: -123.13 - } + viewState: {...VIEW_STATE, latitude: 49.254, longitude: -123.13} }, Countries: { format: 'geojson', data: `${LOADERS_URL}/modules/geojson/test/data/countries.json`, - viewState: { - ...VIEW_STATE, - longitude: -4.65, - latitude: -29.76, - zoom: 1.76 - } + viewState: {...VIEW_STATE, longitude: -4.65, latitude: -29.76, zoom: 1.76} } }, GeoPackage: { Rivers: { format: 'geopackage', data: 'https://raw.githubusercontent.com/ngageoint/geopackage-js/master/test/fixtures/rivers.gpkg', - viewState: { - ...VIEW_STATE, - longitude: -4.65, - latitude: 0, - zoom: 1.76 - } + viewState: {...VIEW_STATE, longitude: -4.65, latitude: 0, zoom: 1.76} } }, FlatGeobuf: { Countries: { format: 'flatgeobuf', data: `${LOADERS_URL}/modules/flatgeobuf/test/data/countries.fgb`, - viewState: { - ...VIEW_STATE, - longitude: -4.65, - latitude: -29.76, - zoom: 1.76 - } + viewState: {...VIEW_STATE, longitude: -4.65, latitude: -29.76, zoom: 1.76}, + layerProps: {getFillColor: (_, {index}) => [index % 255, 0, 0]} } }, + Shapefile: { + 'Countries and Graticules': { + format: 'shapefile', + data: `${LOADERS_URL}/modules/shapefile/test/data/graticules-and-countries/99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.shp`, + viewState: {...VIEW_STATE, longitude: -4.65, latitude: -29.76, 
zoom: 1.76}, + layerProps: {getFillColor: (_, {index}) => [0, index % 255, 0]} + }, + 'SF Topography': { + format: 'shapefile', + data: `${DECKGL_DATA_URL}/test-data/shapefile/geo_export_14556060-0002-4a9e-8ef0-03da3e246166.shp`, + viewState: {...VIEW_STATE, latitude: 37.75, longitude: -122.4, zoom: 11} + } + }, + + KML: { + 'Congressional Districts': { + format: 'kml', + data: `${DECKGL_DATA_URL}/formats/kml/congressional-districts/cb_2022_us_cd118_20m.kml`, + viewState: {...VIEW_STATE, latitude: 14.5, longitude: -78.13, zoom: 2.6}, + layerProps: {getFillColor: (_, {index}) => [index % 255, 0, 0]} + } + }, + + TCX: { + 'TXC Sample': { + format: 'tcx', + data: `${LOADERS_URL}/modules/kml/test/data/tcx/tcx_sample.tcx`, + viewState: {...VIEW_STATE, latitude: 37.89544935, longitude: -122.4883889, zoom: 16} + } + }, + + GPX: { + Trek: { + format: 'gpx', + data: `${LOADERS_URL}/modules/kml/test/data/gpx/trek.gpx`, + viewState: {...VIEW_STATE, latitude: 44.907783722, longitude: 6.08, zoom: 13} + } + }, + GeoArrowTest: { line: {format: 'geoarrow', data: `${GEOARROW_TEST_DATA}/line.arrow`}, multiline: {format: 'geoarrow', data: `${GEOARROW_TEST_DATA}/multiline.arrow`}, @@ -117,7 +130,17 @@ export const EXAMPLES: Record> = { point: {format: 'geoarrow', data: `${GEOARROW_TEST_DATA}/point.arrow`}, polygon: {format: 'geoarrow', data: `${GEOARROW_TEST_DATA}/polygon.arrow`} }, - GeoParquetTest: getGeoParquetTestExamples() + + GeoParquetTest: getGeoParquetTestExamples(), + + KMLTests: { + // TODO - size of features is excessive. 
+ 'KML Sample': { + format: 'kml', + data: `${LOADERS_URL}/modules/kml/test/data/kml/KML_Samples.kml`, + viewState: {...VIEW_STATE, latitude: 37.65, longitude: -121.7, zoom: 11} + } + } }; // Add Geoparquet datasets @@ -185,5 +208,18 @@ function getGeoParquetTestExamples() { } } + GeoParquet.NZBuildingFootprints = { + format: 'geoparquet', + data: 'https://storage.googleapis.com/open-geodata/linz-examples/nz-building-outlines.parquet', + viewState: { + latitude: 47.65, + longitude: 7, + zoom: 4.5, + maxZoom: 20, + maxPitch: 89, + bearing: 0 + } + } + return GeoParquet; } diff --git a/examples/website/geospatial/package.json b/examples/website/geospatial/package.json index 8e226c0c94..50d173948f 100644 --- a/examples/website/geospatial/package.json +++ b/examples/website/geospatial/package.json @@ -18,16 +18,19 @@ "@loaders.gl/flatgeobuf": "^4.0.0", "@loaders.gl/geopackage": "^4.0.0", "@loaders.gl/parquet": "^4.0.0", + "@loaders.gl/shapefile": "^4.0.0", + "@loaders.gl/kml": "^4.0.0", + "@monaco-editor/react": "^4.5.0", "mapbox-gl": "npm:empty-npm-package@^1.0.0", "maplibre-gl": "^2.4.0", "node-stdlib-browser": "^1.2.0", - "react": "^18.0.0", - "react-dom": "^18.0.0", + "react": "^18.2.0", + "react-dom": "^18.2.0", "react-map-gl": "^7.0.0", "styled-components": "^4.2.0" }, "devDependencies": { - "typescript": "^5.0.4", - "vite": "^4.4.9" + "typescript": "^5.3.0", + "vite": "^5.0.0" } } diff --git a/examples/website/geospatial/tsconfig.json b/examples/website/geospatial/tsconfig.json new file mode 100644 index 0000000000..a7ca1cd93a --- /dev/null +++ b/examples/website/geospatial/tsconfig.json @@ -0,0 +1,21 @@ +{ + "include": [".", "../../../modules/*/src"], + "compilerOptions": { + "root": ["."], + // "target": "ESNext", + // "useDefineForClassFields": true, + // "lib": ["DOM", "DOM.Iterable", "ESNext"], + // "allowJs": false, + // "skipLibCheck": false, + // "esModuleInterop": false, + // "allowSyntheticDefaultImports": true, + // "strict": true, + // 
"forceConsistentCasingInFileNames": true, + // "module": "ESNext", + // "moduleResolution": "Node", + // "resolveJsonModule": true, + // "isolatedModules": true, + // "noEmit": true, + "jsx": "react-jsx" + } +} diff --git a/examples/website/geospatial/vite.config.ts b/examples/website/geospatial/vite.config.ts index 7cffe37aeb..45efbade4e 100644 --- a/examples/website/geospatial/vite.config.ts +++ b/examples/website/geospatial/vite.config.ts @@ -8,7 +8,7 @@ const getAliases = async (frameworkName, frameworkRootDir) => { modules.forEach((module) => { aliases[`${frameworkName}/${module}`] = `${frameworkRootDir}/modules/${module}/src`; }); - console.log(aliases); + // console.log(aliases); return aliases; }; diff --git a/examples/website/gltf/package.json b/examples/website/gltf/package.json index 13067cf699..9c73b95040 100644 --- a/examples/website/gltf/package.json +++ b/examples/website/gltf/package.json @@ -21,7 +21,7 @@ "@math.gl/core": "^4.0.0" }, "devDependencies": { - "typescript": "^5.0.4", - "vite": "^4.4.9" + "typescript": "^5.3.0", + "vite": "^5.0.0" } } diff --git a/examples/website/i3s-arcgis/package.json b/examples/website/i3s-arcgis/package.json index 7f8a58fbf1..4daebd5ec8 100644 --- a/examples/website/i3s-arcgis/package.json +++ b/examples/website/i3s-arcgis/package.json @@ -23,7 +23,7 @@ "react-dom": "^18.0.0" }, "devDependencies": { - "typescript": "^5.0.4", - "vite": "^4.4.9" + "typescript": "^5.3.0", + "vite": "^5.0.0" } } diff --git a/examples/website/i3s/package.json b/examples/website/i3s/package.json index 61be253b71..3a03ae64be 100644 --- a/examples/website/i3s/package.json +++ b/examples/website/i3s/package.json @@ -43,7 +43,7 @@ "styled-components": "^4.2.0" }, "devDependencies": { - "typescript": "^5.0.4", - "vite": "^4.4.9" + "typescript": "^5.3.0", + "vite": "^5.0.0" } } diff --git a/examples/website/pointcloud/package.json b/examples/website/pointcloud/package.json index 460c46acea..657ad38fe5 100644 --- 
a/examples/website/pointcloud/package.json +++ b/examples/website/pointcloud/package.json @@ -24,7 +24,7 @@ "styled-components": "^4.2.0" }, "devDependencies": { - "typescript": "^5.0.4", - "vite": "^4.4.9" + "typescript": "^5.3.0", + "vite": "^5.0.0" } } diff --git a/examples/website/textures/package.json b/examples/website/textures/package.json index 6ec7042d30..a79b0c1837 100644 --- a/examples/website/textures/package.json +++ b/examples/website/textures/package.json @@ -19,7 +19,7 @@ "styled-components": "^4.2.0" }, "devDependencies": { - "typescript": "^5.0.4", - "vite": "^4.4.9" + "typescript": "^5.3.0", + "vite": "^5.0.0" } } diff --git a/examples/website/tiles/package.json b/examples/website/tiles/package.json index 36fbd87b92..01dc1fef62 100644 --- a/examples/website/tiles/package.json +++ b/examples/website/tiles/package.json @@ -29,7 +29,7 @@ "styled-components": "^4.2.0" }, "devDependencies": { - "typescript": "^5.0.4", - "vite": "^4.4.9" + "typescript": "^5.3.0", + "vite": "^5.0.0" } } diff --git a/examples/website/tiles/tsconfig.json b/examples/website/tiles/tsconfig.json index a7ca1cd93a..17f90b19a6 100644 --- a/examples/website/tiles/tsconfig.json +++ b/examples/website/tiles/tsconfig.json @@ -2,20 +2,6 @@ "include": [".", "../../../modules/*/src"], "compilerOptions": { "root": ["."], - // "target": "ESNext", - // "useDefineForClassFields": true, - // "lib": ["DOM", "DOM.Iterable", "ESNext"], - // "allowJs": false, - // "skipLibCheck": false, - // "esModuleInterop": false, - // "allowSyntheticDefaultImports": true, - // "strict": true, - // "forceConsistentCasingInFileNames": true, - // "module": "ESNext", - // "moduleResolution": "Node", - // "resolveJsonModule": true, - // "isolatedModules": true, - // "noEmit": true, "jsx": "react-jsx" } } diff --git a/examples/website/wms/package.json b/examples/website/wms/package.json index 61c807f7af..0aaf995774 100644 --- a/examples/website/wms/package.json +++ b/examples/website/wms/package.json @@ -25,6 
+25,6 @@ }, "devDependencies": { "typescript": "^5.0.0", - "vite": "^4.2.0" + "vite": "^5.0.0" } } diff --git a/fruits.parquet b/fruits.parquet deleted file mode 100644 index d43b8c28c5..0000000000 --- a/fruits.parquet +++ /dev/null @@ -1 +0,0 @@ -PAR1 \ No newline at end of file diff --git a/modules/3d-tiles/package.json b/modules/3d-tiles/package.json index 89e01b0fdf..8d1fcbbf05 100644 --- a/modules/3d-tiles/package.json +++ b/modules/3d-tiles/package.json @@ -12,14 +12,17 @@ "url": "https://github.com/visgl/loaders.gl" }, "keywords": [ - "webgl", "loader", "3d tile", + "3d tiles", "tile", + "massive 3D", "mesh", "gltf", "point cloud", - "pointcloud" + "pointcloud", + "webgl", + "webgpu" ], "types": "dist/index.d.ts", "main": "dist/index.cjs", diff --git a/modules/3d-tiles/src/3d-tiles-archive/3d-tiles-archive-parser.ts b/modules/3d-tiles/src/3d-tiles-archive/3d-tiles-archive-parser.ts index 49906d8497..4bb16e14c0 100644 --- a/modules/3d-tiles/src/3d-tiles-archive/3d-tiles-archive-parser.ts +++ b/modules/3d-tiles/src/3d-tiles-archive/3d-tiles-archive-parser.ts @@ -1,6 +1,6 @@ import {FileProvider} from '@loaders.gl/loader-utils'; import { - cdSignature as cdHeaderSignature, + CD_HEADER_SIGNATURE, makeHashTableFromZipHeaders, parseHashTable, parseZipCDFileHeader, @@ -19,7 +19,7 @@ export const parse3DTilesArchive = async ( fileProvider: FileProvider, cb?: (msg: string) => void ): Promise => { - const hashCDOffset = await searchFromTheEnd(fileProvider, cdHeaderSignature); + const hashCDOffset = await searchFromTheEnd(fileProvider, CD_HEADER_SIGNATURE); const cdFileHeader = await parseZipCDFileHeader(hashCDOffset, fileProvider); diff --git a/modules/3d-tiles/src/lib/filesystems/tiles-3d-archive-file-system.ts b/modules/3d-tiles/src/lib/filesystems/tiles-3d-archive-file-system.ts index 4756f289f2..2865ac8ce0 100644 --- a/modules/3d-tiles/src/lib/filesystems/tiles-3d-archive-file-system.ts +++ b/modules/3d-tiles/src/lib/filesystems/tiles-3d-archive-file-system.ts @@ 
-1,7 +1,7 @@ import {FileProvider} from '@loaders.gl/loader-utils'; import { ZipFileSystem, - cdSignature as cdHeaderSignature, + CD_HEADER_SIGNATURE, searchFromTheEnd, parseZipCDFileHeader, parseHashTable, @@ -66,7 +66,7 @@ export class Tiles3DArchiveFileSystem extends ZipFileSystem { throw new Error('No data detected in the zip archive'); } - const hashCDOffset = await searchFromTheEnd(fileProvider, cdHeaderSignature); + const hashCDOffset = await searchFromTheEnd(fileProvider, CD_HEADER_SIGNATURE); const cdFileHeader = await parseZipCDFileHeader(hashCDOffset, fileProvider); diff --git a/modules/arrow/package.json b/modules/arrow/package.json index 6078d5e38a..8b97a69797 100644 --- a/modules/arrow/package.json +++ b/modules/arrow/package.json @@ -12,12 +12,17 @@ "url": "https://github.com/visgl/loaders.gl" }, "keywords": [ - "webgl", "loader", - "3d", - "mesh", - "point cloud", - "PLY" + "parser", + "writer", + "encoder", + "geoarrow", + "apache-arrow", + "arrow", + "binary columnar", + "cloud native", + "webgl", + "webgpu" ], "types": "dist/index.d.ts", "main": "dist/index.cjs", @@ -50,6 +55,7 @@ "@loaders.gl/gis": "4.0.4", "@loaders.gl/loader-utils": "4.0.4", "@loaders.gl/schema": "4.0.4", + "@loaders.gl/wkt": "4.0.4", "@math.gl/polygon": "4.0.0", "apache-arrow": "^13.0.0" }, diff --git a/modules/arrow/src/geoarrow-loader.ts b/modules/arrow/src/geoarrow-loader.ts index a10ffaaeac..802c821e6a 100644 --- a/modules/arrow/src/geoarrow-loader.ts +++ b/modules/arrow/src/geoarrow-loader.ts @@ -8,10 +8,6 @@ import type {ArrowTable, ArrowTableBatch} from './lib/arrow-table'; import {parseGeoArrowSync} from './parsers/parse-geoarrow-sync'; import {parseGeoArrowInBatches} from './parsers/parse-geoarrow-in-batches'; -// __VERSION__ is injected by babel-plugin-version-inline -// @ts-ignore TS2304: Cannot find name '__VERSION__'. -const VERSION = typeof __VERSION__ !== 'undefined' ? 
__VERSION__ : 'latest'; - export type GeoArrowLoaderOptions = LoaderOptions & { arrow?: { shape: 'arrow-table' | 'binary-geometry'; diff --git a/modules/arrow/src/geoarrow/convert-geoarrow-to-binary-geometry.ts b/modules/arrow/src/geoarrow/convert-geoarrow-to-binary-geometry.ts index 9692965614..d495947bae 100644 --- a/modules/arrow/src/geoarrow/convert-geoarrow-to-binary-geometry.ts +++ b/modules/arrow/src/geoarrow/convert-geoarrow-to-binary-geometry.ts @@ -8,6 +8,15 @@ import {GeoArrowEncoding} from '@loaders.gl/gis'; import {updateBoundsFromGeoArrowSamples} from './get-arrow-bounds'; import {TypedArray} from '@loaders.gl/loader-utils'; +/** + * Binary geometry type + */ +enum BinaryGeometryType { + points = 'points', + lines = 'lines', + polygons = 'polygons' +} + /** * Binary data from geoarrow column and can be used by e.g. deck.gl GeojsonLayer */ @@ -57,7 +66,9 @@ export type BinaryGeometriesFromArrowOptions = { /** option to specify which chunk to get binary geometries from, for progressive rendering */ chunkIndex?: number; /** option to get mean centers from geometries, for polygon filtering */ - meanCenter?: boolean; + calculateMeanCenters?: boolean; + /** option to compute the triangle indices by tesselating polygons */ + triangulate?: boolean; }; /** @@ -86,7 +97,7 @@ export function getBinaryGeometriesFromArrow( chunks.forEach((chunk) => { const {featureIds, flatCoordinateArray, nDim, geomOffset, triangles} = - getBinaryGeometriesFromChunk(chunk, geoEncoding); + getBinaryGeometriesFromChunk(chunk, geoEncoding, options); const globalFeatureIds = new Uint32Array(featureIds.length); for (let i = 0; i < featureIds.length; i++) { @@ -145,7 +156,7 @@ export function getBinaryGeometriesFromArrow( binaryGeometries, bounds, featureTypes, - ...(options?.meanCenter + ...(options?.calculateMeanCenters ? 
{meanCenters: getMeanCentersFromBinaryGeometries(binaryGeometries)} : {}) }; @@ -159,13 +170,13 @@ export function getBinaryGeometriesFromArrow( export function getMeanCentersFromBinaryGeometries(binaryGeometries: BinaryFeatures[]): number[][] { const globalMeanCenters: number[][] = []; binaryGeometries.forEach((binaryGeometry: BinaryFeatures) => { - let binaryGeometryType: string | null = null; + let binaryGeometryType: keyof typeof BinaryGeometryType | null = null; if (binaryGeometry.points && binaryGeometry.points.positions.value.length > 0) { - binaryGeometryType = 'points'; + binaryGeometryType = BinaryGeometryType.points; } else if (binaryGeometry.lines && binaryGeometry.lines.positions.value.length > 0) { - binaryGeometryType = 'lines'; + binaryGeometryType = BinaryGeometryType.lines; } else if (binaryGeometry.polygons && binaryGeometry.polygons.positions.value.length > 0) { - binaryGeometryType = 'polygons'; + binaryGeometryType = BinaryGeometryType.polygons; } const binaryContent = binaryGeometryType ? binaryGeometry[binaryGeometryType] : null; @@ -173,7 +184,8 @@ export function getMeanCentersFromBinaryGeometries(binaryGeometries: BinaryFeatu const featureIds = binaryContent.featureIds.value; const flatCoordinateArray = binaryContent.positions.value; const nDim = binaryContent.positions.size; - const primitivePolygonIndices = binaryContent.primitivePolygonIndices?.value; + const primitivePolygonIndices = + binaryContent.type === 'Polygon' ? 
binaryContent.primitivePolygonIndices?.value : undefined; const meanCenters = getMeanCentersFromGeometry( featureIds, @@ -201,30 +213,33 @@ function getMeanCentersFromGeometry( featureIds: TypedArray, flatCoordinateArray: TypedArray, nDim: number, - geometryType: string, + geometryType: keyof typeof BinaryGeometryType, primitivePolygonIndices?: TypedArray ) { const meanCenters: number[][] = []; const vertexCount = flatCoordinateArray.length; let vertexIndex = 0; + let coordIdx = 0; + let primitiveIdx = 0; while (vertexIndex < vertexCount) { const featureId = featureIds[vertexIndex / nDim]; const center = [0, 0]; let vertexCountInFeature = 0; - while (vertexIndex < vertexCount && featureIds[vertexIndex / nDim] === featureId) { + while (vertexIndex < vertexCount && featureIds[coordIdx] === featureId) { if ( - geometryType === 'polygons' && - primitivePolygonIndices && - primitivePolygonIndices.indexOf(vertexIndex / nDim) >= 0 + geometryType === BinaryGeometryType.polygons && + primitivePolygonIndices?.[primitiveIdx] === coordIdx ) { // skip the first point since it is the same as the last point in each ring for polygons vertexIndex += nDim; + primitiveIdx++; } else { center[0] += flatCoordinateArray[vertexIndex]; center[1] += flatCoordinateArray[vertexIndex + 1]; vertexIndex += nDim; vertexCountInFeature++; } + coordIdx += 1; } center[0] /= vertexCountInFeature; center[1] /= vertexCountInFeature; @@ -237,11 +252,13 @@ function getMeanCentersFromGeometry( * get binary geometries from geoarrow column * @param chunk one chunk/batch of geoarrow column * @param geoEncoding geo encoding of the geoarrow column + * @param options options for getting binary geometries * @returns BinaryGeometryContent */ function getBinaryGeometriesFromChunk( chunk: arrow.Data, - geoEncoding: GeoArrowEncoding + geoEncoding: GeoArrowEncoding, + options?: BinaryGeometriesFromArrowOptions ): BinaryGeometryContent { switch (geoEncoding) { case 'geoarrow.point': @@ -252,7 +269,7 @@ function 
getBinaryGeometriesFromChunk( return getBinaryLinesFromChunk(chunk, geoEncoding); case 'geoarrow.polygon': case 'geoarrow.multipolygon': - return getBinaryPolygonsFromChunk(chunk, geoEncoding); + return getBinaryPolygonsFromChunk(chunk, geoEncoding, options); default: throw Error('invalid geoarrow encoding'); } @@ -271,47 +288,62 @@ export function getTriangleIndices( primitivePolygonIndices: Int32Array, flatCoordinateArray: Float64Array, nDim: number -): Uint32Array { - let primitiveIndex = 0; - const triangles: number[] = []; - // loop polygonIndices to get triangles - for (let i = 0; i < polygonIndices.length - 1; i++) { - const startIdx = polygonIndices[i]; - const endIdx = polygonIndices[i + 1]; - // get subarray of flatCoordinateArray - const slicedFlatCoords = flatCoordinateArray.subarray(startIdx * nDim, endIdx * nDim); - // get holeIndices for earcut - const holeIndices: number[] = []; - while (primitivePolygonIndices[primitiveIndex] < endIdx) { - if (primitivePolygonIndices[primitiveIndex] > startIdx) { - holeIndices.push(primitivePolygonIndices[primitiveIndex] - startIdx); +): Uint32Array | null { + try { + let primitiveIndex = 0; + const triangles: number[] = []; + // loop polygonIndices to get triangles + for (let i = 0; i < polygonIndices.length - 1; i++) { + const startIdx = polygonIndices[i]; + const endIdx = polygonIndices[i + 1]; + // get subarray of flatCoordinateArray + const slicedFlatCoords = flatCoordinateArray.subarray(startIdx * nDim, endIdx * nDim); + // get holeIndices for earcut + const holeIndices: number[] = []; + while (primitivePolygonIndices[primitiveIndex] < endIdx) { + if (primitivePolygonIndices[primitiveIndex] > startIdx) { + holeIndices.push(primitivePolygonIndices[primitiveIndex] - startIdx); + } + primitiveIndex++; + } + const triangleIndices = earcut( + slicedFlatCoords, + holeIndices.length > 0 ? 
holeIndices : undefined, + nDim + ); + if (triangleIndices.length === 0) { + throw Error('can not tesselate invalid polygon'); + } + for (let j = 0; j < triangleIndices.length; j++) { + triangles.push(triangleIndices[j] + startIdx); } - primitiveIndex++; } - const triangleIndices = earcut( - slicedFlatCoords, - holeIndices.length > 0 ? holeIndices : undefined, - nDim - ); - for (let j = 0; j < triangleIndices.length; j++) { - triangles.push(triangleIndices[j] + startIdx); + // convert traingles to Uint32Array + const trianglesUint32 = new Uint32Array(triangles.length); + for (let i = 0; i < triangles.length; i++) { + trianglesUint32[i] = triangles[i]; } + return trianglesUint32; + } catch (error) { + // TODO - add logging + // there is an expection when tesselating invalid polygon, e.g. polygon with self-intersection + // return null to skip tesselating + return null; } - // convert traingles to Uint32Array - const trianglesUint32 = new Uint32Array(triangles.length); - for (let i = 0; i < triangles.length; i++) { - trianglesUint32[i] = triangles[i]; - } - return trianglesUint32; } /** * get binary polygons from geoarrow polygon column * @param chunk one chunk of geoarrow polygon column * @param geoEncoding the geo encoding of the geoarrow polygon column + * @param options options for getting binary geometries * @returns BinaryGeometryContent */ -function getBinaryPolygonsFromChunk(chunk: arrow.Data, geoEncoding: string): BinaryGeometryContent { +function getBinaryPolygonsFromChunk( + chunk: arrow.Data, + geoEncoding: string, + options?: BinaryGeometriesFromArrowOptions +): BinaryGeometryContent { const isMultiPolygon = geoEncoding === 'geoarrow.multipolygon'; const polygonData = isMultiPolygon ? 
chunk.children[0] : chunk; @@ -341,14 +373,17 @@ function getBinaryPolygonsFromChunk(chunk: arrow.Data, geoEncoding: string): Bin } } - const triangles = getTriangleIndices(geometryIndicies, geomOffset, flatCoordinateArray, nDim); + const triangles = options?.triangulate + ? getTriangleIndices(geometryIndicies, geomOffset, flatCoordinateArray, nDim) + : null; + return { featureIds, flatCoordinateArray, nDim, geomOffset, geometryIndicies, - triangles + ...(options?.triangulate && triangles ? {triangles} : {}) }; } diff --git a/modules/arrow/src/geoarrow/convert-geoarrow-to-geojson.ts b/modules/arrow/src/geoarrow/convert-geoarrow-to-geojson-geometry.ts similarity index 61% rename from modules/arrow/src/geoarrow/convert-geoarrow-to-geojson.ts rename to modules/arrow/src/geoarrow/convert-geoarrow-to-geojson-geometry.ts index bf48456acd..58f986ee34 100644 --- a/modules/arrow/src/geoarrow/convert-geoarrow-to-geojson.ts +++ b/modules/arrow/src/geoarrow/convert-geoarrow-to-geojson-geometry.ts @@ -1,81 +1,95 @@ // loaders.gl, MIT license // Copyright (c) vis.gl contributors -import * as arrow from 'apache-arrow'; +// import * as arrow from 'apache-arrow'; import { - Feature, MultiPolygon, Position, Polygon, MultiPoint, Point, MultiLineString, - LineString + LineString, + Geometry, + BinaryGeometry } from '@loaders.gl/schema'; -import type {GeoArrowEncoding} from '@loaders.gl/gis'; - -type RawArrowFeature = { - data: arrow.Vector; - encoding?: GeoArrowEncoding; -}; +import {binaryToGeometry, type GeoArrowEncoding} from '@loaders.gl/gis'; +import {WKBLoader, WKTLoader} from '@loaders.gl/wkt'; /** * parse geometry from arrow data that is returned from processArrowData() - * NOTE: this function could be duplicated with the binaryToFeature() in deck.gl, - * it is currently only used for picking because currently deck.gl returns only the index of the feature - * So the following functions could be deprecated once deck.gl returns the feature directly for binary geojson layer + * 
NOTE: this function could be deduplicated with the binaryToFeature() in deck.gl, + * it is currently used for deck.gl picking because currently deck.gl returns only the index of the feature * - * @param rawData the raw geometry data returned from processArrowData, which is an object with two properties: encoding and data - * @see processArrowData + * @param data data extraced from arrow vector representing a geometry + * @param encoding the geoarrow encoding of the geometry column * @returns Feature or null */ -export function parseGeometryFromArrow(rawData: RawArrowFeature): Feature | null { - const encoding = rawData.encoding?.toLowerCase() as typeof rawData.encoding; - const data = rawData.data; - if (!encoding || !data) { +export function parseGeometryFromArrow( + arrowCellValue: any, + encoding?: GeoArrowEncoding +): Geometry | null { + // sanity + encoding = encoding?.toLowerCase() as GeoArrowEncoding; + if (!encoding || !arrowCellValue) { return null; } - let geometry; + let geometry: Geometry; switch (encoding) { case 'geoarrow.multipolygon': - geometry = arrowMultiPolygonToFeature(data); + geometry = arrowMultiPolygonToFeature(arrowCellValue); break; case 'geoarrow.polygon': - geometry = arrowPolygonToFeature(data); + geometry = arrowPolygonToFeature(arrowCellValue); break; case 'geoarrow.multipoint': - geometry = arrowMultiPointToFeature(data); + geometry = arrowMultiPointToFeature(arrowCellValue); break; case 'geoarrow.point': - geometry = arrowPointToFeature(data); + geometry = arrowPointToFeature(arrowCellValue); break; case 'geoarrow.multilinestring': - geometry = arrowMultiLineStringToFeature(data); + geometry = arrowMultiLineStringToFeature(arrowCellValue); break; case 'geoarrow.linestring': - geometry = arrowLineStringToFeature(data); + geometry = arrowLineStringToFeature(arrowCellValue); break; case 'geoarrow.wkb': - throw Error(`GeoArrow encoding not supported ${encoding}`); + geometry = arrowWKBToFeature(arrowCellValue); + break; case 
'geoarrow.wkt': - throw Error(`GeoArrow encoding not supported ${encoding}`); + geometry = arrowWKTToFeature(arrowCellValue); + break; default: { throw Error(`GeoArrow encoding not supported ${encoding}`); } } - return { - type: 'Feature', - geometry, - properties: {} - }; + + return geometry; +} + +function arrowWKBToFeature(arrowCellValue: any) { + // The actual WKB array buffer starts from byteOffset and ends at byteOffset + byteLength + const arrayBuffer: ArrayBuffer = arrowCellValue.buffer.slice( + arrowCellValue.byteOffset, + arrowCellValue.byteOffset + arrowCellValue.byteLength + ); + const binaryGeometry = WKBLoader.parseSync?.(arrayBuffer)! as BinaryGeometry; + const geometry = binaryToGeometry(binaryGeometry); + return geometry; +} + +function arrowWKTToFeature(arrowCellValue: any) { + const string: string = arrowCellValue; + return WKTLoader.parseTextSync?.(string)!; } /** * convert Arrow MultiPolygon to geojson Feature */ -function arrowMultiPolygonToFeature(arrowMultiPolygon: arrow.Vector): MultiPolygon { +function arrowMultiPolygonToFeature(arrowMultiPolygon: any): MultiPolygon { const multiPolygon: Position[][][] = []; for (let m = 0; m < arrowMultiPolygon.length; m++) { const arrowPolygon = arrowMultiPolygon.get(m); @@ -102,7 +116,7 @@ function arrowMultiPolygonToFeature(arrowMultiPolygon: arrow.Vector): MultiPolyg /** * convert Arrow Polygon to geojson Feature */ -function arrowPolygonToFeature(arrowPolygon: arrow.Vector): Polygon { +function arrowPolygonToFeature(arrowPolygon: any): Polygon { const polygon: Position[][] = []; for (let i = 0; arrowPolygon && i < arrowPolygon.length; i++) { const arrowRing = arrowPolygon.get(i); @@ -124,7 +138,7 @@ function arrowPolygonToFeature(arrowPolygon: arrow.Vector): Polygon { /** * convert Arrow MultiPoint to geojson MultiPoint */ -function arrowMultiPointToFeature(arrowMultiPoint: arrow.Vector): MultiPoint { +function arrowMultiPointToFeature(arrowMultiPoint: any): MultiPoint { const multiPoint: Position[] 
= []; for (let i = 0; arrowMultiPoint && i < arrowMultiPoint.length; i++) { const arrowPoint = arrowMultiPoint.get(i); @@ -133,29 +147,27 @@ function arrowMultiPointToFeature(arrowMultiPoint: arrow.Vector): MultiPoint { multiPoint.push(coord); } } - const geometry: MultiPoint = { + return { type: 'MultiPoint', coordinates: multiPoint }; - return geometry; } /** * convert Arrow Point to geojson Point */ -function arrowPointToFeature(arrowPoint: arrow.Vector): Point { +function arrowPointToFeature(arrowPoint: any): Point { const point: Position = Array.from(arrowPoint); - const geometry: Point = { + return { type: 'Point', coordinates: point }; - return geometry; } /** * convert Arrow MultiLineString to geojson MultiLineString */ -function arrowMultiLineStringToFeature(arrowMultiLineString: arrow.Vector): MultiLineString { +function arrowMultiLineStringToFeature(arrowMultiLineString: any): MultiLineString { const multiLineString: Position[][] = []; for (let i = 0; arrowMultiLineString && i < arrowMultiLineString.length; i++) { const arrowLineString = arrowMultiLineString.get(i); @@ -169,17 +181,16 @@ function arrowMultiLineStringToFeature(arrowMultiLineString: arrow.Vector): Mult } multiLineString.push(lineString); } - const geometry: MultiLineString = { + return { type: 'MultiLineString', coordinates: multiLineString }; - return geometry; } /** * convert Arrow LineString to geojson LineString */ -function arrowLineStringToFeature(arrowLineString: arrow.Vector): LineString { +function arrowLineStringToFeature(arrowLineString: any): LineString { const lineString: Position[] = []; for (let i = 0; arrowLineString && i < arrowLineString.length; i++) { const arrowCoord = arrowLineString.get(i); @@ -188,9 +199,8 @@ function arrowLineStringToFeature(arrowLineString: arrow.Vector): LineString { lineString.push(coords); } } - const geometry: LineString = { + return { type: 'LineString', coordinates: lineString }; - return geometry; } diff --git a/modules/arrow/src/index.ts 
b/modules/arrow/src/index.ts index 7ed054c861..f30799d123 100644 --- a/modules/arrow/src/index.ts +++ b/modules/arrow/src/index.ts @@ -60,7 +60,7 @@ export { export {updateBoundsFromGeoArrowSamples} from './geoarrow/get-arrow-bounds'; -export {parseGeometryFromArrow} from './geoarrow/convert-geoarrow-to-geojson'; +export {parseGeometryFromArrow} from './geoarrow/convert-geoarrow-to-geojson-geometry'; export {convertArrowToGeoJSONTable} from './tables/convert-arrow-to-geojson-table'; diff --git a/modules/arrow/src/parsers/parse-arrow-sync.ts b/modules/arrow/src/parsers/parse-arrow-sync.ts index f6a71f1419..43c585d88c 100644 --- a/modules/arrow/src/parsers/parse-arrow-sync.ts +++ b/modules/arrow/src/parsers/parse-arrow-sync.ts @@ -6,6 +6,7 @@ import type {ArrowTable} from '../lib/arrow-table'; import {convertTable} from '@loaders.gl/schema'; import * as arrow from 'apache-arrow'; import {convertArrowToColumnarTable} from '../tables/convert-arrow-to-columnar-table'; +import {serializeArrowSchema} from '../schema/convert-arrow-schema'; // Parses arrow to a columnar table export function parseArrowSync( @@ -13,7 +14,11 @@ export function parseArrowSync( options?: {shape?: 'arrow-table' | 'columnar-table' | 'object-row-table' | 'array-row-table'} ): ArrowTable | ColumnarTable | ObjectRowTable | ArrayRowTable { const apacheArrowTable = arrow.tableFromIPC([new Uint8Array(arrayBuffer)]); - const arrowTable: ArrowTable = {shape: 'arrow-table', data: apacheArrowTable}; + const arrowTable: ArrowTable = { + shape: 'arrow-table', + schema: serializeArrowSchema(apacheArrowTable.schema), + data: apacheArrowTable + }; const shape = options?.shape || 'arrow-table'; switch (shape) { diff --git a/modules/arrow/src/tables/convert-arrow-to-columnar-table.ts b/modules/arrow/src/tables/convert-arrow-to-columnar-table.ts index c9d609765c..f52e269ac0 100644 --- a/modules/arrow/src/tables/convert-arrow-to-columnar-table.ts +++ b/modules/arrow/src/tables/convert-arrow-to-columnar-table.ts @@ 
-24,6 +24,7 @@ export function convertArrowToColumnarTable(table: ArrowTable): ColumnarTable { return { shape: 'columnar-table', + schema: table.schema, data: columnarTable }; } diff --git a/modules/arrow/src/tables/convert-arrow-to-geojson-table.ts b/modules/arrow/src/tables/convert-arrow-to-geojson-table.ts index 41a9170688..7fdf8adb43 100644 --- a/modules/arrow/src/tables/convert-arrow-to-geojson-table.ts +++ b/modules/arrow/src/tables/convert-arrow-to-geojson-table.ts @@ -2,7 +2,7 @@ // Copyright (c) vis.gl contributors import type {Feature, GeoJSONTable} from '@loaders.gl/schema'; -import type * as arrow from 'apache-arrow'; +import * as arrow from 'apache-arrow'; import type {ArrowTable} from '../lib/arrow-table'; import {serializeArrowSchema, parseGeometryFromArrow} from '@loaders.gl/arrow'; import {getGeometryColumnsFromSchema} from '@loaders.gl/gis'; @@ -16,6 +16,7 @@ import {getGeometryColumnsFromSchema} from '@loaders.gl/gis'; export function convertApacheArrowToArrowTable(arrowTable: arrow.Table): ArrowTable { return { shape: 'arrow-table', + schema: serializeArrowSchema(arrowTable.schema), data: arrowTable }; } @@ -34,21 +35,31 @@ export function convertArrowToGeoJSONTable(table: ArrowTable): GeoJSONTable { const features: Feature[] = []; - for (let row = 0; row < arrowTable.numRows; row++) { - // get first geometry from arrow geometry column - const arrowGeometry = arrowTable.getChild('geometry')?.get(row); - const arrowGeometryObject = {encoding, data: arrowGeometry}; + // Remove geometry columns + const propertyColumnNames = arrowTable.schema.fields + .map((field) => field.name) + // TODO - this deletes all geometry columns + .filter((name) => !(name in geometryColumns)); + const propertiesTable = arrowTable.select(propertyColumnNames); + + const arrowGeometryColumn = arrowTable.getChild('geometry'); + for (let row = 0; row < arrowTable.numRows; row++) { + // get the geometry value from arrow geometry column + // Note that type can vary + const 
arrowGeometry = arrowGeometryColumn?.get(row); // parse arrow geometry to geojson feature - const feature = parseGeometryFromArrow(arrowGeometryObject); + const feature = parseGeometryFromArrow(arrowGeometry, encoding); if (feature) { - features.push(feature); + const properties = propertiesTable.get(row)?.toJSON() || {}; + features.push({type: 'Feature', geometry: feature, properties}); } } return { shape: 'geojson-table', type: 'FeatureCollection', + schema: table.schema, features }; } diff --git a/modules/arrow/src/tables/convert-columnar-to-row-table.ts b/modules/arrow/src/tables/convert-columnar-to-row-table.ts index f0d0fb841f..ad49de2c27 100644 --- a/modules/arrow/src/tables/convert-columnar-to-row-table.ts +++ b/modules/arrow/src/tables/convert-columnar-to-row-table.ts @@ -24,6 +24,7 @@ export function convertColumnarToRowFormatTable(columnarTable: ColumnarTable): O return { shape: 'object-row-table', + schema: columnarTable.schema, data: rowFormatTable }; } diff --git a/modules/arrow/src/triangulate-on-worker.ts b/modules/arrow/src/triangulate-on-worker.ts index 34fdc24f7f..3d8ce90850 100644 --- a/modules/arrow/src/triangulate-on-worker.ts +++ b/modules/arrow/src/triangulate-on-worker.ts @@ -22,7 +22,7 @@ export type TriangulateInput = { /** Result type for operation: 'triangulate' */ export type TriangulateResult = TriangulateInput & { - triangleIndices: Uint32Array; + triangleIndices?: Uint32Array; }; /** diff --git a/modules/arrow/src/workers/triangulation-worker.ts b/modules/arrow/src/workers/triangulation-worker.ts index f02c7d12d0..8e5972bbe6 100644 --- a/modules/arrow/src/workers/triangulation-worker.ts +++ b/modules/arrow/src/workers/triangulation-worker.ts @@ -35,5 +35,5 @@ function triangulateBatch(data: TriangulateInput): TriangulateResult { data.flatCoordinateArray, data.nDim ); - return {...data, triangleIndices}; + return {...data, ...(triangleIndices ? 
{triangleIndices} : {})}; } diff --git a/modules/arrow/test/data/README.md b/modules/arrow/test/data/README.md index 9b46113646..2d7b0f1835 100644 --- a/modules/arrow/test/data/README.md +++ b/modules/arrow/test/data/README.md @@ -3,3 +3,21 @@ - `dictionary.arrow`, `simple.arrow`, `struct.arrow` - Apache 2 License (copied from https://github.com/wesm/arrow-1) - `biogrid-nodes.arrow` - from graphistry. + + +## geoarrow + +```sh +ogr2ogr point_wkb.arrow point.arrow -f Arrow -lco COMPRESSION=NONE -lco GEOMETRY_ENCODING=WKB +ogr2ogr line_wkb.arrow line.arrow -f Arrow -lco COMPRESSION=NONE -lco GEOMETRY_ENCODING=WKB +ogr2ogr polygon_wkb.arrow polygon.arrow -f Arrow -lco COMPRESSION=NONE -lco GEOMETRY_ENCODING=WKB +ogr2ogr multipolygon_wkb.arrow multipolygon.arrow -f Arrow -lco COMPRESSION=NONE -lco GEOMETRY_ENCODING=WKB +ogr2ogr multipolygon_hole_wkb.arrow multipolygon_hole.arrow -f Arrow -lco COMPRESSION=NONE -lco GEOMETRY_ENCODING=WKB + + +ogr2ogr point_wkt.arrow point.arrow -f Arrow -lco COMPRESSION=NONE -lco GEOMETRY_ENCODING=WKT +ogr2ogr line_wkt.arrow line.arrow -f Arrow -lco COMPRESSION=NONE -lco GEOMETRY_ENCODING=WKT +ogr2ogr polygon_wkt.arrow polygon.arrow -f Arrow -lco COMPRESSION=NONE -lco GEOMETRY_ENCODING=WKT +ogr2ogr multipolygon_wkt.arrow multipolygon.arrow -f Arrow -lco COMPRESSION=NONE -lco GEOMETRY_ENCODING=WKT +ogr2ogr multipolygon_hole_wkt.arrow multipolygon_hole.arrow -f Arrow -lco COMPRESSION=NONE -lco GEOMETRY_ENCODING=WKT +``` \ No newline at end of file diff --git a/modules/arrow/test/data/geoarrow/line_wkb.arrow b/modules/arrow/test/data/geoarrow/line_wkb.arrow new file mode 100644 index 0000000000..e5897d8c98 Binary files /dev/null and b/modules/arrow/test/data/geoarrow/line_wkb.arrow differ diff --git a/modules/arrow/test/data/geoarrow/line_wkt.arrow b/modules/arrow/test/data/geoarrow/line_wkt.arrow new file mode 100644 index 0000000000..02ea449eb1 Binary files /dev/null and b/modules/arrow/test/data/geoarrow/line_wkt.arrow differ diff --git 
a/modules/arrow/test/data/geoarrow/multipolygon_hole_wkb.arrow b/modules/arrow/test/data/geoarrow/multipolygon_hole_wkb.arrow new file mode 100644 index 0000000000..9b84c7e452 Binary files /dev/null and b/modules/arrow/test/data/geoarrow/multipolygon_hole_wkb.arrow differ diff --git a/modules/arrow/test/data/geoarrow/multipolygon_hole_wkt.arrow b/modules/arrow/test/data/geoarrow/multipolygon_hole_wkt.arrow new file mode 100644 index 0000000000..43b7c7afb2 Binary files /dev/null and b/modules/arrow/test/data/geoarrow/multipolygon_hole_wkt.arrow differ diff --git a/modules/arrow/test/data/geoarrow/multipolygon_wkb.arrow b/modules/arrow/test/data/geoarrow/multipolygon_wkb.arrow new file mode 100644 index 0000000000..f74a9305ae Binary files /dev/null and b/modules/arrow/test/data/geoarrow/multipolygon_wkb.arrow differ diff --git a/modules/arrow/test/data/geoarrow/multipolygon_wkt.arrow b/modules/arrow/test/data/geoarrow/multipolygon_wkt.arrow new file mode 100644 index 0000000000..ac76c5fd71 Binary files /dev/null and b/modules/arrow/test/data/geoarrow/multipolygon_wkt.arrow differ diff --git a/modules/arrow/test/data/geoarrow/point_wkb.arrow b/modules/arrow/test/data/geoarrow/point_wkb.arrow new file mode 100644 index 0000000000..7201045f9a Binary files /dev/null and b/modules/arrow/test/data/geoarrow/point_wkb.arrow differ diff --git a/modules/arrow/test/data/geoarrow/point_wkt.arrow b/modules/arrow/test/data/geoarrow/point_wkt.arrow new file mode 100644 index 0000000000..09423d6d58 Binary files /dev/null and b/modules/arrow/test/data/geoarrow/point_wkt.arrow differ diff --git a/modules/arrow/test/data/geoarrow/polygon_wkb.arrow b/modules/arrow/test/data/geoarrow/polygon_wkb.arrow new file mode 100644 index 0000000000..c6f9941dad Binary files /dev/null and b/modules/arrow/test/data/geoarrow/polygon_wkb.arrow differ diff --git a/modules/arrow/test/data/geoarrow/polygon_wkt.arrow b/modules/arrow/test/data/geoarrow/polygon_wkt.arrow new file mode 100644 index 
0000000000..48d4db9910 Binary files /dev/null and b/modules/arrow/test/data/geoarrow/polygon_wkt.arrow differ diff --git a/modules/arrow/test/data/geoarrow/test-cases.ts b/modules/arrow/test/data/geoarrow/test-cases.ts index 6c3b1f52b6..93f6778a3e 100644 --- a/modules/arrow/test/data/geoarrow/test-cases.ts +++ b/modules/arrow/test/data/geoarrow/test-cases.ts @@ -1,14 +1,34 @@ import {FeatureCollection} from '@loaders.gl/schema'; -export const POINT_ARROW_FILE = '@loaders.gl/arrow/test/data/geoarrow/point.arrow'; -export const MULTIPOINT_ARROW_FILE = '@loaders.gl/arrow/test/data/geoarrow/multipoint.arrow'; -export const LINE_ARROW_FILE = '@loaders.gl/arrow/test/data/geoarrow/line.arrow'; -export const MULTILINE_ARROW_FILE = '@loaders.gl/arrow/test/data/geoarrow/multiline.arrow'; -export const POLYGON_ARROW_FILE = '@loaders.gl/arrow/test/data/geoarrow/polygon.arrow'; -export const MULTIPOLYGON_ARROW_FILE = '@loaders.gl/arrow/test/data/geoarrow/multipolygon.arrow'; -export const MULTIPOLYGON_HOLE_ARROW_FILE = +export const GEOARROW_POINT_FILE = '@loaders.gl/arrow/test/data/geoarrow/point.arrow'; +export const GEOARROW_MULTIPOINT_FILE = '@loaders.gl/arrow/test/data/geoarrow/multipoint.arrow'; +export const GEOARROW_LINE_FILE = '@loaders.gl/arrow/test/data/geoarrow/line.arrow'; +export const GEOARROW_MULTILINE_FILE = '@loaders.gl/arrow/test/data/geoarrow/multiline.arrow'; +export const GEOARROW_POLYGON_FILE = '@loaders.gl/arrow/test/data/geoarrow/polygon.arrow'; +export const GEOARROW_MULTIPOLYGON_FILE = '@loaders.gl/arrow/test/data/geoarrow/multipolygon.arrow'; +export const GEOARROW_MULTIPOLYGON_HOLE_FILE = '@loaders.gl/arrow/test/data/geoarrow/multipolygon_hole.arrow'; +export const GEOARROW_POINT_WKB_FILE = '@loaders.gl/arrow/test/data/geoarrow/point_wkb.arrow'; +// export const GEOARROW_MULTIPOINT_FILE = '@loaders.gl/arrow/test/data/geoarrow/multipoint_wkb.arrow'; +export const GEOARROW_LINE_WKB_FILE = '@loaders.gl/arrow/test/data/geoarrow/line_wkb.arrow'; +// 
export const GEOARROW_MULTILINE_FILE = '@loaders.gl/arrow/test/data/geoarrow/multiline_wkb.arrow'; +export const GEOARROW_POLYGON_WKB_FILE = '@loaders.gl/arrow/test/data/geoarrow/polygon_wkb.arrow'; +export const GEOARROW_MULTIPOLYGON_WKB_FILE = + '@loaders.gl/arrow/test/data/geoarrow/multipolygon_wkb.arrow'; +export const GEOARROW_MULTIPOLYGON_HOLE_WKB_FILE = + '@loaders.gl/arrow/test/data/geoarrow/multipolygon_hole_wkb.arrow'; + +export const GEOARROW_POINT_WKT_FILE = '@loaders.gl/arrow/test/data/geoarrow/point_wkt.arrow'; +// export const GEOARROW_MULTIPOINT_FILE = '@loaders.gl/arrow/test/data/geoarrow/multipoint_wkt.arrow'; +export const GEOARROW_LINE_WKT_FILE = '@loaders.gl/arrow/test/data/geoarrow/line_wkt.arrow'; +// export const GEOARROW_MULTILINE_FILE = '@loaders.gl/arrow/test/data/geoarrow/multiline_wkt.arrow'; +export const GEOARROW_POLYGON_WKT_FILE = '@loaders.gl/arrow/test/data/geoarrow/polygon_wkt.arrow'; +export const GEOARROW_MULTIPOLYGON_WKT_FILE = + '@loaders.gl/arrow/test/data/geoarrow/multipolygon_wkt.arrow'; +export const GEOARROW_MULTIPOLYGON_HOLE_WKT_FILE = + '@loaders.gl/arrow/test/data/geoarrow/multipolygon_hole_wkt.arrow'; + /** Array containing all encodings */ export const GEOARROW_ENCODINGS = [ 'geoarrow.multipolygon', @@ -331,11 +351,27 @@ export const expectedMultiPolygonWithHoleGeojson: FeatureCollection = { }; export const GEOARROW_TEST_CASES: [string, FeatureCollection][] = [ - [MULTIPOLYGON_HOLE_ARROW_FILE, expectedMultiPolygonWithHoleGeojson], - [POINT_ARROW_FILE, expectedPointGeojson], - [MULTIPOINT_ARROW_FILE, expectedMultiPointGeoJson], - [LINE_ARROW_FILE, expectedLineStringGeoJson], - [MULTILINE_ARROW_FILE, expectedMultiLineStringGeoJson], - [POLYGON_ARROW_FILE, expectedPolygonGeojson], - [MULTIPOLYGON_ARROW_FILE, expectedMultiPolygonGeojson] + [GEOARROW_POINT_FILE, expectedPointGeojson], + [GEOARROW_MULTIPOINT_FILE, expectedMultiPointGeoJson], + [GEOARROW_LINE_FILE, expectedLineStringGeoJson], + [GEOARROW_MULTILINE_FILE, 
expectedMultiLineStringGeoJson], + [GEOARROW_POLYGON_FILE, expectedPolygonGeojson], + [GEOARROW_MULTIPOLYGON_FILE, expectedMultiPolygonGeojson], + [GEOARROW_MULTIPOLYGON_HOLE_FILE, expectedMultiPolygonWithHoleGeojson], + + [GEOARROW_POINT_WKT_FILE, expectedPointGeojson], + // [GEOARROW_MULTIPOINT_WKT_FILE, expectedMultiPointGeoJson], + [GEOARROW_LINE_WKT_FILE, expectedLineStringGeoJson], + // [GEOARROW_MULTILINE_WKT_FILE, expectedMultiLineStringGeoJson], + [GEOARROW_POLYGON_WKT_FILE, expectedPolygonGeojson], + [GEOARROW_MULTIPOLYGON_WKT_FILE, expectedMultiPolygonGeojson], + [GEOARROW_MULTIPOLYGON_HOLE_WKT_FILE, expectedMultiPolygonWithHoleGeojson], + + [GEOARROW_POINT_WKB_FILE, expectedPointGeojson], + // [GEOARROW_MULTIPOINT_WKB_FILE, expectedMultiPointGeoJson], + [GEOARROW_LINE_WKB_FILE, expectedLineStringGeoJson], + // [GEOARROW_MULTILINE_WKB_FILE, expectedMultiLineStringGeoJson], + [GEOARROW_POLYGON_WKB_FILE, expectedPolygonGeojson], + [GEOARROW_MULTIPOLYGON_WKB_FILE, expectedMultiPolygonGeojson], + [GEOARROW_MULTIPOLYGON_HOLE_WKB_FILE, expectedMultiPolygonWithHoleGeojson] ]; diff --git a/modules/arrow/test/geoarrow/convert-geoarrow-to-binary-geometry.spec.ts b/modules/arrow/test/geoarrow/convert-geoarrow-to-binary-geometry.spec.ts index 3a008472dc..2c87dc2a93 100644 --- a/modules/arrow/test/geoarrow/convert-geoarrow-to-binary-geometry.spec.ts +++ b/modules/arrow/test/geoarrow/convert-geoarrow-to-binary-geometry.spec.ts @@ -4,7 +4,7 @@ import test, {Test} from 'tape-promise/tape'; import {getGeometryColumnsFromSchema} from '@loaders.gl/gis'; -import {fetchFile, parse} from '@loaders.gl/core'; +import {load} from '@loaders.gl/core'; import { BINARY_GEOMETRY_TEMPLATE, ArrowLoader, @@ -13,13 +13,13 @@ import { } from '@loaders.gl/arrow'; import { - POINT_ARROW_FILE, - MULTIPOINT_ARROW_FILE, - LINE_ARROW_FILE, - MULTILINE_ARROW_FILE, - POLYGON_ARROW_FILE, - MULTIPOLYGON_ARROW_FILE, - MULTIPOLYGON_HOLE_ARROW_FILE + GEOARROW_POINT_FILE, + GEOARROW_MULTIPOINT_FILE, + 
GEOARROW_LINE_FILE, + GEOARROW_MULTILINE_FILE, + GEOARROW_POLYGON_FILE, + GEOARROW_MULTIPOLYGON_FILE, + GEOARROW_MULTIPOLYGON_HOLE_FILE } from '../data/geoarrow/test-cases'; const expectedPointBinaryGeometry = { @@ -299,20 +299,20 @@ const expectedMultiPolygonHolesBinaryGeometry = { ] }; -test('ArrowUtils#getBinaryGeometriesFromArrow', (t) => { +test('ArrowUtils#getBinaryGeometriesFromArrow', async (t) => { const testCases = [ - [POINT_ARROW_FILE, expectedPointBinaryGeometry], - [MULTIPOINT_ARROW_FILE, expectedMultiPointBinaryGeometry], - [LINE_ARROW_FILE, expectedLineBinaryGeometry], - [MULTILINE_ARROW_FILE, expectedMultiLineBinaryGeometry], - [POLYGON_ARROW_FILE, expectedPolygonBinaryGeometry], - [MULTIPOLYGON_ARROW_FILE, expectedMultiPolygonBinaryGeometry], - [MULTIPOLYGON_HOLE_ARROW_FILE, expectedMultiPolygonHolesBinaryGeometry] + [GEOARROW_POINT_FILE, expectedPointBinaryGeometry], + [GEOARROW_MULTIPOINT_FILE, expectedMultiPointBinaryGeometry], + [GEOARROW_LINE_FILE, expectedLineBinaryGeometry], + [GEOARROW_MULTILINE_FILE, expectedMultiLineBinaryGeometry], + [GEOARROW_POLYGON_FILE, expectedPolygonBinaryGeometry], + [GEOARROW_MULTIPOLYGON_FILE, expectedMultiPolygonBinaryGeometry], + [GEOARROW_MULTIPOLYGON_HOLE_FILE, expectedMultiPolygonHolesBinaryGeometry] ]; - testCases.forEach((testCase) => { - testGetBinaryGeometriesFromArrow(t, testCase[0], testCase[1]); - }); + for (const testCase of testCases) { + await testGetBinaryGeometriesFromArrow(t, testCase[0], testCase[1]); + } t.end(); }); @@ -322,7 +322,7 @@ async function testGetBinaryGeometriesFromArrow( arrowFile, expectedBinaryGeometries ): Promise { - const arrowTable = await parse(fetchFile(arrowFile), ArrowLoader, { + const arrowTable = await load(arrowFile, ArrowLoader, { worker: false, arrow: { shape: 'arrow-table' @@ -342,7 +342,7 @@ async function testGetBinaryGeometriesFromArrow( t.notEqual(encoding, undefined, 'encoding is not undefined'); if (geoColumn && encoding) { - const options = {meanCenter: 
true}; + const options = {calculateMeanCenters: true, triangulate: true}; const binaryData = getBinaryGeometriesFromArrow(geoColumn, encoding, options); t.deepEqual(binaryData, expectedBinaryGeometries, 'binary geometries are correct'); } diff --git a/modules/arrow/test/geoarrow/convert-geoarrow-to-geojson.spec.ts b/modules/arrow/test/geoarrow/convert-geoarrow-to-geojson.spec.ts index a39dc71fa7..cc4b2aa878 100644 --- a/modules/arrow/test/geoarrow/convert-geoarrow-to-geojson.spec.ts +++ b/modules/arrow/test/geoarrow/convert-geoarrow-to-geojson.spec.ts @@ -4,14 +4,14 @@ import test, {Test} from 'tape-promise/tape'; import {GEOARROW_TEST_CASES, GEOARROW_ENCODINGS} from '../data/geoarrow/test-cases'; -import {fetchFile, parse} from '@loaders.gl/core'; +import {load} from '@loaders.gl/core'; import {FeatureCollection} from '@loaders.gl/schema'; import {ArrowLoader, serializeArrowSchema, parseGeometryFromArrow} from '@loaders.gl/arrow'; import {getGeometryColumnsFromSchema} from '@loaders.gl/gis'; -test('ArrowUtils#parseGeometryFromArrow', (t) => { +test('ArrowUtils#parseGeometryFromArrow', async (t) => { for (const testCase of GEOARROW_TEST_CASES) { - testParseFromArrow(t, testCase[0], testCase[1]); + await testParseFromArrow(t, testCase[0], testCase[1]); } t.end(); }); @@ -21,7 +21,7 @@ async function testParseFromArrow( arrowFile: string, expectedGeojson: FeatureCollection ): Promise { - const arrowTable = await parse(fetchFile(arrowFile), ArrowLoader, { + const arrowTable = await load(arrowFile, ArrowLoader, { worker: false, arrow: { shape: 'arrow-table' @@ -54,27 +54,20 @@ async function testParseFromArrow( t.equal(Boolean(geometryColumns.geometry), true, 'geometryColumns has geometry column'); // get encoding from geometryColumns['geometry'] - const encoding = geometryColumns.geometry.encoding; + const encoding = geometryColumns.geometry.encoding!; // check encoding is one of GEOARROW_ENCODINGS - t.ok( - Object.values(GEOARROW_ENCODINGS).includes(encoding!), - 
'encoding is one of GEOARROW_ENCODINGS' - ); + t.ok(Object.values(GEOARROW_ENCODINGS).includes(encoding), 'valid GeoArrow encoding'); // get first geometry from arrow geometry column const firstArrowGeometry = table.getChild('geometry')?.get(0); - const firstArrowGeometryObject = { - encoding, - data: firstArrowGeometry - }; // parse arrow geometry to geojson feature - const firstFeature = parseGeometryFromArrow(firstArrowGeometryObject); + const firstGeometry = parseGeometryFromArrow(firstArrowGeometry, encoding); // check if geometry in firstFeature is equal to the original geometry in expectedPointGeojson t.deepEqual( - firstFeature?.geometry, + firstGeometry, expectedGeojson.features[0].geometry, 'firstFeature.geometry is equal to expectedGeojson.features[0].geometry' ); diff --git a/modules/arrow/test/tables/convert-arrow-to-geojson-table.spec.ts b/modules/arrow/test/tables/convert-arrow-to-geojson-table.spec.ts index 55cacafe52..cc92d008a3 100644 --- a/modules/arrow/test/tables/convert-arrow-to-geojson-table.spec.ts +++ b/modules/arrow/test/tables/convert-arrow-to-geojson-table.spec.ts @@ -8,9 +8,9 @@ import {fetchFile, parse} from '@loaders.gl/core'; import {FeatureCollection} from '@loaders.gl/schema'; import {GeoArrowLoader} from '@loaders.gl/arrow'; -test('ArrowLoader#geojson-table', (t) => { +test('ArrowLoader#shape:geojson-table', async (t) => { for (const testCase of GEOARROW_TEST_CASES) { - testConversion(t, testCase[0], testCase[1]); + await testConversion(t, testCase[0], testCase[1]); } t.end(); }); diff --git a/modules/arrow/tsconfig.json b/modules/arrow/tsconfig.json index 003021f3af..828703834a 100644 --- a/modules/arrow/tsconfig.json +++ b/modules/arrow/tsconfig.json @@ -11,6 +11,7 @@ {"path": "../core"}, {"path": "../gis"}, {"path": "../loader-utils"}, - {"path": "../schema"} + {"path": "../schema"}, + {"path": "../wkt"} ] } diff --git a/modules/bson/package.json b/modules/bson/package.json index 02d003569a..dd3332b991 100644 --- 
a/modules/bson/package.json +++ b/modules/bson/package.json @@ -12,14 +12,11 @@ "url": "https://github.com/visgl/loaders.gl" }, "keywords": [ - "webgl", - "loader", - "parser", + "BSON", + "Binary JSON", "table", - "JSON", - "Streaming JSON", - "JSON stream", - "JSON async iterator" + "loader", + "parser" ], "types": "dist/index.d.ts", "main": "dist/index.cjs", diff --git a/modules/compression/package.json b/modules/compression/package.json index 8721c81221..c5e1719dc9 100644 --- a/modules/compression/package.json +++ b/modules/compression/package.json @@ -12,11 +12,14 @@ "url": "https://github.com/visgl/loaders.gl" }, "keywords": [ - "webgl", - "loader", - "3d", - "mesh", - "point cloud" + "compression", + "decompression", + "zip", + "gzip", + "deflate", + "lz4", + "zstd", + "brotli" ], "types": "dist/index.d.ts", "main": "dist/index.cjs", diff --git a/modules/core/package.json b/modules/core/package.json index 111a3c5211..382dc12890 100644 --- a/modules/core/package.json +++ b/modules/core/package.json @@ -12,11 +12,17 @@ "url": "https://github.com/visgl/loaders.gl" }, "keywords": [ - "webgl", "loader", + "parser", + "writer", + "encoder", + "big data", + "table", "3d", "mesh", - "point cloud" + "point cloud", + "webgl", + "webgpu" ], "types": "dist/index.d.ts", "main": "dist/index.cjs", diff --git a/modules/crypto/package.json b/modules/crypto/package.json index 20d09b052b..4a35ad474c 100644 --- a/modules/crypto/package.json +++ b/modules/crypto/package.json @@ -12,11 +12,15 @@ "url": "https://github.com/visgl/loaders.gl" }, "keywords": [ - "webgl", - "loader", - "3d", - "mesh", - "point cloud" + "crypto", + "hash", + "digest", + "encrypt", + "decrypt", + "crc32", + "crc32c", + "md5", + "sha256" ], "types": "dist/index.d.ts", "main": "dist/index.cjs", diff --git a/modules/crypto/test/crypto.spec.ts b/modules/crypto/test/crypto.spec.ts index e60d5be2e4..f362839678 100644 --- a/modules/crypto/test/crypto.spec.ts +++ b/modules/crypto/test/crypto.spec.ts @@ -82,7 
+82,7 @@ test('crypto#streaming hashes', async (t) => { }); // @ts-ignore - // eslint-disable-next-line no-unused-vars, no-empty, max-depth + // eslint-disable-next-line no-unused-vars, no-empty, max-depth, @typescript-eslint/no-unused-vars for await (const batch of nullIterator) { } diff --git a/modules/csv/package.json b/modules/csv/package.json index ebbebca331..5bc74f30e2 100644 --- a/modules/csv/package.json +++ b/modules/csv/package.json @@ -12,10 +12,15 @@ "url": "https://github.com/visgl/loaders.gl" }, "keywords": [ - "webgl", "loader", + "parser", + "writer", + "encoding", + "streaming", "table", - "CSV" + "CSV", + "streaming CSV", + "TSV" ], "types": "dist/index.d.ts", "main": "dist/index.cjs", diff --git a/modules/draco/package.json b/modules/draco/package.json index 5600ba6904..43048f0dfe 100644 --- a/modules/draco/package.json +++ b/modules/draco/package.json @@ -12,13 +12,19 @@ "url": "https://github.com/visgl/loaders.gl" }, "keywords": [ - "webgl", "loader", + "parser", + "writer", + "encoder", "3d", "mesh", "point cloud", "draco3d", - "draco" + "draco", + "geometry", + "geometry compression", + "webgl", + "webgpu" ], "types": "dist/index.d.ts", "main": "dist/index.cjs", diff --git a/modules/draco/src/draco-loader.ts b/modules/draco/src/draco-loader.ts index d4c514f701..7b5317c828 100644 --- a/modules/draco/src/draco-loader.ts +++ b/modules/draco/src/draco-loader.ts @@ -1,5 +1,6 @@ // loaders.gl, MIT license // Copyright (c) vis.gl contributors + import type {Loader, LoaderOptions} from '@loaders.gl/loader-utils'; import type {DracoMesh} from './lib/draco-types'; import type {DracoParseOptions} from './lib/draco-parser'; @@ -15,15 +16,6 @@ export type DracoLoaderOptions = LoaderOptions & { }; }; -const DEFAULT_DRACO_OPTIONS: DracoLoaderOptions = { - draco: { - decoderType: typeof WebAssembly === 'object' ? 
'wasm' : 'js', // 'js' for IE11 - libraryPath: 'libs/', - extraAttributes: {}, - attributeNameEntry: undefined - } -}; - /** * Worker loader for Draco3D compressed geometries */ @@ -38,5 +30,12 @@ export const DracoLoader: Loader = { mimeTypes: ['application/octet-stream'], binary: true, tests: ['DRACO'], - options: DEFAULT_DRACO_OPTIONS + options: { + draco: { + decoderType: typeof WebAssembly === 'object' ? 'wasm' : 'js', // 'js' for IE11 + libraryPath: 'libs/', + extraAttributes: {}, + attributeNameEntry: undefined + } + } }; diff --git a/modules/excel/package.json b/modules/excel/package.json index 6ce5538138..f1c53a404f 100644 --- a/modules/excel/package.json +++ b/modules/excel/package.json @@ -18,8 +18,8 @@ "table", "Excel", "Sheets", - "Worksheets", - "Spreadsheets" + "Worksheet", + "Spreadsheet" ], "types": "dist/index.d.ts", "main": "dist/index.cjs", diff --git a/modules/flatgeobuf/package.json b/modules/flatgeobuf/package.json index 8ee660606e..b1b41373f4 100644 --- a/modules/flatgeobuf/package.json +++ b/modules/flatgeobuf/package.json @@ -15,8 +15,11 @@ "geometry", "loader", "parser", - "MVT", - "Mapbox Vector Tiles" + "FGB", + "flatgeobuf", + "cloud optimized", + "geospatial", + "table" ], "types": "dist/index.d.ts", "main": "dist/index.cjs", diff --git a/modules/flatgeobuf/src/flatgeobuf-loader.ts b/modules/flatgeobuf/src/flatgeobuf-loader.ts index f436797c93..73103b73ae 100644 --- a/modules/flatgeobuf/src/flatgeobuf-loader.ts +++ b/modules/flatgeobuf/src/flatgeobuf-loader.ts @@ -1,9 +1,16 @@ -import type {Loader, LoaderOptions} from '@loaders.gl/loader-utils'; +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + +import type {Loader, LoaderWithParser, LoaderOptions} from '@loaders.gl/loader-utils'; +import {parseFlatGeobuf, parseFlatGeobufInBatches} from './lib/parse-flatgeobuf'; // __VERSION__ is injected by babel-plugin-version-inline // @ts-ignore TS2304: Cannot find name '__VERSION__'. 
const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest'; +// FGB\3FGB\1 +const FGB_MAGIC_NUMBER = [0x66, 0x67, 0x62, 0x03, 0x66, 0x67, 0x62, 0x01]; + export type FlatGeobufLoaderOptions = LoaderOptions & { flatgeobuf?: { shape?: 'geojson-table' | 'columnar-table' | 'binary'; @@ -14,7 +21,7 @@ export type FlatGeobufLoaderOptions = LoaderOptions & { }; }; -export const FlatGeobufLoader: Loader = { +export const FlatGeobufWorkerLoader: Loader = { id: 'flatgeobuf', name: 'FlatGeobuf', module: 'flatgeobuf', @@ -23,6 +30,7 @@ export const FlatGeobufLoader: Loader = { extensions: ['fgb'], mimeTypes: ['application/octet-stream'], category: 'geometry', + tests: [new Uint8Array(FGB_MAGIC_NUMBER).buffer], options: { flatgeobuf: { shape: 'geojson-table' @@ -32,3 +40,12 @@ export const FlatGeobufLoader: Loader = { } } }; + +export const FlatGeobufLoader: LoaderWithParser = { + ...FlatGeobufWorkerLoader, + parse: async (arrayBuffer, options) => parseFlatGeobuf(arrayBuffer, options), + parseSync: parseFlatGeobuf, + // @ts-expect-error this is a stream parser not an async iterator parser + parseInBatchesFromStream: parseFlatGeobufInBatches, + binary: true +}; diff --git a/modules/flatgeobuf/src/index.ts b/modules/flatgeobuf/src/index.ts index 5b60bc00a5..77ed46937d 100644 --- a/modules/flatgeobuf/src/index.ts +++ b/modules/flatgeobuf/src/index.ts @@ -1,15 +1,5 @@ -import type {LoaderWithParser} from '@loaders.gl/loader-utils'; -import type {FlatGeobufLoaderOptions} from './flatgeobuf-loader'; -import {FlatGeobufLoader as FlatGeobufWorkerLoader} from './flatgeobuf-loader'; -import {parseFlatGeobuf, parseFlatGeobufInBatches} from './lib/parse-flatgeobuf'; +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors -export {FlatGeobufWorkerLoader}; - -export const FlatGeobufLoader: LoaderWithParser = { - ...FlatGeobufWorkerLoader, - parse: async (arrayBuffer, options) => parseFlatGeobuf(arrayBuffer, options), - parseSync: parseFlatGeobuf, - // 
@ts-expect-error this is a stream parser not an async iterator parser - parseInBatchesFromStream: parseFlatGeobufInBatches, - binary: true -}; +export type {FlatGeobufLoaderOptions} from './flatgeobuf-loader'; +export {FlatGeobufLoader, FlatGeobufWorkerLoader} from './flatgeobuf-loader'; diff --git a/modules/flatgeobuf/src/lib/get-schema-from-fgb-header.ts b/modules/flatgeobuf/src/lib/get-schema-from-fgb-header.ts new file mode 100644 index 0000000000..d189e2395c --- /dev/null +++ b/modules/flatgeobuf/src/lib/get-schema-from-fgb-header.ts @@ -0,0 +1,121 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + +import type {Schema, Field, DataType} from '@loaders.gl/schema'; +import * as fgb from 'flatgeobuf'; + +/** + * @param fgbHeader + * geometryType: GeometryType; + * columns: ColumnMeta[] | null; + * envelope: Float64Array | null; + * featuresCount: number; + * indexNodeSize: number; + * crs: CrsMeta | null; + * title: string | null; + * description: string | null; + * metadata: string | null; + */ +export function getSchemaFromFGBHeader(fgbHeader: fgb.HeaderMeta): Schema { + const metadata: Record = { + title: fgbHeader.title || '', + description: fgbHeader.description || '', + crs: JSON.stringify(fgbHeader.crs) || '', + metadata: fgbHeader.metadata || '', + geometryType: String(fgbHeader.geometryType), + indexNodeSize: String(fgbHeader.indexNodeSize), + featureCount: String(fgbHeader.featuresCount), + bounds: fgbHeader.envelope?.join(',') || '' + }; + + const fields: Field[] = fgbHeader.columns?.map((column) => getFieldFromFGBColumn(column)) || []; + return {metadata, fields}; +} + +/** + * name: string; + * type: ColumnType; + * title: string | null; + * description: string | null; + * width: number; + * precision: number; + * scale: number; + * nullable: boolean; + * unique: boolean; + * primary_key: boolean; + */ +function getFieldFromFGBColumn(fgbColumn: fgb.ColumnMeta): Field { + const metadata: Record = { + title: fgbColumn.title || '', + 
description: fgbColumn.description || '', + width: String(fgbColumn.width), + precision: String(fgbColumn.precision), + scale: String(fgbColumn.scale), + unique: String(fgbColumn.unique), + primary_key: String(fgbColumn.primary_key) + }; + + return { + name: fgbColumn.name, + type: getTypeFromFGBType(fgbColumn.type as unknown as fgbColumnType), + nullable: fgbColumn.nullable, + metadata + }; +} + +/** Note: fgb.ColumType does not appear to be exported */ +enum fgbColumnType { + Byte = 0, + UByte = 1, + Bool = 2, + Short = 3, + UShort = 4, + Int = 5, + UInt = 6, + Long = 7, + ULong = 8, + Float = 9, + Double = 10, + String = 11, + Json = 12, + DateTime = 13, + Binary = 14 +} + +/** Convert FGB types to arrow like types */ +function getTypeFromFGBType(fgbType: fgbColumnType /* fgb.ColumnMeta['type'] */): DataType { + switch (fgbType) { + case fgbColumnType.Byte: + return 'int8'; + case fgbColumnType.UByte: + return 'uint8'; + case fgbColumnType.Bool: + return 'bool'; + case fgbColumnType.Short: + return 'int16'; + case fgbColumnType.UShort: + return 'uint16'; + case fgbColumnType.Int: + return 'int32'; + case fgbColumnType.UInt: + return 'uint32'; + case fgbColumnType.Long: + return 'int64'; + case fgbColumnType.ULong: + return 'uint64'; + case fgbColumnType.Float: + return 'float32'; + case fgbColumnType.Double: + return 'float64'; + case fgbColumnType.String: + return 'utf8'; + case fgbColumnType.Json: + return 'null'; + case fgbColumnType.DateTime: + return 'date-millisecond'; + case fgbColumnType.Binary: + return 'binary'; + default: + return 'null'; + } +} diff --git a/modules/flatgeobuf/src/lib/parse-flatgeobuf.ts b/modules/flatgeobuf/src/lib/parse-flatgeobuf.ts index cc07674523..edfffefe9c 100644 --- a/modules/flatgeobuf/src/lib/parse-flatgeobuf.ts +++ b/modules/flatgeobuf/src/lib/parse-flatgeobuf.ts @@ -5,19 +5,22 @@ import {Proj4Projection} from '@math.gl/proj4'; import {transformGeoJsonCoords} from '@loaders.gl/gis'; import type {FlatGeobufLoaderOptions} 
from '../flatgeobuf-loader'; -import type {GeoJSONTable, Feature, Table} from '@loaders.gl/schema'; +import type {GeoJSONTable, Table, Schema} from '@loaders.gl/schema'; + import {fgbToBinaryGeometry} from './binary-geometries'; +import {getSchemaFromFGBHeader} from './get-schema-from-fgb-header'; -import {Feature as FBGFeature, HeaderMeta as FGBHeader} from 'flatgeobuf'; +import * as fgb from 'flatgeobuf'; import * as geojson from 'flatgeobuf/lib/mjs/geojson.js'; import * as generic from 'flatgeobuf/lib/mjs/generic.js'; import {parseProperties as parsePropertiesBinary} from 'flatgeobuf/lib/mjs/generic/feature'; + const deserializeGeoJson = geojson.deserialize; const deserializeGeneric = generic.deserialize; // const parsePropertiesBinary = FlatgeobufFeature.parseProperties; // TODO: reproject binary features -function binaryFromFeature(feature: FBGFeature, header: FGBHeader) { +function binaryFromFeature(feature: fgb.Feature, header: fgb.HeaderMeta) { const geometry = feature.geometry(); // FlatGeobuf files can only hold a single geometry type per file, otherwise @@ -47,13 +50,7 @@ export function parseFlatGeobuf( switch (shape) { case 'geojson-table': { - const features = parseFlatGeobufToGeoJSON(arrayBuffer, options); - const table: GeoJSONTable = { - shape: 'geojson-table', - type: 'FeatureCollection', - features - }; - return table; + return parseFlatGeobufToGeoJSONTable(arrayBuffer, options); } case 'columnar-table': // binary + some JS arrays @@ -79,25 +76,28 @@ function parseFlatGeobufToBinary(arrayBuffer: ArrayBuffer, options: FlatGeobufLo return deserializeGeneric(array, fgbToBinaryGeometry); } -function parseFlatGeobufToGeoJSON( +function parseFlatGeobufToGeoJSONTable( arrayBuffer: ArrayBuffer, options: FlatGeobufLoaderOptions = {} -): Feature[] { +): GeoJSONTable { if (arrayBuffer.byteLength === 0) { - return []; + return {shape: 'geojson-table', type: 'FeatureCollection', features: []}; } const {reproject = false, _targetCrs = 'WGS84'} = (options && 
options.gis) || {}; const arr = new Uint8Array(arrayBuffer); - let headerMeta; + let fgbHeader; + let schema: Schema | undefined; + // @ts-expect-error this looks wrong - const {features} = deserializeGeoJson(arr, undefined, (header) => { - headerMeta = header; + let {features} = deserializeGeoJson(arr, undefined, (headerMeta) => { + fgbHeader = headerMeta; + schema = getSchemaFromFGBHeader(fgbHeader); }); - const crs = headerMeta && headerMeta.crs; + const crs = fgbHeader && fgbHeader.crs; let projection; if (reproject && crs) { // Constructing the projection may fail for some invalid WKT strings @@ -109,10 +109,10 @@ function parseFlatGeobufToGeoJSON( } if (projection) { - return transformGeoJsonCoords(features, (coords) => projection.project(coords)); + features = transformGeoJsonCoords(features, (coords) => projection.project(coords)); } - return features; + return {shape: 'geojson-table', schema, type: 'FeatureCollection', features}; } /* @@ -143,13 +143,20 @@ function parseFlatGeobufInBatchesToBinary(stream, options: FlatGeobufLoaderOptio return iterator; } +/** + * @todo this does not return proper GeoJSONTable batches + * @param stream + * @param options + */ // eslint-disable-next-line complexity async function* parseFlatGeobufInBatchesToGeoJSON(stream, options: FlatGeobufLoaderOptions) { const {reproject = false, _targetCrs = 'WGS84'} = (options && options.gis) || {}; - let headerMeta; - const iterator = deserializeGeoJson(stream, undefined, (header) => { - headerMeta = header; + let fgbHeader; + // let schema: Schema | undefined; + const iterator = deserializeGeoJson(stream, undefined, (headerMeta) => { + fgbHeader = headerMeta; + // schema = getSchemaFromFGBHeader(fgbHeader); }); let projection; @@ -157,7 +164,7 @@ async function* parseFlatGeobufInBatchesToGeoJSON(stream, options: FlatGeobufLoa // @ts-expect-error this looks wrong for await (const feature of iterator) { if (firstRecord) { - const crs = headerMeta && headerMeta.crs; + const crs = 
fgbHeader && fgbHeader.crs; if (reproject && crs) { projection = new Proj4Projection({from: crs.wkt, to: _targetCrs}); } diff --git a/modules/flatgeobuf/src/workers/flatgeobuf-worker.ts b/modules/flatgeobuf/src/workers/flatgeobuf-worker.ts index adfedbd2d0..609ac01c8a 100644 --- a/modules/flatgeobuf/src/workers/flatgeobuf-worker.ts +++ b/modules/flatgeobuf/src/workers/flatgeobuf-worker.ts @@ -1,3 +1,6 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + import {createLoaderWorker} from '@loaders.gl/loader-utils'; import {FlatGeobufLoader} from '../index'; diff --git a/modules/flatgeobuf/test/flatgeobuf-loader.spec.ts b/modules/flatgeobuf/test/flatgeobuf-loader.spec.ts index e0f68d3a0d..57b59c6942 100644 --- a/modules/flatgeobuf/test/flatgeobuf-loader.spec.ts +++ b/modules/flatgeobuf/test/flatgeobuf-loader.spec.ts @@ -1,8 +1,53 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + import test from 'tape-promise/tape'; import {FlatGeobufLoader} from '@loaders.gl/flatgeobuf'; import {setLoaderOptions, load, loadInBatches} from '@loaders.gl/core'; const FLATGEOBUF_COUNTRIES_DATA_URL = '@loaders.gl/flatgeobuf/test/data/countries.fgb'; +const FGB_METADATA = { + metadata: { + title: '', + description: '', + crs: '{"org":"EPSG","code":4326,"name":"WGS 84","description":null,"wkt":"GEOGCRS[\\"WGS 84\\",DATUM[\\"World Geodetic System 1984\\",ELLIPSOID[\\"WGS 84\\",6378137,298.257223563,LENGTHUNIT[\\"metre\\",1]]],PRIMEM[\\"Greenwich\\",0,ANGLEUNIT[\\"degree\\",0.0174532925199433]],CS[ellipsoidal,2],AXIS[\\"latitude\\",north,ORDER[1],ANGLEUNIT[\\"degree\\",0.0174532925199433]],AXIS[\\"longitude\\",east,ORDER[2],ANGLEUNIT[\\"degree\\",0.0174532925199433]],ID[\\"EPSG\\",4326]]","code_string":null}', + metadata: '', + geometryType: '6', + indexNodeSize: '16', + featureCount: '179', + bounds: '' + }, + fields: [ + { + name: 'id', + type: 'utf8', + nullable: true, + metadata: { + title: '', + description: '', + width: '-1', + precision: '-1', + 
scale: '-1', + unique: 'false', + primary_key: 'false' + } + }, + { + name: 'name', + type: 'utf8', + nullable: true, + metadata: { + title: '', + description: '', + width: '-1', + precision: '-1', + scale: '-1', + unique: 'false', + primary_key: 'false' + } + } + ] +}; setLoaderOptions({ _workerType: 'test' @@ -11,6 +56,8 @@ setLoaderOptions({ test('FlatGeobufLoader#load', async (t) => { const geojsonTable = await load(FLATGEOBUF_COUNTRIES_DATA_URL, FlatGeobufLoader, {worker: false}); t.equal(geojsonTable.features.length, 179); + t.equal(geojsonTable.schema.fields.length, 2); + t.deepEqual(geojsonTable.schema, FGB_METADATA); t.end(); }); diff --git a/modules/geotiff/package.json b/modules/geotiff/package.json index 4cc3c0e5b9..8a7a0b4dad 100644 --- a/modules/geotiff/package.json +++ b/modules/geotiff/package.json @@ -16,6 +16,7 @@ "loader", "3d", "texture", + "cloud native", "tiff", "geotiff" ], diff --git a/modules/geotiff/src/index.ts b/modules/geotiff/src/index.ts index 11defefe4d..c29d6ab1c0 100644 --- a/modules/geotiff/src/index.ts +++ b/modules/geotiff/src/index.ts @@ -1,2 +1,5 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + export {loadGeoTiff} from './lib/load-geotiff'; export {default as TiffPixelSource} from './lib/tiff-pixel-source'; diff --git a/modules/geotiff/src/lib/load-geotiff.ts b/modules/geotiff/src/lib/load-geotiff.ts index 2cbbc94243..1a23026bb2 100644 --- a/modules/geotiff/src/lib/load-geotiff.ts +++ b/modules/geotiff/src/lib/load-geotiff.ts @@ -1,3 +1,6 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + import {fromUrl, fromBlob, GeoTIFF} from 'geotiff'; import { diff --git a/modules/geotiff/src/lib/ome/load-ome-tiff.ts b/modules/geotiff/src/lib/ome/load-ome-tiff.ts index 518f0efca3..fa9e980f62 100644 --- a/modules/geotiff/src/lib/ome/load-ome-tiff.ts +++ b/modules/geotiff/src/lib/ome/load-ome-tiff.ts @@ -1,3 +1,6 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + import type 
{GeoTIFF, GeoTIFFImage} from 'geotiff'; import TiffPixelSource from '../tiff-pixel-source'; diff --git a/modules/geotiff/src/lib/ome/ome-indexers.ts b/modules/geotiff/src/lib/ome/ome-indexers.ts index 267bb0b84d..fd5ddbdb5d 100644 --- a/modules/geotiff/src/lib/ome/ome-indexers.ts +++ b/modules/geotiff/src/lib/ome/ome-indexers.ts @@ -1,3 +1,6 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + import type {GeoTIFFImage, GeoTIFF, ImageFileDirectory} from 'geotiff'; import type {OMEXML} from '../ome/omexml'; diff --git a/modules/geotiff/src/lib/ome/ome-utils.ts b/modules/geotiff/src/lib/ome/ome-utils.ts index 40491d1a94..79eb70a7a3 100644 --- a/modules/geotiff/src/lib/ome/ome-utils.ts +++ b/modules/geotiff/src/lib/ome/ome-utils.ts @@ -1,3 +1,6 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + import {getDims, getLabels} from './utils'; import type {OMEXML, UnitsLength} from './omexml'; diff --git a/modules/geotiff/src/lib/ome/omexml.ts b/modules/geotiff/src/lib/ome/omexml.ts index 8bbc4a8996..70c3f40c7d 100644 --- a/modules/geotiff/src/lib/ome/omexml.ts +++ b/modules/geotiff/src/lib/ome/omexml.ts @@ -1,3 +1,6 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + import {XMLParser} from 'fast-xml-parser'; import {ensureArray, intToRgba} from '../utils/tiff-utils'; diff --git a/modules/geotiff/src/lib/ome/utils.ts b/modules/geotiff/src/lib/ome/utils.ts index dad6ef49f0..cbc2a17b6e 100644 --- a/modules/geotiff/src/lib/ome/utils.ts +++ b/modules/geotiff/src/lib/ome/utils.ts @@ -1,3 +1,6 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + import type {OMEXML} from '../ome/omexml'; import type {Labels} from '../../types'; diff --git a/modules/geotiff/src/lib/tiff-pixel-source.ts b/modules/geotiff/src/lib/tiff-pixel-source.ts index 0e9ae3af71..244c4dc753 100644 --- a/modules/geotiff/src/lib/tiff-pixel-source.ts +++ b/modules/geotiff/src/lib/tiff-pixel-source.ts @@ -1,3 +1,6 @@ +// loaders.gl, MIT 
license +// Copyright (c) vis.gl contributors + import type {GeoTIFFImage, RasterOptions} from 'geotiff'; import {getImageSize, isInterleaved, SIGNAL_ABORTED} from './utils/tiff-utils'; diff --git a/modules/geotiff/src/lib/utils/Pool.ts b/modules/geotiff/src/lib/utils/Pool.ts index 9fa452ddb8..49c6fd1e29 100644 --- a/modules/geotiff/src/lib/utils/Pool.ts +++ b/modules/geotiff/src/lib/utils/Pool.ts @@ -1,3 +1,6 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + /** eslint-disable */ import type {FileDirectory} from 'geotiff'; diff --git a/modules/geotiff/src/lib/utils/proxies.ts b/modules/geotiff/src/lib/utils/proxies.ts index 19f155ed89..48ac160487 100644 --- a/modules/geotiff/src/lib/utils/proxies.ts +++ b/modules/geotiff/src/lib/utils/proxies.ts @@ -1,3 +1,6 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + import type {GeoTIFF} from 'geotiff'; import type Pool from './Pool'; diff --git a/modules/geotiff/src/lib/utils/tiff-utils.ts b/modules/geotiff/src/lib/utils/tiff-utils.ts index 7e145dffb8..c574cc63c2 100644 --- a/modules/geotiff/src/lib/utils/tiff-utils.ts +++ b/modules/geotiff/src/lib/utils/tiff-utils.ts @@ -1,3 +1,6 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + import type {PixelSource} from '../../types'; export function ensureArray(x: T | T[]) { diff --git a/modules/geotiff/src/types.ts b/modules/geotiff/src/types.ts index cc1777cd27..d405c505ef 100644 --- a/modules/geotiff/src/types.ts +++ b/modules/geotiff/src/types.ts @@ -1,3 +1,6 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + import {DTYPE_LOOKUP} from './lib/ome/ome-utils'; export type Dtype = (typeof DTYPE_LOOKUP)[keyof typeof DTYPE_LOOKUP]; diff --git a/modules/geotiff/src/typings/geotiff.ts b/modules/geotiff/src/typings/geotiff.ts index da92b9a46f..180df93473 100644 --- a/modules/geotiff/src/typings/geotiff.ts +++ b/modules/geotiff/src/typings/geotiff.ts @@ -1,5 +1,9 @@ -declare type TypedArray = 
import('../types').TypedArray; +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors +import type {TypedArray} from '../types'; + +// @ts-ignore typescript 5.3 error declare module 'geotiff' { function fromUrl(url: string, headers?: Record): Promise; function fromBlob(blob: Blob): Promise; diff --git a/modules/gis/src/lib/geo/geoarrow-metadata.ts b/modules/gis/src/lib/geo/geoarrow-metadata.ts index a05ece0c07..13bfe9e12c 100644 --- a/modules/gis/src/lib/geo/geoarrow-metadata.ts +++ b/modules/gis/src/lib/geo/geoarrow-metadata.ts @@ -28,10 +28,16 @@ const GEOARROW_ENCODINGS = [ const GEOARROW_COLUMN_METADATA_ENCODING = 'ARROW:extension:name'; const GEOARROW_COLUMN_METADATA_METADATA = 'ARROW:extension:metadata'; -/** Geospatial metadata for one column, extracted from Apache Arrow metadata */ +/** + * Geospatial metadata for one column, extracted from Apache Arrow metadata + * @see https://github.com/geoarrow/geoarrow/blob/main/extension-types.md + */ export type GeoArrowMetadata = { + /** Encoding of geometry in this column */ encoding?: GeoArrowEncoding; + /** CRS in [PROJJSON](https://proj.org/specifications/projjson.html). 
Omitted if producer has no information about CRS */ crs?: Record; + /** Edges are either spherical or omitted */ edges?: 'spherical'; [key: string]: unknown; }; @@ -49,25 +55,26 @@ export function getGeometryColumnsFromSchema(schema: Schema): Record void ): Promise { - const hashCDOffset = await searchFromTheEnd(fileProvider, cdHeaderSignature); + const hashCDOffset = await searchFromTheEnd(fileProvider, CD_HEADER_SIGNATURE); const cdFileHeader = await parseZipCDFileHeader(hashCDOffset, fileProvider); diff --git a/modules/kml/src/tcx-loader.ts b/modules/kml/src/tcx-loader.ts index 403655046f..eef8ee3589 100644 --- a/modules/kml/src/tcx-loader.ts +++ b/modules/kml/src/tcx-loader.ts @@ -72,6 +72,7 @@ function parseTextSync( const table: GeoJSONTable = { shape: 'geojson-table', type: 'FeatureCollection', + schema: {metadata: {}, fields: []}, features: geojson.features }; return table; diff --git a/modules/las/src/index.ts b/modules/las/src/index.ts index 670c36a4aa..543cfa9160 100644 --- a/modules/las/src/index.ts +++ b/modules/las/src/index.ts @@ -11,6 +11,7 @@ export {LASWorkerLoader}; /** * Loader for the LAS (LASer) point cloud format + * @note Does not support LAS v1.4 */ export const LASLoader: LoaderWithParser = { ...LASWorkerLoader, diff --git a/modules/las/src/las-loader.ts b/modules/las/src/las-loader.ts index ddf9cc0180..7e2883d085 100644 --- a/modules/las/src/las-loader.ts +++ b/modules/las/src/las-loader.ts @@ -16,17 +16,9 @@ export type LASLoaderOptions = LoaderOptions & { onProgress?: Function; }; -const DEFAULT_LAS_OPTIONS: LASLoaderOptions = { - las: { - shape: 'mesh', - fp64: false, - skip: 1, - colorDepth: 8 - } -}; - /** * Loader for the LAS (LASer) point cloud format + * @note Does not support LAS v1.4 */ export const LASLoader: Loader = { name: 'LAS', @@ -39,5 +31,12 @@ export const LASLoader: Loader = { text: true, binary: true, tests: ['LAS'], - options: DEFAULT_LAS_OPTIONS + options: { + las: { + shape: 'mesh', + fp64: false, + skip: 1, + 
colorDepth: 8 + } + } }; diff --git a/modules/loader-utils/src/lib/sources/image-source.ts b/modules/loader-utils/src/lib/sources/image-source.ts index e616feaf7f..56d777086e 100644 --- a/modules/loader-utils/src/lib/sources/image-source.ts +++ b/modules/loader-utils/src/lib/sources/image-source.ts @@ -41,6 +41,8 @@ export type GetImageParameters = { styles?: unknown; /** bounding box of the requested map image */ boundingBox: [min: [x: number, y: number], max: [x: number, y: number]]; + /** @deprecated use boundingBox */ + bbox?: [number, number, number, number]; /** pixel width of returned image */ width: number; /** pixels */ diff --git a/modules/mvt/src/lib/parse-tilejson.ts b/modules/mvt/src/lib/parse-tilejson.ts index b031965c2c..45533b0a09 100644 --- a/modules/mvt/src/lib/parse-tilejson.ts +++ b/modules/mvt/src/lib/parse-tilejson.ts @@ -124,6 +124,13 @@ type TilestatsLayerAttribute = { const isObject: (x: unknown) => boolean = (x) => x !== null && typeof x === 'object'; +/** + * Parse TileJSON from metadata + * @param jsonMetadata - metadata object + * @param options - options + * @returns - parsed TileJSON + */ +// eslint-disable-next-line complexity export function parseTileJSON(jsonMetadata: any, options: TileJSONOptions): TileJSON | null { if (!jsonMetadata || !isObject(jsonMetadata)) { return null; diff --git a/modules/mvt/test/tilejson-loader.spec.ts b/modules/mvt/test/tilejson-loader.spec.ts index 35f4a4b7b0..98e6440a69 100644 --- a/modules/mvt/test/tilejson-loader.spec.ts +++ b/modules/mvt/test/tilejson-loader.spec.ts @@ -31,7 +31,6 @@ test('TileJSONLoader#load', async (t) => { test.skip('TileJSONLoader#tippecanoe', async (t) => { const metadata = await load(TIPPECANOE_TILEJSON, TileJSONLoader); const expected = await load(TIPPECANOE_EXPECTED, JSONLoader); - console.error(metadata, expected); t.deepEqual(metadata, expected, 'Tippecanoe TileJSON loaded correctly'); t.end(); }); diff --git a/modules/parquet/package.json b/modules/parquet/package.json 
index 8e0f56e956..814d95c48c 100644 --- a/modules/parquet/package.json +++ b/modules/parquet/package.json @@ -12,12 +12,15 @@ "url": "https://github.com/visgl/loaders.gl" }, "keywords": [ - "webgl", "loader", "parser", "table", - "Parquet", - "Apache Parquet" + "parquet", + "streaming", + "cloud native", + "geoparquet", + "Apache Parquet", + "apache-parquet" ], "types": "dist/index.d.ts", "main": "dist/index.cjs", diff --git a/modules/parquet/src/index.ts b/modules/parquet/src/index.ts index f123709bdb..c9a703ef37 100644 --- a/modules/parquet/src/index.ts +++ b/modules/parquet/src/index.ts @@ -3,74 +3,21 @@ export {Buffer} from './polyfills/buffer/install-buffer-polyfill'; -import type {LoaderWithParser} from '@loaders.gl/loader-utils'; -import type { - ObjectRowTable, - ObjectRowTableBatch, - ColumnarTable, - ColumnarTableBatch, - GeoJSONTable, - GeoJSONTableBatch -} from '@loaders.gl/schema'; - // import {ArrowTable, ArrowTableBatch} from '@loaders.gl/arrow'; -// ParquetLoader - -import {BlobFile} from '@loaders.gl/loader-utils'; -import { - ParquetLoader as ParquetWorkerLoader, - ParquetColumnarLoader as ParquetColumnarWorkerLoader, - ParquetLoaderOptions +export { + ParquetWorkerLoader, + ParquetLoader, + GeoParquetWorkerLoader, + GeoParquetLoader, + ParquetColumnarWorkerLoader, + ParquetColumnarLoader } from './parquet-loader'; -import {parseParquetFile, parseParquetFileInBatches} from './lib/parsers/parse-parquet-to-rows'; -import { - parseParquetFileInColumns, - parseParquetFileInColumnarBatches -} from './lib/parsers/parse-parquet-to-columns'; // import type {ParquetWasmLoaderOptions} from './lib/wasm/parse-parquet-wasm'; // import {parseParquetWasm} from './lib/wasm/parse-parquet-wasm'; // import {ParquetWasmLoader as ParquetWasmWorkerLoader} from './parquet-wasm-loader'; -export {ParquetWorkerLoader}; -// export {ParquetWasmWorkerLoader}; - -/** ParquetJS table loader */ -export const ParquetLoader: LoaderWithParser< - ObjectRowTable | GeoJSONTable, - 
ObjectRowTableBatch | GeoJSONTableBatch, - ParquetLoaderOptions -> = { - ...ParquetWorkerLoader, - parse(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions) { - return parseParquetFile(new BlobFile(arrayBuffer), options); - }, - parseFile: parseParquetFile, - parseFileInBatches: parseParquetFileInBatches -}; - -/** ParquetJS table loader */ -export const ParquetColumnarLoader: LoaderWithParser< - ColumnarTable, - ColumnarTableBatch, - ParquetLoaderOptions -> = { - ...ParquetColumnarWorkerLoader, - parse(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions) { - return parseParquetFileInColumns(new BlobFile(arrayBuffer), options); - }, - parseFile: parseParquetFileInColumns, - parseFileInBatches: parseParquetFileInColumnarBatches -}; - -// export const ParquetWasmLoader: LoaderWithParser = { -// ...ParquetWasmWorkerLoader, -// parse: parseParquetWasm -// }; - -// ParquetWriter - export {ParquetWriter as _ParquetWriter} from './parquet-writer'; // export {ParquetWasmWriter} from './parquet-wasm-writer'; diff --git a/modules/parquet/src/lib/parsers/get-parquet-schema.ts b/modules/parquet/src/lib/parsers/get-parquet-schema.ts index 64fff63eb7..d2fa1aba83 100644 --- a/modules/parquet/src/lib/parsers/get-parquet-schema.ts +++ b/modules/parquet/src/lib/parsers/get-parquet-schema.ts @@ -1,3 +1,6 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + // loaders.gl import {Schema} from '@loaders.gl/schema'; import {ParquetReader} from '../../parquetjs/parser/parquet-reader'; diff --git a/modules/parquet/src/lib/parsers/parse-geoparquet.ts b/modules/parquet/src/lib/parsers/parse-geoparquet.ts new file mode 100644 index 0000000000..53fdde4be7 --- /dev/null +++ b/modules/parquet/src/lib/parsers/parse-geoparquet.ts @@ -0,0 +1,87 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + +import type {ReadableFile} from '@loaders.gl/loader-utils'; +import type { + GeoJSONTable, + GeoJSONTableBatch, + ObjectRowTable, + ObjectRowTableBatch +} 
from '@loaders.gl/schema'; +import {convertWKBTableToGeoJSON} from '@loaders.gl/gis'; +import {WKTLoader, WKBLoader} from '@loaders.gl/wkt'; + +import type {ParquetLoaderOptions} from '../../parquet-loader'; + +import {parseParquetFile, parseParquetFileInBatches} from './parse-parquet'; + +export async function parseGeoParquetFile( + file: ReadableFile, + options?: ParquetLoaderOptions +): Promise { + const table = await parseParquetFile(file, {...options, shape: 'object-row-table'}); + const shape = options?.parquet?.shape; + return convertTable(table, shape); +} + +export async function* parseGeoParquetFileInBatches( + file: ReadableFile, + options?: ParquetLoaderOptions +): AsyncIterable { + const tableBatches = parseParquetFileInBatches(file, {...options, shape: 'object-row-table'}); + + for await (const batch of tableBatches) { + const shape = options?.parquet?.shape; + yield convertBatch(batch, shape); + } +} + +function convertTable( + objectRowTable: ObjectRowTable, + shape?: 'object-row-table' | 'geojson-table' +): ObjectRowTable | GeoJSONTable { + switch (shape) { + case 'object-row-table': + return objectRowTable; + + case 'geojson-table': + try { + return convertWKBTableToGeoJSON(objectRowTable, objectRowTable.schema!, [ + WKTLoader, + WKBLoader + ]); + } catch (error) { + return objectRowTable; + } + + default: + throw new Error(shape); + } +} + +function convertBatch( + objectRowBatch: ObjectRowTableBatch, + shape?: 'object-row-table' | 'geojson-table' +): ObjectRowTableBatch | GeoJSONTableBatch { + switch (shape) { + case 'object-row-table': + return objectRowBatch; + + case 'geojson-table': + try { + const geojsonTable = convertWKBTableToGeoJSON(objectRowBatch, objectRowBatch.schema!, [ + WKTLoader, + WKBLoader + ]); + return { + ...objectRowBatch, + ...geojsonTable + }; + } catch (error) { + return objectRowBatch; + } + + default: + throw new Error(shape); + } +} diff --git a/modules/parquet/src/lib/parsers/parse-parquet-to-columns.ts 
b/modules/parquet/src/lib/parsers/parse-parquet-to-columns.ts index 6c7c4fd6a3..92b5f6f84c 100644 --- a/modules/parquet/src/lib/parsers/parse-parquet-to-columns.ts +++ b/modules/parquet/src/lib/parsers/parse-parquet-to-columns.ts @@ -11,6 +11,9 @@ import {materializeColumns} from '../../parquetjs/schema/shred'; import {getSchemaFromParquetReader} from './get-parquet-schema'; import {installBufferPolyfill} from '../../polyfills/buffer'; +/** + * @deprecated + */ export async function parseParquetFileInColumns( file: ReadableFile, options?: ParquetLoaderOptions @@ -26,6 +29,9 @@ export async function parseParquetFileInColumns( throw new Error('empty table'); } +/** + * @deprecated + */ export async function* parseParquetFileInColumnarBatches( file: ReadableFile, options?: ParquetLoaderOptions diff --git a/modules/parquet/src/lib/parsers/parse-parquet-to-rows.ts b/modules/parquet/src/lib/parsers/parse-parquet.ts similarity index 74% rename from modules/parquet/src/lib/parsers/parse-parquet-to-rows.ts rename to modules/parquet/src/lib/parsers/parse-parquet.ts index 0473b1df20..6e421c3d0c 100644 --- a/modules/parquet/src/lib/parsers/parse-parquet-to-rows.ts +++ b/modules/parquet/src/lib/parsers/parse-parquet.ts @@ -1,14 +1,8 @@ -// import type {LoaderWithParser, Loader, LoaderOptions} from '@loaders.gl/loader-utils'; -// import {ColumnarTableBatch} from '@loaders.gl/schema'; +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + import type {ReadableFile} from '@loaders.gl/loader-utils'; -import type { - GeoJSONTable, - GeoJSONTableBatch, - ObjectRowTable, - ObjectRowTableBatch -} from '@loaders.gl/schema'; -import {convertWKBTableToGeoJSON} from '@loaders.gl/gis'; -import {WKTLoader, WKBLoader} from '@loaders.gl/wkt'; +import type {ObjectRowTable, ObjectRowTableBatch} from '@loaders.gl/schema'; import type {ParquetLoaderOptions} from '../../parquet-loader'; import type {ParquetRow} from '../../parquetjs/schema/declare'; @@ -16,10 +10,16 @@ import 
{ParquetReader} from '../../parquetjs/parser/parquet-reader'; import {getSchemaFromParquetReader} from './get-parquet-schema'; import {installBufferPolyfill} from '../../polyfills/buffer'; +/** + * * Parse a parquet file using parquetjs + * @param file + * @param options + * @returns + */ export async function parseParquetFile( file: ReadableFile, options?: ParquetLoaderOptions -): Promise { +): Promise { installBufferPolyfill(); const reader = new ParquetReader(file, { @@ -47,10 +47,15 @@ export async function parseParquetFile( return convertTable(objectRowTable, shape); } +/** + * Parse a parquet file in batches using parquetjs + * @param file + * @param options + */ export async function* parseParquetFileInBatches( file: ReadableFile, options?: ParquetLoaderOptions -): AsyncIterable { +): AsyncIterable { const reader = new ParquetReader(file, { preserveBinary: options?.parquet?.preserveBinary }); @@ -78,20 +83,14 @@ export async function* parseParquetFileInBatches( function convertTable( objectRowTable: ObjectRowTable, shape?: 'object-row-table' | 'geojson-table' -): ObjectRowTable | GeoJSONTable { +): ObjectRowTable { switch (shape) { case 'object-row-table': return objectRowTable; + // Hack until geoparquet fixes up forwarded shape case 'geojson-table': - try { - return convertWKBTableToGeoJSON(objectRowTable, objectRowTable.schema!, [ - WKTLoader, - WKBLoader - ]); - } catch (error) { - return objectRowTable; - } + return objectRowTable; default: throw new Error(shape); diff --git a/modules/parquet/src/lib/wasm/encode-parquet-wasm.ts b/modules/parquet/src/lib/wasm/encode-parquet-wasm.ts index ed752b87d0..d1a14c272e 100644 --- a/modules/parquet/src/lib/wasm/encode-parquet-wasm.ts +++ b/modules/parquet/src/lib/wasm/encode-parquet-wasm.ts @@ -1,3 +1,6 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + import type {WriterOptions} from '@loaders.gl/loader-utils'; import type {ArrowTable} from '@loaders.gl/arrow'; diff --git 
a/modules/parquet/src/lib/wasm/load-wasm-browser.ts b/modules/parquet/src/lib/wasm/load-wasm-browser.ts index 1256f0eea0..a7be8d12b3 100644 --- a/modules/parquet/src/lib/wasm/load-wasm-browser.ts +++ b/modules/parquet/src/lib/wasm/load-wasm-browser.ts @@ -1,3 +1,6 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + import * as wasmEsm from 'parquet-wasm/esm2/arrow1'; let cached: typeof wasmEsm | null = null; diff --git a/modules/parquet/src/lib/wasm/load-wasm-node.ts b/modules/parquet/src/lib/wasm/load-wasm-node.ts index 216d03b56b..89a701e2e9 100644 --- a/modules/parquet/src/lib/wasm/load-wasm-node.ts +++ b/modules/parquet/src/lib/wasm/load-wasm-node.ts @@ -1,3 +1,6 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + import * as wasmNode from 'parquet-wasm/node/arrow1'; export async function loadWasm(wasmUrl?: string) { diff --git a/modules/parquet/src/lib/wasm/load-wasm.ts b/modules/parquet/src/lib/wasm/load-wasm.ts index 81e328cb26..e98dee8529 100644 --- a/modules/parquet/src/lib/wasm/load-wasm.ts +++ b/modules/parquet/src/lib/wasm/load-wasm.ts @@ -1 +1,4 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + export {loadWasm} from './load-wasm-node'; diff --git a/modules/parquet/src/lib/wasm/parse-parquet-wasm.ts b/modules/parquet/src/lib/wasm/parse-parquet-wasm.ts index 44c3f3f7a8..a68dfb70b1 100644 --- a/modules/parquet/src/lib/wasm/parse-parquet-wasm.ts +++ b/modules/parquet/src/lib/wasm/parse-parquet-wasm.ts @@ -1,3 +1,6 @@ +// loaders.gl, MIT license +// Copyright (c) vis.gl contributors + // eslint-disable import type {LoaderOptions} from '@loaders.gl/loader-utils'; import type {ArrowTable} from '@loaders.gl/arrow'; diff --git a/modules/parquet/src/parquet-loader.ts b/modules/parquet/src/parquet-loader.ts index 89ba506704..14061d0300 100644 --- a/modules/parquet/src/parquet-loader.ts +++ b/modules/parquet/src/parquet-loader.ts @@ -1,13 +1,23 @@ // loaders.gl, MIT license // Copyright (c) vis.gl 
contributors -import type {Loader, LoaderOptions} from '@loaders.gl/loader-utils'; +import type {Loader, LoaderWithParser, LoaderOptions} from '@loaders.gl/loader-utils'; import type { ObjectRowTable, ObjectRowTableBatch, + GeoJSONTable, + GeoJSONTableBatch, ColumnarTable, ColumnarTableBatch } from '@loaders.gl/schema'; +import {BlobFile} from '@loaders.gl/loader-utils'; + +import {parseParquetFile, parseParquetFileInBatches} from './lib/parsers/parse-parquet'; +import {parseGeoParquetFile, parseGeoParquetFileInBatches} from './lib/parsers/parse-geoparquet'; +import { + parseParquetFileInColumns, + parseParquetFileInColumnarBatches +} from './lib/parsers/parse-parquet-to-columns'; export {Buffer} from './polyfills/buffer/install-buffer-polyfill'; @@ -32,8 +42,14 @@ export type ParquetLoaderOptions = LoaderOptions & { }; }; -/** ParquetJS table loader */ -export const ParquetLoader: Loader = { +/** + * ParquetJS table loader + */ +export const ParquetWorkerLoader: Loader< + ObjectRowTable, + ObjectRowTableBatch, + ParquetLoaderOptions +> = { name: 'Apache Parquet', id: 'parquet', module: 'parquet', @@ -55,7 +71,63 @@ export const ParquetLoader: Loader = { + ...ParquetWorkerLoader, + parse: (arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions) => + parseParquetFile(new BlobFile(arrayBuffer), options), + + parseFile: parseParquetFile, + parseFileInBatches: parseParquetFileInBatches +}; + +// Defeat tree shaking +// @ts-ignore +ParquetLoader.Buffer = Buffer; + +export const GeoParquetWorkerLoader: Loader = + { + name: 'Apache Parquet', + id: 'parquet', + module: 'parquet', + version: VERSION, + worker: true, + category: 'table', + extensions: ['parquet'], + mimeTypes: ['application/octet-stream'], + binary: true, + tests: ['PAR1', 'PARE'], + options: { + parquet: { + shape: 'geojson-table', + columnList: [], + geoparquet: true, + url: undefined, + preserveBinary: false + } + } + }; + +/** ParquetJS table loader */ +export const GeoParquetLoader: LoaderWithParser< 
+ ObjectRowTable | GeoJSONTable, + ObjectRowTableBatch | GeoJSONTableBatch, + ParquetLoaderOptions +> = { + ...GeoParquetWorkerLoader, + parse(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions) { + return parseGeoParquetFile(new BlobFile(arrayBuffer), options); + }, + parseFile: parseGeoParquetFile, + parseFileInBatches: parseGeoParquetFileInBatches +}; + +/** @deprecated Test to see if we can improve perf of parquetjs loader */ +export const ParquetColumnarWorkerLoader: Loader< ColumnarTable, ColumnarTableBatch, ParquetLoaderOptions @@ -73,8 +145,16 @@ export const ParquetColumnarLoader: Loader< options: ParquetLoader.options }; -// Defeat tree shaking -// @ts-ignore -ParquetLoader.Buffer = Buffer; -// @ts-ignore -ParquetColumnarLoader.Buffer = Buffer; +/** @deprecated Test to see if we can improve perf of parquetjs loader */ +export const ParquetColumnarLoader: LoaderWithParser< + ColumnarTable, + ColumnarTableBatch, + ParquetLoaderOptions +> = { + ...ParquetColumnarWorkerLoader, + parse(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions) { + return parseParquetFileInColumns(new BlobFile(arrayBuffer), options); + }, + parseFile: parseParquetFileInColumns, + parseFileInBatches: parseParquetFileInColumnarBatches +}; diff --git a/modules/parquet/src/parquet-wasm-loader.ts b/modules/parquet/src/parquet-wasm-loader.ts index 53ef10f5ab..246e6c4867 100644 --- a/modules/parquet/src/parquet-wasm-loader.ts +++ b/modules/parquet/src/parquet-wasm-loader.ts @@ -1,9 +1,11 @@ // loaders.gl, MIT license // Copyright (c) vis.gl contributors -import type {Loader, LoaderOptions} from '@loaders.gl/loader-utils'; +import type {Loader, LoaderWithParser, LoaderOptions} from '@loaders.gl/loader-utils'; import type {ArrowTable} from '@loaders.gl/arrow'; +import {parseParquetWasm} from './lib/wasm/parse-parquet-wasm'; + // __VERSION__ is injected by babel-plugin-version-inline // @ts-ignore TS2304: Cannot find name '__VERSION__'. 
const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest'; @@ -17,7 +19,7 @@ export type ParquetWasmLoaderOptions = LoaderOptions & { }; /** Parquet WASM table loader */ -export const ParquetWasmLoader: Loader = { +export const ParquetWasmWorkerLoader: Loader = { name: 'Apache Parquet', id: 'parquet-wasm', module: 'parquet', @@ -35,3 +37,9 @@ export const ParquetWasmLoader: Loader = { + ...ParquetWasmWorkerLoader, + parse: parseParquetWasm +}; diff --git a/modules/parquet/src/workers/parquet-worker.ts b/modules/parquet/src/workers/parquet-worker.ts index da1f0586bf..5813be9586 100644 --- a/modules/parquet/src/workers/parquet-worker.ts +++ b/modules/parquet/src/workers/parquet-worker.ts @@ -2,6 +2,6 @@ // Copyright (c) vis.gl contributors import {createLoaderWorker} from '@loaders.gl/loader-utils'; -import {ParquetLoader} from '../index'; +import {ParquetLoader} from '../parquet-loader'; createLoaderWorker(ParquetLoader); diff --git a/modules/pmtiles/package.json b/modules/pmtiles/package.json index cc50a9b444..468f89190f 100644 --- a/modules/pmtiles/package.json +++ b/modules/pmtiles/package.json @@ -12,12 +12,17 @@ "url": "https://github.com/visgl/loaders.gl" }, "keywords": [ - "webgl", "loader", - "3d", - "mesh", - "point cloud", - "PCD" + "tile", + "mvt", + "image tile", + "tilejson", + "range request", + "cloud native", + "PM Tiles", + "pmtiles", + "webgl", + "webgpu" ], "types": "dist/index.d.ts", "main": "dist/index.cjs", diff --git a/modules/shapefile/src/lib/parsers/parse-shapefile.ts b/modules/shapefile/src/lib/parsers/parse-shapefile.ts index c8d38bf694..723660e58b 100644 --- a/modules/shapefile/src/lib/parsers/parse-shapefile.ts +++ b/modules/shapefile/src/lib/parsers/parse-shapefile.ts @@ -1,7 +1,12 @@ // import type {Feature} from '@loaders.gl/gis'; import {LoaderContext, parseInBatchesFromContext, parseFromContext} from '@loaders.gl/loader-utils'; import {binaryToGeometry, transformGeoJsonCoords} from '@loaders.gl/gis'; -import type 
{BinaryGeometry, Geometry, ObjectRowTableBatch} from '@loaders.gl/schema'; +import type { + BinaryGeometry, + Geometry, + ObjectRowTable, + ObjectRowTableBatch +} from '@loaders.gl/schema'; import {Proj4Projection} from '@math.gl/proj4'; import type {SHXOutput} from './parse-shx'; @@ -134,30 +139,45 @@ export async function parseShapefile( const geojsonGeometries = parseGeometries(geometries); // parse properties - let properties = []; + let propertyTable: ObjectRowTable | undefined; const dbfResponse = await context?.fetch(replaceExtension(context?.url!, 'dbf')); if (dbfResponse?.ok) { - properties = await parseFromContext( + propertyTable = await parseFromContext( dbfResponse as any, DBFLoader, - {dbf: {encoding: cpg || 'latin1'}}, + {dbf: {shape: 'object-row-table', encoding: cpg || 'latin1'}}, context! ); } - let features = joinProperties(geojsonGeometries, properties); + let features = joinProperties(geojsonGeometries, propertyTable?.data || []); if (reproject) { features = reprojectFeatures(features, prj, _targetCrs); } - return { - encoding: cpg, - prj, - shx, - header, - data: features - }; + switch (options?.shapefile?.shape) { + case 'geojson-table': + return { + // @ts-expect-error + shape: 'geojson-table', + type: 'FeatureCollection', + encoding: cpg, + schema: propertyTable?.schema || {metadata: {}, fields: []}, + prj, + shx, + header, + features + }; + default: + return { + encoding: cpg, + prj, + shx, + header, + data: features + }; + } } /** diff --git a/modules/shapefile/src/shapefile-loader.ts b/modules/shapefile/src/shapefile-loader.ts index 24980e2d23..8e0f34f737 100644 --- a/modules/shapefile/src/shapefile-loader.ts +++ b/modules/shapefile/src/shapefile-loader.ts @@ -1,16 +1,23 @@ import type {LoaderOptions, LoaderWithParser} from '@loaders.gl/loader-utils'; import {SHP_MAGIC_NUMBER} from './shp-loader'; import {parseShapefile, parseShapefileInBatches} from './lib/parsers/parse-shapefile'; +import {Batch, GeoJSONTable} from 
'@loaders.gl/schema'; // __VERSION__ is injected by babel-plugin-version-inline // @ts-ignore TS2304: Cannot find name '__VERSION__'. const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest'; +export type ShapefileLoaderOptions = LoaderOptions & { + shapefile?: { + shape?: 'geojson-table' | 'v3'; + }; +}; + /** * Shapefile loader * @note Shapefile is multifile format and requires providing additional files */ -export const ShapefileLoader: LoaderWithParser = { +export const ShapefileLoader: LoaderWithParser = { name: 'Shapefile', id: 'shapefile', module: 'shapefile', @@ -20,11 +27,15 @@ export const ShapefileLoader: LoaderWithParser = { mimeTypes: ['application/octet-stream'], tests: [new Uint8Array(SHP_MAGIC_NUMBER).buffer], options: { - shapefile: {}, + shapefile: { + shape: 'v3' + }, shp: { _maxDimensions: 4 } }, + // @ts-expect-error parse: parseShapefile, + // @ts-expect-error parseInBatches: parseShapefileInBatches }; diff --git a/modules/shapefile/test/data/graticules-and-countries/99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.cpg b/modules/shapefile/test/data/graticules-and-countries/99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.cpg new file mode 100644 index 0000000000..3ad133c048 --- /dev/null +++ b/modules/shapefile/test/data/graticules-and-countries/99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.cpg @@ -0,0 +1 @@ +UTF-8 \ No newline at end of file diff --git a/modules/shapefile/test/data/graticules-and-countries/99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.dbf b/modules/shapefile/test/data/graticules-and-countries/99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.dbf new file mode 100644 index 0000000000..4610a469d7 Binary files /dev/null and b/modules/shapefile/test/data/graticules-and-countries/99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.dbf differ diff --git 
a/modules/shapefile/test/data/graticules-and-countries/99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.prj b/modules/shapefile/test/data/graticules-and-countries/99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.prj new file mode 100644 index 0000000000..a30c00a55d --- /dev/null +++ b/modules/shapefile/test/data/graticules-and-countries/99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.prj @@ -0,0 +1 @@ +GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]] \ No newline at end of file diff --git a/modules/shapefile/test/data/graticules-and-countries/99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.shp b/modules/shapefile/test/data/graticules-and-countries/99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.shp new file mode 100644 index 0000000000..4949f1bdce Binary files /dev/null and b/modules/shapefile/test/data/graticules-and-countries/99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.shp differ diff --git a/modules/shapefile/test/data/graticules-and-countries/99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.shx b/modules/shapefile/test/data/graticules-and-countries/99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.shx new file mode 100644 index 0000000000..13f83cb2aa Binary files /dev/null and b/modules/shapefile/test/data/graticules-and-countries/99bfd9e7-bb42-4728-87b5-07f8c8ac631c2020328-1-1vef4ev.lu5nk.shx differ diff --git a/modules/shapefile/test/shapefile-loader.spec.js b/modules/shapefile/test/shapefile-loader.spec.js index 0cc2cb0566..777d73a71a 100644 --- a/modules/shapefile/test/shapefile-loader.spec.js +++ b/modules/shapefile/test/shapefile-loader.spec.js @@ -95,6 +95,7 @@ test('ShapefileLoader#load and reproject (from files or URLs)', async (t) => { const projection = new Proj4Projection({from: 'WGS84', to: 'EPSG:3857'}); for (let i = 0; i < json.features.length; i++) { + // 
@ts-ignore const shpFeature = data.data[i]; const jsonFeature = json.features[i]; const jsonPointGeom = projection.project(jsonFeature.geometry.coordinates); diff --git a/modules/tile-converter/src/i3s-server/routes/slpk-router.ts b/modules/tile-converter/src/i3s-server/routes/slpk-router.ts index 73ca3d7b62..145d1b6759 100644 --- a/modules/tile-converter/src/i3s-server/routes/slpk-router.ts +++ b/modules/tile-converter/src/i3s-server/routes/slpk-router.ts @@ -19,7 +19,7 @@ sceneServerRouter.get('*', async function (req, res, next) { export const router = express.Router(); router.get('*', async function (req, res, next) { - const file = await getFileByUrl(req.path); + const file = await getFileByUrl(req.path.replace(/\/+$/, '')); if (file) { res.send(Buffer.from(file)); } else { diff --git a/modules/wkt/src/lib/parse-wkb.ts b/modules/wkt/src/lib/parse-wkb.ts index 7410f25db4..3af4482fbe 100644 --- a/modules/wkt/src/lib/parse-wkb.ts +++ b/modules/wkt/src/lib/parse-wkb.ts @@ -23,7 +23,10 @@ export function parseWKB( switch (shape) { case 'binary-geometry': return binaryGeometry; + case 'geojson-geometry': + return binaryToGeometry(binaryGeometry); case 'geometry': + console.error('WKBLoader: "geometry" shape is deprecated, use "binary-geometry" instead'); return binaryToGeometry(binaryGeometry); default: throw new Error(shape); diff --git a/modules/wkt/src/twkb-loader.ts b/modules/wkt/src/twkb-loader.ts index 45add0b394..53961abd02 100644 --- a/modules/wkt/src/twkb-loader.ts +++ b/modules/wkt/src/twkb-loader.ts @@ -8,7 +8,7 @@ import {parseTWKBGeometry, isTWKB} from './lib/parse-twkb'; export type WKBLoaderOptions = LoaderOptions & { wkb?: { - shape: 'binary-geometry' | 'geometry'; + shape: 'geojson-geometry' | 'binary-geometry'; }; }; @@ -28,7 +28,7 @@ export const TWKBWorkerLoader: Loader = { tests: [isTWKB], options: { wkb: { - shape: 'binary-geometry' + shape: 'binary-geometry' // 'geojson-geometry' } } }; diff --git a/modules/wkt/src/wkb-loader.ts 
b/modules/wkt/src/wkb-loader.ts index 9e05a4e2c5..17c6b6305a 100644 --- a/modules/wkt/src/wkb-loader.ts +++ b/modules/wkt/src/wkb-loader.ts @@ -9,14 +9,15 @@ import {isWKB} from './lib/parse-wkb-header'; export type WKBLoaderOptions = LoaderOptions & { wkb?: { - shape: 'binary-geometry' | 'geometry'; + /** 'geometry' is deprecated use 'geojson-geometry' */ + shape: 'geojson-geometry' | 'binary-geometry' | 'geometry'; }; }; /** * Worker loader for WKB (Well-Known Binary) */ -export const WKBWorkerLoader: Loader = { +export const WKBWorkerLoader: Loader = { name: 'WKB', id: 'wkb', module: 'wkt', @@ -29,7 +30,7 @@ export const WKBWorkerLoader: Loader = { name: 'WKT (Well-Known Text)', @@ -32,6 +33,7 @@ export const WKTWorkerLoader: Loader = { testText: isWKT, options: { wkt: { + shape: 'geojson-geometry', crs: true } } diff --git a/modules/wkt/test/hex-wkb-loader.spec.ts b/modules/wkt/test/hex-wkb-loader.spec.ts index 90a37ee66b..5207c3e319 100644 --- a/modules/wkt/test/hex-wkb-loader.spec.ts +++ b/modules/wkt/test/hex-wkb-loader.spec.ts @@ -17,12 +17,18 @@ test('HexWKBLoader#2D', async (t) => { for (const testCase of Object.values(TEST_CASES)) { // Little endian if (testCase.wkbHex && testCase.binary) { - t.deepEqual(parseSync(testCase.wkbHex, HexWKBLoader), testCase.binary); + t.deepEqual( + parseSync(testCase.wkbHex, HexWKBLoader, {wkb: {shape: 'binary-geometry'}}), + testCase.binary + ); } // Big endian if (testCase.wkbHexXdr && testCase.binary) { - t.deepEqual(parseSync(testCase.wkbHexXdr, HexWKBLoader), testCase.binary); + t.deepEqual( + parseSync(testCase.wkbHexXdr, HexWKBLoader, {wkb: {shape: 'binary-geometry'}}), + testCase.binary + ); } } @@ -38,7 +44,7 @@ test('HexWKBLoader#Z', async (t) => { // Little endian if (testCase.wkbHex && testCase.binary) { t.deepEqual( - parseSync(testCase.wkbHex, HexWKBLoader), + parseSync(testCase.wkbHex, HexWKBLoader, {wkb: {shape: 'binary-geometry'}}), testCase.binary, testCase.wkbHex.slice(0, 60) ); @@ -47,7 +53,7 @@ 
test('HexWKBLoader#Z', async (t) => { // Big endian if (testCase.wkbHexXdr && testCase.binary) { t.deepEqual( - parseSync(testCase.wkbHexXdr, HexWKBLoader), + parseSync(testCase.wkbHexXdr, HexWKBLoader, {wkb: {shape: 'binary-geometry'}}), testCase.binary, testCase.wkbHexXdr.slice(0, 60) ); diff --git a/modules/wkt/test/twkb-loader.spec.ts b/modules/wkt/test/twkb-loader.spec.ts index 974f252b2c..2b694677c0 100644 --- a/modules/wkt/test/twkb-loader.spec.ts +++ b/modules/wkt/test/twkb-loader.spec.ts @@ -51,7 +51,7 @@ test('TWKBLoader#2D', async (t) => { // } // if (testCase.wkbXdr && testCase.binary && testCase.geoJSON) { -// t.deepEqual(parseSync(testCase.twkbXdr, TWKBLoader, {wkb: {shape: 'geometry'}}), testCase.geoJSON); +// t.deepEqual(parseSync(testCase.twkbXdr, TWKBLoader, {wkb: {shape: 'geojson-geometry'}}), testCase.geoJSON); // } // } diff --git a/modules/wkt/test/wkb-loader.spec.ts b/modules/wkt/test/wkb-loader.spec.ts index c34e89e40c..1f9974a45d 100644 --- a/modules/wkt/test/wkb-loader.spec.ts +++ b/modules/wkt/test/wkb-loader.spec.ts @@ -18,13 +18,19 @@ test('WKBLoader#2D', async (t) => { // Little endian if (testCase.wkb && testCase.binary) { t.ok(isWKB(testCase.wkb), 'isWKB(2D)'); - t.deepEqual(parseSync(testCase.wkb, WKBLoader), testCase.binary); + t.deepEqual( + parseSync(testCase.wkb, WKBLoader, {wkb: {shape: 'binary-geometry'}}), + testCase.binary + ); } // Big endian if (testCase.wkbXdr && testCase.binary) { t.ok(isWKB(testCase.wkbXdr), 'isWKB(2D)'); - t.deepEqual(parseSync(testCase.wkbXdr, WKBLoader), testCase.binary); + t.deepEqual( + parseSync(testCase.wkbXdr, WKBLoader, {wkb: {shape: 'binary-geometry'}}), + testCase.binary + ); } } diff --git a/modules/wms/src/services/ogc/wms-service.ts b/modules/wms/src/services/ogc/wms-service.ts index 13b78f3cfe..099deb19dd 100644 --- a/modules/wms/src/services/ogc/wms-service.ts +++ b/modules/wms/src/services/ogc/wms-service.ts @@ -234,8 +234,13 @@ export class WMSSource extends ImageSource { } async 
getImage(parameters: GetImageParameters): Promise { - // @ts-expect-error - return await this.getMap(parameters); + // Replace the GetImage `boundingBox` parameter with the WMS flat `bbox` parameter. + const {boundingBox, bbox, ...rest} = parameters; + const wmsParameters: WMSGetMapParameters = { + bbox: boundingBox ? [...boundingBox[0], ...boundingBox[1]] : bbox!, + ...rest + }; + return await this.getMap(wmsParameters); } normalizeMetadata(capabilities: WMSCapabilities): ImageSourceMetadata { @@ -498,16 +503,6 @@ export class WMSSource extends ImageSource { } break; - case 'boundingBox': - // Coordinate order is flipped for certain CRS in WMS 1.3.0 - const boundingBox = value as [[number, number], [number, number]]; - let bbox2: number[] | null = [...boundingBox[0], ...boundingBox[1]]; - bbox2 = this._flipBoundingBox(boundingBox, wmsParameters); - if (bbox2) { - value = bbox2; - } - break; - case 'bbox': // Coordinate order is flipped for certain CRS in WMS 1.3.0 const bbox = this._flipBoundingBox(value, wmsParameters); diff --git a/modules/wms/test/services/wms-service.spec.ts b/modules/wms/test/services/wms-service.spec.ts index 33d5a3e139..e1faae4ed4 100644 --- a/modules/wms/test/services/wms-service.spec.ts +++ b/modules/wms/test/services/wms-service.spec.ts @@ -151,3 +151,35 @@ test('WMSSource#fetch override', async (t) => { t.end(); }); }); + +test('WMSSource#getImage', async (t) => { + const wmsService = new WMSSource({url: WMS_SERVICE_URL}); + let getMapParameters; + + // @ts-ignore + wmsService.getMap = (parameters) => { + getMapParameters = parameters; + }; + + await wmsService.getImage({ + width: 800, + height: 600, + boundingBox: [ + [30, 70], + [35, 75] + ], + layers: ['oms'] + }); + + t.deepEqual( + getMapParameters, + { + width: 800, + height: 600, + bbox: [30, 70, 35, 75], + layers: ['oms'] + }, + 'boundingBox transformed to bbox' + ); + t.end(); +}); diff --git a/modules/zip/src/index.ts b/modules/zip/src/index.ts index d95eda3725..7f6635098b 
100644 --- a/modules/zip/src/index.ts +++ b/modules/zip/src/index.ts @@ -8,7 +8,7 @@ export {TarBuilder} from './tar-builder'; export { parseZipCDFileHeader, makeZipCDHeaderIterator, - signature as cdSignature, + signature as CD_HEADER_SIGNATURE, generateCDHeader } from './parse-zip/cd-file-header'; export { diff --git a/modules/zip/src/parse-zip/cd-file-header.ts b/modules/zip/src/parse-zip/cd-file-header.ts index a6b9beb4dc..ec75d9cfaf 100644 --- a/modules/zip/src/parse-zip/cd-file-header.ts +++ b/modules/zip/src/parse-zip/cd-file-header.ts @@ -249,107 +249,123 @@ export function generateCDHeader(options: GenerateCDOptions): ArrayBuffer { /** Fields map */ const ZIP_HEADER_FIELDS = [ + // Central directory file header signature = 0x02014b50 { offset: 0, size: 4, - description: 'Central directory file header signature = 0x02014b50', default: new DataView(signature.buffer).getUint32(0, true) }, + + // Version made by { offset: 4, size: 2, - description: 'Version made by', default: 45 }, + + // Version needed to extract (minimum) { offset: 6, size: 2, - description: 'Version needed to extract (minimum)', default: 45 }, + + // General purpose bit flag { offset: 8, size: 2, - description: 'General purpose bit flag', default: 0 }, + + // Compression method { offset: 10, size: 2, - description: 'Compression method', default: 0 }, + + // File last modification time { offset: 12, size: 2, - description: 'File last modification time', default: 0 }, + + // File last modification date { offset: 14, size: 2, - description: 'File last modification date', default: 0 }, + + // CRC-32 of uncompressed data { offset: 16, size: 4, - description: 'CRC-32 of uncompressed data', name: 'crc32' }, + + // Compressed size (or 0xffffffff for ZIP64) { offset: 20, size: 4, - description: 'Compressed size (or 0xffffffff for ZIP64)', name: 'length' }, + + // Uncompressed size (or 0xffffffff for ZIP64) { offset: 24, size: 4, - description: 'Uncompressed size (or 0xffffffff for ZIP64)', name: 
'length' }, + + // File name length (n) { offset: 28, size: 2, - description: 'File name length (n)', name: 'fnlength' }, + + // Extra field length (m) { offset: 30, size: 2, - description: 'Extra field length (m)', default: 0, name: 'extraLength' }, + + // File comment length (k) { offset: 32, size: 2, - description: 'File comment length (k)', default: 0 }, + + // Disk number where file starts (or 0xffff for ZIP64) { offset: 34, size: 2, - description: 'Disk number where file starts (or 0xffff for ZIP64)', default: 0 }, + + // Internal file attributes { offset: 36, size: 2, - description: 'Internal file attributes', default: 0 }, + + // External file attributes { offset: 38, size: 4, - description: 'External file attributes', default: 0 }, + + // Relative offset of local file header { offset: 42, size: 4, - description: 'Relative offset of local file header ', name: 'offset' } ]; diff --git a/modules/zip/src/parse-zip/zip64-info-generation.ts b/modules/zip/src/parse-zip/zip64-info-generation.ts index bfdde03333..aa197f51ac 100644 --- a/modules/zip/src/parse-zip/zip64-info-generation.ts +++ b/modules/zip/src/parse-zip/zip64-info-generation.ts @@ -58,29 +58,33 @@ export const NUMBER_SETTERS: {[key: number]: NumberSetter} = { /** zip64 info fields description, we need it as a pattern to build a zip64 info */ const ZIP64_FIELDS = [ + // Header ID 0x0001 { size: 2, - description: 'Header ID 0x0001', default: new DataView(signature.buffer).getUint16(0, true) }, + + // Size of the extra field chunk (8, 16, 24 or 28) { size: 2, - description: 'Size of the extra field chunk (8, 16, 24 or 28)', name: 'zip64Length' }, + + // Original uncompressed file size { size: 8, - description: 'Original uncompressed file size', name: 'size' }, + + // Size of compressed data { size: 8, - description: 'Size of compressed data', name: 'size' }, + + // Offset of local header record { size: 8, - description: 'Offset of local header record', name: 'offset' } ]; diff --git a/package.json 
b/package.json index 2582db00da..de50f4a433 100644 --- a/package.json +++ b/package.json @@ -62,7 +62,7 @@ "@typescript-eslint/eslint-plugin": "^6.0.0", "@typescript-eslint/parser" : "^6.0.0", "prettier": "3.0.3", - "typescript": "^5.2.2" + "typescript": "^5.3.0" }, "volta": { "node": "18.18.2", diff --git a/test/apps/typescript-test/package.json b/test/apps/typescript-test/package.json index 03cbaedc52..5826979b18 100644 --- a/test/apps/typescript-test/package.json +++ b/test/apps/typescript-test/package.json @@ -28,7 +28,7 @@ "rimraf": "^3.0.0", "run-script-os": "^1.1.1", "ts-node": "^8.3.0", - "typescript": "^5.0.4" + "typescript": "^5.3.0" }, "dependencies": { "@loaders.gl/shapefile": "^4.0.0" diff --git a/website/src/examples-sidebar.js b/website/src/examples-sidebar.js index 50f71d0c8e..f6916787ed 100644 --- a/website/src/examples-sidebar.js +++ b/website/src/examples-sidebar.js @@ -17,12 +17,17 @@ }, { type: 'category', - label: 'Geospatial Loaders', + label: 'Geospatial Table Loaders', items: [ + 'geoarrow', 'geoparquet', - // 'geopackage', sql.js bundling issue... + 'geopackage', 'flatgeobuf', - 'geojson' + 'geojson', + 'geospatial/shapefile', + 'geospatial/kml', + 'geospatial/gpx', + 'geospatial/tcx' ], }, { @@ -30,15 +35,8 @@ label: 'Geospatial Tile Loaders', items: [ 'pmtiles', - 'wms' - ] - }, - { - type: 'category', - label: '3D Tile Loaders', - items: [ + 'wms', 'i3s', - // 'i3s-arcgis', '3d-tiles' ] }, diff --git a/website/src/examples/geoarrow.mdx b/website/src/examples/geoarrow.mdx new file mode 100644 index 0000000000..03dee21b48 --- /dev/null +++ b/website/src/examples/geoarrow.mdx @@ -0,0 +1,7 @@ +# GeoArrow + +import Demo from 'examples/website/geospatial/app'; + +
+ +
diff --git a/website/src/examples/geopackage.mdx b/website/src/examples/geopackage.mdx index 492aa23008..4b71140a06 100644 --- a/website/src/examples/geopackage.mdx +++ b/website/src/examples/geopackage.mdx @@ -1,7 +1,14 @@ # GeoPackage +

+The GeoPackage module uses sql.js which has bundling issues with docusaurus, +however the `GeoPackageLoader` has been used successfully with vite / esbuild. +

+ + \ No newline at end of file diff --git a/website/src/examples/geospatial/gpx.mdx b/website/src/examples/geospatial/gpx.mdx new file mode 100644 index 0000000000..ca9023c522 --- /dev/null +++ b/website/src/examples/geospatial/gpx.mdx @@ -0,0 +1,7 @@ +# GPX + +import Demo from 'examples/website/geospatial/app'; + +
+ +
diff --git a/website/src/examples/geospatial/kml.mdx b/website/src/examples/geospatial/kml.mdx new file mode 100644 index 0000000000..8de22f8115 --- /dev/null +++ b/website/src/examples/geospatial/kml.mdx @@ -0,0 +1,7 @@ +# KML + +import Demo from 'examples/website/geospatial/app'; + +
+ +
diff --git a/website/src/examples/geospatial/shapefile.mdx b/website/src/examples/geospatial/shapefile.mdx new file mode 100644 index 0000000000..679eaf10f7 --- /dev/null +++ b/website/src/examples/geospatial/shapefile.mdx @@ -0,0 +1,7 @@ +# Shapefile + +import Demo from 'examples/website/geospatial/app'; + +
+ +
diff --git a/website/src/examples/geospatial/tcx.mdx b/website/src/examples/geospatial/tcx.mdx new file mode 100644 index 0000000000..06b375513b --- /dev/null +++ b/website/src/examples/geospatial/tcx.mdx @@ -0,0 +1,7 @@ +# TCX + +import Demo from 'examples/website/geospatial/app'; + +
+ +
diff --git a/website/src/examples/index.mdx b/website/src/examples/index.mdx index 53c03965cd..5446a32856 100644 --- a/website/src/examples/index.mdx +++ b/website/src/examples/index.mdx @@ -2,4 +2,7 @@ import {ExamplesIndex} from '../components'; +*Notes: These examples use [deck.gl](https://deck.gl) to visualize loaded data and only cover a subset of the available loaders. +Check the [loader catalog](/docs/modules/arrow/api-reference/arrow-loader) to see if the format you need is available.* + `/images/examples/${item.docId || item.label.toLowerCase()}.jpg`} /> diff --git a/website/static/images/examples/flatgeobuf.jpg b/website/static/images/examples/flatgeobuf.jpg index 7bc34d041e..e52fdd67bf 100644 Binary files a/website/static/images/examples/flatgeobuf.jpg and b/website/static/images/examples/flatgeobuf.jpg differ diff --git a/website/static/images/examples/geoarrow.jpg b/website/static/images/examples/geoarrow.jpg new file mode 100644 index 0000000000..205c9c2bc4 Binary files /dev/null and b/website/static/images/examples/geoarrow.jpg differ diff --git a/website/static/images/examples/geojson.jpg b/website/static/images/examples/geojson.jpg index ae79906471..bc628d74ab 100644 Binary files a/website/static/images/examples/geojson.jpg and b/website/static/images/examples/geojson.jpg differ diff --git a/website/static/images/examples/geospatial/gpx.jpg b/website/static/images/examples/geospatial/gpx.jpg new file mode 100644 index 0000000000..0c90803697 Binary files /dev/null and b/website/static/images/examples/geospatial/gpx.jpg differ diff --git a/website/static/images/examples/geospatial/kml.jpg b/website/static/images/examples/geospatial/kml.jpg new file mode 100644 index 0000000000..7f651cc0d8 Binary files /dev/null and b/website/static/images/examples/geospatial/kml.jpg differ diff --git a/website/static/images/examples/geospatial/shapefile.jpg b/website/static/images/examples/geospatial/shapefile.jpg new file mode 100644 index 0000000000..c540d21719 
Binary files /dev/null and b/website/static/images/examples/geospatial/shapefile.jpg differ diff --git a/website/static/images/examples/geospatial/tcx.jpg b/website/static/images/examples/geospatial/tcx.jpg new file mode 100644 index 0000000000..3ed328c5a3 Binary files /dev/null and b/website/static/images/examples/geospatial/tcx.jpg differ diff --git a/yarn.lock b/yarn.lock index 095a409763..441147a1b7 100644 --- a/yarn.lock +++ b/yarn.lock @@ -11384,10 +11384,10 @@ typedarray@^0.0.6: resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777" integrity sha512-/aCDEGatGvZ2BIk+HmLf4ifCJFwvKFNb9/JeZPMulfgFracn9QFcAf5GO8B/mweUjSoblS5In0cWhqpfs/5PQA== -typescript@^5.2.2, typescript@~4.6.0: - version "5.2.2" - resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.2.2.tgz#5ebb5e5a5b75f085f22bc3f8460fba308310fa78" - integrity sha512-mI4WrpHsbCIcwT9cF4FZvr80QUeKvsUsUvKDoR+X/7XHQH98xYD8YHZg7ANtz2GtZt/CBq2QJ0thkGJMHfqc1w== +typescript@^5.3.0, typescript@~4.6.0: + version "5.3.2" + resolved "https://registry.yarnpkg.com/typescript/-/typescript-5.3.2.tgz#00d1c7c1c46928c5845c1ee8d0cc2791031d4c43" + integrity sha512-6l+RyNy7oAHDfxC4FzSJcz9vnjTKxrLpDG5M2Vu4SHRVNg6xzqZp6LYSR9zjqQTu8DU/f5xwxUdADOkbrIX2gQ== typical@^4.0.0: version "4.0.0"