diff --git a/docs/config.json b/docs/config.json
index b5029ae5f3..01d7369b9c 100644
--- a/docs/config.json
+++ b/docs/config.json
@@ -48,6 +48,7 @@
"OrientedImageLayer",
"PointCloudLayer",
"PotreeLayer",
+ "CopcLayer",
"C3DTilesLayer",
"LabelLayer",
"GlobeLayer",
@@ -72,7 +73,8 @@
"OrientedImageSource",
"PotreeSource",
"VectorTilesSource",
- "EntwinePointTileSource"
+ "EntwinePointTileSource",
+ "CopcSource"
],
"Provider": [
diff --git a/examples/config.json b/examples/config.json
index d1014d01dd..8dabb2c9a1 100644
--- a/examples/config.json
+++ b/examples/config.json
@@ -27,7 +27,8 @@
"potree_3d_map": "Potree 3D map",
"laz_dragndrop": "LAS/LAZ viewer",
"entwine_simple_loader": "Entwine loader",
- "entwine_3d_loader": "Entwine 3D loader"
+ "entwine_3d_loader": "Entwine 3D loader",
+ "copc_simple_loader": "COPC loader"
},
"Vector tiles": {
diff --git a/examples/copc_simple_loader.html b/examples/copc_simple_loader.html
new file mode 100644
index 0000000000..da58880913
--- /dev/null
+++ b/examples/copc_simple_loader.html
@@ -0,0 +1,128 @@
+
+
+
+
+ Itowns - COPC loader
+
+
+
+
+
+
+
+
+
+
+ Specify the URL of a COPC file to load:
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/Core/CopcNode.js b/src/Core/CopcNode.js
new file mode 100644
index 0000000000..0dd3735b2a
--- /dev/null
+++ b/src/Core/CopcNode.js
@@ -0,0 +1,194 @@
+import * as THREE from 'three';
+import { Hierarchy } from 'copc';
+import PointCloudNode from 'Core/PointCloudNode';
+
+const size = new THREE.Vector3();
+const position = new THREE.Vector3();
+const translation = new THREE.Vector3();
+
+function buildId(depth, x, y, z) {
+ return `${depth}-${x}-${y}-${z}`;
+}
+
+class CopcNode extends PointCloudNode {
+ /**
+ * Constructs a new instance of a COPC Octree node
+ *
+ * @param {number} depth - Depth within the octree
+ * @param {number} x - X position within the octree
+ * @param {number} y - Y position within the octree
+ * @param {number} z - Z position within the octree
+ * @param {number} entryOffset - Offset from the beginning of the file of
+ * the node entry
+ * @param {number} entryLength - Size of the node entry
+ * @param {CopcLayer} layer - Parent COPC layer
+ * @param {number} [numPoints=0] - Number of points given by this entry
+ */
+ constructor(depth, x, y, z, entryOffset, entryLength, layer, numPoints = 0) {
+ super(numPoints, layer);
+ this.isCopcNode = true;
+
+ this.entryOffset = entryOffset;
+ this.entryLength = entryLength;
+ this.layer = layer;
+ this.depth = depth;
+ this.x = x;
+ this.y = y;
+ this.z = z;
+ }
+
+ get id() {
+ return buildId(this.depth, this.x, this.y, this.z);
+ }
+
+ get octreeIsLoaded() {
+ return this.numPoints >= 0;
+ }
+
+ /**
+ * @param {number} offset
+ * @param {number} size
+ */
+ async _fetch(offset, size) {
+ return this.layer.source.fetcher(this.layer.source.url, {
+ ...this.layer.source.networkOptions,
+ headers: {
+ ...this.layer.source.networkOptions.headers,
+ range: `bytes=${offset}-${offset + size - 1}`,
+ },
+ });
+ }
+
+ /**
+ * Create an (A)xis (A)ligned (B)ounding (B)ox for the given node given
+ * `this` is its parent.
+ * @param {CopcNode} node - The child node
+ */
+ createChildAABB(node) {
+ // factor to apply, based on the depth difference (can be > 1)
+ const f = 2 ** (node.depth - this.depth);
+
+ // size of the child node bbox (Vector3), based on the size of the
+ // parent node, and divided by the factor
+ this.bbox.getSize(size).divideScalar(f);
+
+ // initialize the child node bbox at the location of the parent node bbox
+ node.bbox.min.copy(this.bbox.min);
+
+ // position of the parent node, if it was at the same depth as the
+ // child, found by multiplying the tree position by the factor
+ position.copy(this).multiplyScalar(f);
+
+ // difference in position between the two nodes, at child depth, and
+ // scale it using the size
+ translation.subVectors(node, position).multiply(size);
+
+ // apply the translation to the child node bbox
+ node.bbox.min.add(translation);
+
+ // use the size computed above to set the max
+ node.bbox.max.copy(node.bbox.min).add(size);
+ }
+
+ /**
+ * Create a CopcNode from the provided subtree and add it as child
+ * of the current node.
+ * @param {number} depth - Child node depth in the octree
+ * @param {number} x - Child node x position in the octree
+ * @param {number} y - Child node y position in the octree
+ * @param {number} z - Child node z position in the octree
+ * @param {Hierarchy.Subtree} hierarchy - Octree's subtree
+ * @param {CopcNode[]} stack - Stack of node candidates for traversal
+ */
+ findAndCreateChild(depth, x, y, z, hierarchy, stack) {
+ const id = buildId(depth, x, y, z);
+
+ let pointCount;
+ let offset;
+ let byteSize;
+
+ const node = hierarchy.nodes[id];
+ if (node) {
+ pointCount = node.pointCount;
+ offset = node.pointDataOffset;
+ byteSize = node.pointDataLength;
+ } else {
+ const page = hierarchy.pages[id];
+ if (!page) { return; }
+ pointCount = -1;
+ offset = page.pageOffset;
+ byteSize = page.pageLength;
+ }
+
+ const child = new CopcNode(
+ depth,
+ x,
+ y,
+ z,
+ offset,
+ byteSize,
+ this.layer,
+ pointCount,
+ );
+ this.add(child);
+ stack.push(child);
+ }
+
+ async loadOctree() {
+ // Load hierarchy
+ const buffer = await this._fetch(this.entryOffset, this.entryLength);
+ const hierarchy = await Hierarchy.parse(new Uint8Array(buffer));
+
+ // Update current node entry from loaded subtree
+ const node = hierarchy.nodes[this.id];
+ if (!node) {
+ return Promise.reject('[CopcNode]: Ill-formed data, entry not found in hierarchy.');
+ }
+ this.numPoints = node.pointCount;
+ this.entryOffset = node.pointDataOffset;
+ this.entryLength = node.pointDataLength;
+
+ // Load subtree entries
+ const stack = [];
+ stack.push(this);
+ while (stack.length) {
+ const node = stack.shift();
+ const depth = node.depth + 1;
+ const x = node.x * 2;
+ const y = node.y * 2;
+ const z = node.z * 2;
+
+ node.findAndCreateChild(depth, x, y, z, hierarchy, stack);
+ node.findAndCreateChild(depth, x + 1, y, z, hierarchy, stack);
+ node.findAndCreateChild(depth, x, y + 1, z, hierarchy, stack);
+ node.findAndCreateChild(depth, x + 1, y + 1, z, hierarchy, stack);
+ node.findAndCreateChild(depth, x, y, z + 1, hierarchy, stack);
+ node.findAndCreateChild(depth, x + 1, y, z + 1, hierarchy, stack);
+ node.findAndCreateChild(depth, x, y + 1, z + 1, hierarchy, stack);
+ node.findAndCreateChild(depth, x + 1, y + 1, z + 1, hierarchy, stack);
+ }
+ }
+
+ /**
+ * Load the COPC Buffer geometry for this node.
+ * @returns {Promise}
+ */
+ async load() {
+ if (!this.octreeIsLoaded) {
+ await this.loadOctree();
+ }
+
+ const buffer = await this._fetch(this.entryOffset, this.entryLength);
+ const geometry = await this.layer.source.parser(buffer, {
+ in: {
+ ...this.layer.source,
+ pointCount: this.numPoints,
+ },
+ out: this.layer,
+ });
+
+ return geometry;
+ }
+}
+
+export default CopcNode;
diff --git a/src/Layer/CopcLayer.js b/src/Layer/CopcLayer.js
new file mode 100644
index 0000000000..a40ec1f3ff
--- /dev/null
+++ b/src/Layer/CopcLayer.js
@@ -0,0 +1,60 @@
+import * as THREE from 'three';
+import CopcNode from 'Core/CopcNode';
+import PointCloudLayer from 'Layer/PointCloudLayer';
+
+/**
+ * @classdesc
+ * A layer for [Cloud Optimised Point Cloud](https://copc.io) (COPC) datasets.
+ * See {@link PointCloudLayer} class for documentation on base properties.
+ *
+ * @extends {PointCloudLayer}
+ *
+ * @example
+ * // Create a new COPC layer
+ * const copcSource = new CopcSource({
+ * url: 'https://s3.amazonaws.com/hobu-lidar/autzen-classified.copc.laz',
+ * crs: 'EPSG:4978',
+ * colorDepth: 16, // bit-depth of 'color' attribute (either 8 or 16 bits)
+ * });
+ *
+ * const copcLayer = new CopcLayer('COPC', {
+ * source: copcSource,
+ * });
+ *
+ * View.prototype.addLayer.call(view, copcLayer);
+ */
+class CopcLayer extends PointCloudLayer {
+ /**
+ * @param {string} id - Unique id of the layer.
+ * @param {Object} config - See {@link PointCloudLayer} for base pointcloud
+ * options.
+ */
+ constructor(id, config) {
+ super(id, config);
+ this.isCopcLayer = true;
+
+ const resolve = () => this;
+ this.whenReady = this.source.whenReady.then((/** @type {CopcSource} */ source) => {
+ const { cube, rootHierarchyPage } = source.info;
+ const { pageOffset, pageLength } = rootHierarchyPage;
+
+ this.root = new CopcNode(0, 0, 0, 0, pageOffset, pageLength, this, -1);
+ this.root.bbox.min.fromArray(cube, 0);
+ this.root.bbox.max.fromArray(cube, 3);
+
+ this.minElevationRange = source.header.min[2];
+ this.maxElevationRange = source.header.max[2];
+
+ this.scale = new THREE.Vector3(1.0, 1.0, 1.0);
+ this.offset = new THREE.Vector3(0.0, 0.0, 0.0);
+
+ return this.root.loadOctree().then(resolve);
+ });
+ }
+
+ get spacing() {
+ return this.source.info.spacing;
+ }
+}
+
+export default CopcLayer;
diff --git a/src/Main.js b/src/Main.js
index 26b484dd58..4969bbff5c 100644
--- a/src/Main.js
+++ b/src/Main.js
@@ -61,6 +61,7 @@ export { default as GlobeLayer } from 'Core/Prefab/Globe/GlobeLayer';
export { default as PlanarLayer } from 'Core/Prefab/Planar/PlanarLayer';
export { default as LabelLayer } from 'Layer/LabelLayer';
export { default as EntwinePointTileLayer } from 'Layer/EntwinePointTileLayer';
+export { default as CopcLayer } from 'Layer/CopcLayer';
export { default as GeoidLayer } from 'Layer/GeoidLayer';
// Sources provided by default in iTowns
@@ -79,6 +80,7 @@ export { default as C3DTilesSource } from 'Source/C3DTilesSource';
export { default as C3DTilesIonSource } from 'Source/C3DTilesIonSource';
export { default as C3DTilesGoogleSource } from 'Source/C3DTilesGoogleSource';
export { default as EntwinePointTileSource } from 'Source/EntwinePointTileSource';
+export { default as CopcSource } from 'Source/CopcSource';
// Parsers provided by default in iTowns
// Custom parser can be implemented as wanted, as long as the main function
diff --git a/src/Parser/LASLoader.js b/src/Parser/LASLoader.js
index 44bf8ec92d..ce0ca491df 100644
--- a/src/Parser/LASLoader.js
+++ b/src/Parser/LASLoader.js
@@ -15,6 +15,10 @@ import { Las } from 'copc';
* xOffset, zOffset]`) added to the scaled X, Y, Z point record values.
*/
+function defaultColorEncoding(header) {
+ return (header.majorVersion === 1 && header.minorVersion <= 2) ? 8 : 16;
+}
+
/**
* @classdesc
* Loader for LAS and LAZ (LASZip) point clouds. It uses the copc.js library and
@@ -131,6 +135,38 @@ class LASLoader {
this._wasmPromise = null;
}
+ /**
+ * Parses a LAS or LAZ (LASZip) chunk. Note that this function is
+ * **CPU-bound** and shall be parallelised in a dedicated worker.
+ * @param {Uint8Array} data - File chunk data.
+ * @param {Object} options - Parsing options.
+ * @param {Header} options.header - Partial LAS header.
+ * @param {number} options.pointCount - Number of points encoded in this
+ * data chunk.
+ * @param {Las.ExtraBytes[]} [options.eb] - Extra bytes LAS VLRs
+ * headers.
+ * @param {8 | 16} [options.colorDepth] - Color depth encoding (in bits).
+ * Either 8 or 16 bits. Defaults to 8 bits for LAS 1.2 and 16 bits for later
+ * versions (as mandated by the specification).
+ */
+ async parseChunk(data, options) {
+ const { header, eb, pointCount } = options;
+ const { pointDataRecordFormat, pointDataRecordLength } = header;
+
+ const colorDepth = options.colorDepth ?? defaultColorEncoding(header);
+
+ const bytes = new Uint8Array(data);
+ const pointData = await Las.PointData.decompressChunk(bytes, {
+ pointCount,
+ pointDataRecordFormat,
+ pointDataRecordLength,
+ }, this._initDecoder());
+
+ const view = Las.View.create(pointData, header, eb);
+ const attributes = this._parseView(view, { colorDepth });
+ return { attributes };
+ }
+
/**
* Parses a LAS or LAZ (LASZip) file. Note that this function is
* **CPU-bound** and shall be parallelised in a dedicated worker.
@@ -146,8 +182,7 @@ class LASLoader {
const pointData = await Las.PointData.decompressFile(bytes, this._initDecoder());
const header = Las.Header.parse(bytes);
- const colorDepth = options.colorDepth ??
- ((header.majorVersion === 1 && header.minorVersion <= 2) ? 8 : 16);
+ const colorDepth = options.colorDepth ?? defaultColorEncoding(header);
const getter = async (begin, end) => bytes.slice(begin, end);
const vlrs = await Las.Vlr.walk(getter, header);
diff --git a/src/Parser/LASParser.js b/src/Parser/LASParser.js
index 12a6510e86..9d1f7f630b 100644
--- a/src/Parser/LASParser.js
+++ b/src/Parser/LASParser.js
@@ -3,6 +3,39 @@ import LASLoader from 'Parser/LASLoader';
const lasLoader = new LASLoader();
+function buildBufferGeometry(attributes) {
+ const geometry = new THREE.BufferGeometry();
+
+ const positionBuffer = new THREE.BufferAttribute(attributes.position, 3);
+ geometry.setAttribute('position', positionBuffer);
+
+ const intensityBuffer = new THREE.BufferAttribute(attributes.intensity, 1);
+ geometry.setAttribute('intensity', intensityBuffer);
+
+ const returnNumber = new THREE.BufferAttribute(attributes.returnNumber, 1);
+ geometry.setAttribute('returnNumber', returnNumber);
+
+ const numberOfReturns = new THREE.BufferAttribute(attributes.numberOfReturns, 1);
+ geometry.setAttribute('numberOfReturns', numberOfReturns);
+
+ const classBuffer = new THREE.BufferAttribute(attributes.classification, 1);
+ geometry.setAttribute('classification', classBuffer);
+
+ const pointSourceID = new THREE.BufferAttribute(attributes.pointSourceID, 1);
+ geometry.setAttribute('pointSourceID', pointSourceID);
+
+ if (attributes.color) {
+ const colorBuffer = new THREE.BufferAttribute(attributes.color, 4, true);
+ geometry.setAttribute('color', colorBuffer);
+ }
+ const scanAngle = new THREE.BufferAttribute(attributes.scanAngle, 1);
+ geometry.setAttribute('scanAngle', scanAngle);
+
+ geometry.userData.origin = new THREE.Vector3().fromArray(attributes.origin);
+
+ return geometry;
+}
+
/** The LASParser module provides a [parse]{@link
* module:LASParser.parse} method that takes a LAS or LAZ (LASZip) file in, and
* gives a `THREE.BufferGeometry` containing all the necessary attributes to be
@@ -22,6 +55,38 @@ export default {
}
lasLoader.lazPerf = path;
},
+
+
+ /**
+ * Parses a chunk of a LAS or LAZ (LASZip) and returns the corresponding
+ * `THREE.BufferGeometry`.
+ *
+ * @param {ArrayBuffer} data - The file content to parse.
+ * @param {Object} options
+ * @param {Object} options.in - Options to give to the parser.
+ * @param {number} options.in.pointCount - Number of points encoded in this
+ * data chunk.
+ * @param {Object} options.in.header - Partial LAS file header.
+ * @param {number} options.in.header.pointDataRecordFormat - Type of Point
+ * Data Record contained in the LAS file.
+ * @param {number} options.in.header.pointDataRecordLength - Size (in bytes)
+ * of the Point Data Record.
+ * @param {Las.ExtraBytes[]} [options.in.eb] - Extra bytes LAS VLRs headers.
+ * @param { 8 | 16 } [options.in.colorDepth] - Color depth (in bits).
+ * Defaults to 8 bits for LAS 1.2 and 16 bits for later versions
+ * (as mandated by the specification)
+ *
+ * @return {Promise} A promise resolving with a
+ * `THREE.BufferGeometry`.
+ */
+ parseChunk(data, options = {}) {
+ return lasLoader.parseChunk(data, options.in).then((parsedData) => {
+ const geometry = buildBufferGeometry(parsedData.attributes);
+ geometry.computeBoundingBox();
+ return geometry;
+ });
+ },
+
/**
* Parses a LAS file or a LAZ (LASZip) file and return the corresponding
* `THREE.BufferGeometry`.
@@ -43,37 +108,9 @@ export default {
return lasLoader.parseFile(data, {
colorDepth: options.in?.colorDepth,
}).then((parsedData) => {
- const geometry = new THREE.BufferGeometry();
- const attributes = parsedData.attributes;
- geometry.userData = parsedData.header;
-
- const positionBuffer = new THREE.BufferAttribute(attributes.position, 3);
- geometry.setAttribute('position', positionBuffer);
-
- const intensityBuffer = new THREE.BufferAttribute(attributes.intensity, 1);
- geometry.setAttribute('intensity', intensityBuffer);
-
- const returnNumber = new THREE.BufferAttribute(attributes.returnNumber, 1);
- geometry.setAttribute('returnNumber', returnNumber);
-
- const numberOfReturns = new THREE.BufferAttribute(attributes.numberOfReturns, 1);
- geometry.setAttribute('numberOfReturns', numberOfReturns);
-
- const classBuffer = new THREE.BufferAttribute(attributes.classification, 1);
- geometry.setAttribute('classification', classBuffer);
-
- const pointSourceID = new THREE.BufferAttribute(attributes.pointSourceID, 1);
- geometry.setAttribute('pointSourceID', pointSourceID);
-
- if (attributes.color) {
- const colorBuffer = new THREE.BufferAttribute(attributes.color, 4, true);
- geometry.setAttribute('color', colorBuffer);
- }
- const scanAngle = new THREE.BufferAttribute(attributes.scanAngle, 1);
- geometry.setAttribute('scanAngle', scanAngle);
-
+ const geometry = buildBufferGeometry(parsedData.attributes);
+ geometry.userData.header = parsedData.header;
geometry.computeBoundingBox();
- geometry.userData.origin = new THREE.Vector3().fromArray(attributes.origin);
return geometry;
});
},
diff --git a/src/Source/CopcSource.js b/src/Source/CopcSource.js
new file mode 100644
index 0000000000..306e84ac64
--- /dev/null
+++ b/src/Source/CopcSource.js
@@ -0,0 +1,121 @@
+import { Binary, Info, Las } from 'copc';
+import Extent from 'Core/Geographic/Extent';
+import Fetcher from 'Provider/Fetcher';
+import LASParser from 'Parser/LASParser';
+import Source from 'Source/Source';
+import * as THREE from 'three';
+
+/**
+ * @param {function(number, number):Promise} fetcher
+ */
+async function getHeaders(fetcher) {
+ const header =
+ Las.Header.parse(await fetcher(0, Las.Constants.minHeaderLength));
+ const vlrs = await Las.Vlr.walk(fetcher, header);
+
+ // info VLR: required by COPC
+ const infoVlr = Las.Vlr.find(vlrs, 'copc', 1);
+ if (!infoVlr) { return Promise.reject('COPC info VLR is required'); }
+ const info = Info.parse(await Las.Vlr.fetch(fetcher, infoVlr));
+
+ // OGC Coordinate System WKT: required by LAS1.4
+ const wktVlr = Las.Vlr.find(vlrs, 'LASF_Projection', 2112);
+ if (!wktVlr) { return Promise.reject('LAS1.4 WKT VLR is required'); }
+ const wkt = Binary.toCString(await Las.Vlr.fetch(fetcher, wktVlr));
+
+ // Extra bytes: optional by LAS1.4
+ const ebVlr = Las.Vlr.find(vlrs, 'LASF_Spec', 4);
+ const eb = ebVlr ?
+ Las.ExtraBytes.parse(await Las.Vlr.fetch(fetcher, ebVlr)) :
+ [];
+
+ return { header, info, wkt, eb };
+}
+
+/**
+ * @classdesc
+ * A source for [Cloud Optimised Point Cloud](https://copc.io/) (COPC) data.
+ * Such data consists of a [LAZ 1.4](https://www.ogc.org/standard/las/) file
+ * that stores compressed points data organized in a clustered octree.
+ *
+ * A freshly created source fetches and parses portions of the file
+ * corresponding to the LAS 1.4 header, all the Variable Length Record (VLR)
+ * headers as well the following VLRs:
+ * - COPC [`info`](https://copc.io/#info-vlr) record (mandatory)
+ * - LAS 1.4 `OGC Coordinate System WKT` record (mandatory, see [Las 1.4
+ * spec](https://portal.ogc.org/files/?artifact_id=74523))
+ * - LAS 1.4 `Extra Bytes` record (optional, see [Las 1.4
+ * spec](https://portal.ogc.org/files/?artifact_id=74523))
+ *
+ * @extends {Source}
+ *
+ * @property {boolean} isCopcSource - Read-only flag to check that a given
+ * object is of type CopcSource.
+ * @property {Object} header - LAS header of the source.
+ * @property {Object[]} eb - List of headers of each Variable Length Records
+ * (VLRs).
+ * @property {Object} info - COPC `info` VLR.
+ * @property {number[]} info.cube - Bounding box of the octree as a 6-element
+ * tuple `[minX, minY, minZ, maxX, maxY, maxZ]`. Computed from `center_x`,
+ * `center_y`, `center_z` and `halfSize` properties.
+ * @property {Object} info.rootHierarchyPage - Hierarchy page of the root node.
+ * @property {number} info.rootHierarchyPage.pageOffset - Absolute offset to the
+ * root node data chunk.
+ * @property {number} info.rootHierarchyPage.pageLength - Size (in bytes) of the
+ * root node data chunk.
+ * @property {number[]} gpsTimeRange - A 2-element tuple denoting the minimum
+ * and maximum values of attribute `gpsTime`.
+ */
+class CopcSource extends Source {
+ /**
+ * @param {Object} config - Source configuration
+ * @param {string} config.url - URL of the COPC resource.
+ * @param {8 | 16} [config.colorDepth=16] - Encoding of the `color`
+ * attribute. Either `8` or `16` bits.
+ * @param {string} [config._lazPerfBaseUrl] - (experimental) Overrides base
+ * url of the `las-zip.wasm` file of the `laz-perf` library.
+ * @param {string} [config.crs='EPSG:4326'] - Native CRS of the COPC
+ * resource. Note that, for now, this is not inferred from the COPC header.
+ * @param {RequestInit} [config.networkOptions] - Fetch options (passed
+ * directly to `fetch()`), see [the syntax for more information]{@link
+ * https://developer.mozilla.org/en-US/docs/Web/API/WindowOrWorkerGlobalScope/fetch#Syntax}.
+ * @param {Object} [config.attribution] - Attribution of the data.
+ *
+ * @constructor
+ */
+ constructor(config) {
+ super(config);
+
+ this.isCopcSource = true;
+
+ this.parser = LASParser.parseChunk;
+ this.fetcher = Fetcher.arrayBuffer;
+
+ this.colorDepth = config.colorDepth ?? 16;
+
+ const get = (/** @type {number} */ begin, /** @type {number} */ end) =>
+ this.fetcher(this.url, {
+ ...this.networkOptions,
+ headers: {
+ ...this.networkOptions.headers,
+ range: `bytes=${begin}-${end - 1}`,
+ },
+ }).then(buffer => new Uint8Array(buffer));
+ this.whenReady = getHeaders(get).then((metadata) => {
+ this.header = metadata.header;
+ this.info = metadata.info;
+ this.eb = metadata.eb;
+ // TODO: use wkt definition in `metadata.wkt` to infer/define crs
+ this.crs = config.crs || 'EPSG:4326';
+
+ const bbox = new THREE.Box3();
+ bbox.min.fromArray(this.info.cube, 0);
+ bbox.max.fromArray(this.info.cube, 3);
+ this.extent = Extent.fromBox3(this.crs, bbox);
+
+ return this;
+ });
+ }
+}
+
+export default CopcSource;
diff --git a/src/Source/Source.js b/src/Source/Source.js
index 21fe5ca5d5..feadfa8c5c 100644
--- a/src/Source/Source.js
+++ b/src/Source/Source.js
@@ -64,6 +64,8 @@ let uid = 0;
* To extend a Source, it is necessary to implement two functions:
* `urlFromExtent` and `extentInsideLimit`.
*
+ * @extends InformationsData
+ *
* @property {boolean} isSource - Used to checkout whether this source is a
* Source. Default is true. You should not change this, as it is used internally
* for optimisation.
@@ -105,7 +107,6 @@ class Source extends InformationsData {
* Source. Only the `url` property is mandatory.
*
* @constructor
- * @extends InformationsData
*/
constructor(source) {
super(source);
@@ -124,6 +125,7 @@ class Source extends InformationsData {
this.isVectorSource = (source.parser || supportedParsers.get(source.format)) != undefined;
this.networkOptions = source.networkOptions || { crossOrigin: 'anonymous' };
this.attribution = source.attribution;
+ /** @type {Promise} */
this.whenReady = Promise.resolve();
this._featuresCaches = {};
if (source.extent && !(source.extent.isExtent)) {
diff --git a/test/unit/lasparser.js b/test/unit/lasparser.js
index 0f5d5687a5..3cb111f42a 100644
--- a/test/unit/lasparser.js
+++ b/test/unit/lasparser.js
@@ -26,35 +26,39 @@ describe('LASParser', function () {
it('parses a las file to a THREE.BufferGeometry', async function () {
if (!lasData) { this.skip(); }
const bufferGeometry = await LASParser.parse(lasData);
- assert.strictEqual(bufferGeometry.userData.pointCount, 106);
- assert.strictEqual(bufferGeometry.attributes.position.count, bufferGeometry.userData.pointCount);
- assert.strictEqual(bufferGeometry.attributes.intensity.count, bufferGeometry.userData.pointCount);
- assert.strictEqual(bufferGeometry.attributes.classification.count, bufferGeometry.userData.pointCount);
+ const header = bufferGeometry.userData.header;
+ const origin = bufferGeometry.userData.origin;
+ assert.strictEqual(header.pointCount, 106);
+ assert.strictEqual(bufferGeometry.attributes.position.count, header.pointCount);
+ assert.strictEqual(bufferGeometry.attributes.intensity.count, header.pointCount);
+ assert.strictEqual(bufferGeometry.attributes.classification.count, header.pointCount);
assert.strictEqual(bufferGeometry.attributes.color, undefined);
- assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.min.x + bufferGeometry.userData.origin.x, bufferGeometry.userData.min[0], epsilon));
- assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.min.y + bufferGeometry.userData.origin.y, bufferGeometry.userData.min[1], epsilon));
- assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.min.z + bufferGeometry.userData.origin.z, bufferGeometry.userData.min[2], epsilon));
- assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.max.x + bufferGeometry.userData.origin.x, bufferGeometry.userData.max[0], epsilon));
- assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.max.y + bufferGeometry.userData.origin.y, bufferGeometry.userData.max[1], epsilon));
- assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.max.z + bufferGeometry.userData.origin.z, bufferGeometry.userData.max[2], epsilon));
+ assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.min.x + origin.x, header.min[0], epsilon));
+ assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.min.y + origin.y, header.min[1], epsilon));
+ assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.min.z + origin.z, header.min[2], epsilon));
+ assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.max.x + origin.x, header.max[0], epsilon));
+ assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.max.y + origin.y, header.max[1], epsilon));
+ assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.max.z + origin.z, header.max[2], epsilon));
});
it('parses a laz file to a THREE.BufferGeometry', async function () {
if (!lazV14Data) { this.skip(); }
const bufferGeometry = await LASParser.parse(lazV14Data);
- assert.strictEqual(bufferGeometry.userData.pointCount, 100000);
- assert.strictEqual(bufferGeometry.attributes.position.count, bufferGeometry.userData.pointCount);
- assert.strictEqual(bufferGeometry.attributes.intensity.count, bufferGeometry.userData.pointCount);
- assert.strictEqual(bufferGeometry.attributes.classification.count, bufferGeometry.userData.pointCount);
- assert.strictEqual(bufferGeometry.attributes.color.count, bufferGeometry.userData.pointCount);
-
- assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.min.x + bufferGeometry.userData.origin.x, bufferGeometry.userData.min[0], epsilon));
- assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.min.y + bufferGeometry.userData.origin.y, bufferGeometry.userData.min[1], epsilon));
- assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.min.z + bufferGeometry.userData.origin.z, bufferGeometry.userData.min[2], epsilon));
- assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.max.x + bufferGeometry.userData.origin.x, bufferGeometry.userData.max[0], epsilon));
- assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.max.y + bufferGeometry.userData.origin.y, bufferGeometry.userData.max[1], epsilon));
- assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.max.z + bufferGeometry.userData.origin.z, bufferGeometry.userData.max[2], epsilon));
+ const header = bufferGeometry.userData.header;
+ const origin = bufferGeometry.userData.origin;
+ assert.strictEqual(header.pointCount, 100000);
+ assert.strictEqual(bufferGeometry.attributes.position.count, header.pointCount);
+ assert.strictEqual(bufferGeometry.attributes.intensity.count, header.pointCount);
+ assert.strictEqual(bufferGeometry.attributes.classification.count, header.pointCount);
+ assert.strictEqual(bufferGeometry.attributes.color.count, header.pointCount);
+
+ assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.min.x + origin.x, header.min[0], epsilon));
+ assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.min.y + origin.y, header.min[1], epsilon));
+ assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.min.z + origin.z, header.min[2], epsilon));
+ assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.max.x + origin.x, header.max[0], epsilon));
+ assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.max.y + origin.y, header.max[1], epsilon));
+ assert.ok(compareWithEpsilon(bufferGeometry.boundingBox.max.z + origin.z, header.max[2], epsilon));
});
});
});