three.js
-<script type="module" crossorigin src="../assets/01-helloworld-BK54cGeW.js"></script>
-<link rel="modulepreload" crossorigin href="../assets/locar.es-D2pHxP8J.js">
+<script type="module" crossorigin src="../assets/01-helloworld-DomqHtFg.js"></script>
+<link rel="modulepreload" crossorigin href="../assets/locar.es-C0jLPF0Z.js">
diff --git a/docs/02-gps-and-sensors/index.html b/docs/02-gps-and-sensors/index.html
index 5f9ce19..3a643c8 100644
--- a/docs/02-gps-and-sensors/index.html
+++ b/docs/02-gps-and-sensors/index.html
@@ -2,8 +2,8 @@
three.js
-<script type="module" crossorigin src="../assets/02-gps-and-sensors-La4_GML4.js"></script>
-<link rel="modulepreload" crossorigin href="../assets/locar.es-D2pHxP8J.js">
+<script type="module" crossorigin src="../assets/02-gps-and-sensors-DkTaubFC.js"></script>
+<link rel="modulepreload" crossorigin href="../assets/locar.es-C0jLPF0Z.js">
diff --git a/docs/03-ar-objects/index.html b/docs/03-api-communication/index.html
similarity index 54%
rename from docs/03-ar-objects/index.html
rename to docs/03-api-communication/index.html
index 2e994a5..534bcd5 100644
--- a/docs/03-ar-objects/index.html
+++ b/docs/03-api-communication/index.html
@@ -2,8 +2,8 @@
three.js
-<script type="module" crossorigin src="../assets/03-ar-objects-D3M74PcG.js"></script>
-<link rel="modulepreload" crossorigin href="../assets/locar.es-D2pHxP8J.js">
+<script type="module" crossorigin src="../assets/03-api-communication-DdMSg92Q.js"></script>
+<link rel="modulepreload" crossorigin href="../assets/locar.es-C0jLPF0Z.js">
diff --git a/docs/04-api-communication/index.html b/docs/04-api-communication/index.html
deleted file mode 100644
index 2443130..0000000
--- a/docs/04-api-communication/index.html
+++ /dev/null
@@ -1,10 +0,0 @@
-<!DOCTYPE html>
-<html>
-<head>
-<title>three.js</title>
-<script type="module" crossorigin src="../assets/04-api-communication-CSUchSy2.js"></script>
-<link rel="modulepreload" crossorigin href="../assets/locar.es-D2pHxP8J.js">
-</head>
-<body>
-</body>
-</html>
diff --git a/docs/assets/01-helloworld-BK54cGeW.js b/docs/assets/01-helloworld-DomqHtFg.js
similarity index 78%
rename from docs/assets/01-helloworld-BK54cGeW.js
rename to docs/assets/01-helloworld-DomqHtFg.js
index 3b710cc..c21a638 100644
--- a/docs/assets/01-helloworld-BK54cGeW.js
+++ b/docs/assets/01-helloworld-DomqHtFg.js
@@ -1 +1 @@
-import{S as o,P as r,W as a,B as d,M as w,a as s,_ as c,E as h}from"./locar.es-D2pHxP8J.js";const i=new o,n=new r(60,window.innerWidth/window.innerHeight,.001,100),e=new a;e.setSize(window.innerWidth,window.innerHeight);document.body.appendChild(e.domElement);window.addEventListener("resize",l=>{e.setSize(window.innerWidth,window.innerHeight),n.aspect=window.innerWidth/window.innerHeight,n.updateProjectionMatrix()});const m=new d(2,2,2),p=new w(m,new s({color:16711680})),t=new c(i,n),W=new h(e);t.fakeGps(-.72,51.05);t.add(p,-.72,51.0501);e.setAnimationLoop(f);function f(){W.update(),e.render(i,n)}
+import{S as o,P as r,W as a,B as d,M as w,a as s,E as c,_ as h}from"./locar.es-C0jLPF0Z.js";const i=new o,n=new r(60,window.innerWidth/window.innerHeight,.001,100),e=new a;e.setSize(window.innerWidth,window.innerHeight);document.body.appendChild(e.domElement);window.addEventListener("resize",l=>{e.setSize(window.innerWidth,window.innerHeight),n.aspect=window.innerWidth/window.innerHeight,n.updateProjectionMatrix()});const m=new d(2,2,2),p=new w(m,new s({color:16711680})),t=new c(i,n),W=new h(e);t.fakeGps(-.72,51.05);t.add(p,-.72,51.0501);e.setAnimationLoop(f);function f(){W.update(),e.render(i,n)}
diff --git a/docs/assets/02-gps-and-sensors-La4_GML4.js b/docs/assets/02-gps-and-sensors-DkTaubFC.js
similarity index 88%
rename from docs/assets/02-gps-and-sensors-La4_GML4.js
rename to docs/assets/02-gps-and-sensors-DkTaubFC.js
index 5f97ae7..631bcd0 100644
--- a/docs/assets/02-gps-and-sensors-La4_GML4.js
+++ b/docs/assets/02-gps-and-sensors-DkTaubFC.js
@@ -1 +1 @@
-import{P as c,W as w,S as u,_ as f,E as h,b as m,B as g,M as D,a as p}from"./locar.es-D2pHxP8J.js";const t=new c(80,window.innerWidth/window.innerHeight,.001,1e3),o=new w;o.setSize(window.innerWidth,window.innerHeight);document.body.appendChild(o.domElement);const a=new u,i=new f(a,t);window.addEventListener("resize",e=>{o.setSize(window.innerWidth,window.innerHeight),t.aspect=window.innerWidth/window.innerHeight,t.updateProjectionMatrix()});const x=new h(o);let s=!0;const M=new m(t);i.on("gpsupdate",(e,W)=>{if(s){alert(`Got the initial location: longitude ${e.coords.longitude}, latitude ${e.coords.latitude}`);const d=[{latDis:.001,lonDis:0,colour:16711680},{latDis:-.001,lonDis:0,colour:16776960},{latDis:0,lonDis:-.001,colour:65535},{latDis:0,lonDis:.001,colour:65280}],r=new g(20,20,20);for(const n of d){const l=new D(r,new p({color:n.colour}));console.log(`adding at ${e.coords.longitude+n.lonDis},${e.coords.latitude+n.latDis}`),i.add(l,e.coords.longitude+n.lonDis,e.coords.latitude+n.latDis)}s=!1}});i.startGps();o.setAnimationLoop(P);function P(){x.update(),M.update(),o.render(a,t)}
+import{P as c,W as w,S as u,E as f,_ as h,b as m,B as g,M as D,a as p}from"./locar.es-C0jLPF0Z.js";const t=new c(80,window.innerWidth/window.innerHeight,.001,1e3),o=new w;o.setSize(window.innerWidth,window.innerHeight);document.body.appendChild(o.domElement);const a=new u,i=new f(a,t);window.addEventListener("resize",e=>{o.setSize(window.innerWidth,window.innerHeight),t.aspect=window.innerWidth/window.innerHeight,t.updateProjectionMatrix()});const x=new h(o);let s=!0;const M=new m(t);i.on("gpsupdate",(e,W)=>{if(s){alert(`Got the initial location: longitude ${e.coords.longitude}, latitude ${e.coords.latitude}`);const d=[{latDis:.001,lonDis:0,colour:16711680},{latDis:-.001,lonDis:0,colour:16776960},{latDis:0,lonDis:-.001,colour:65535},{latDis:0,lonDis:.001,colour:65280}],r=new g(20,20,20);for(const n of d){const l=new D(r,new p({color:n.colour}));console.log(`adding at ${e.coords.longitude+n.lonDis},${e.coords.latitude+n.latDis}`),i.add(l,e.coords.longitude+n.lonDis,e.coords.latitude+n.latDis)}s=!1}});i.startGps();o.setAnimationLoop(P);function P(){x.update(),M.update(),o.render(a,t)}
diff --git a/docs/assets/04-api-communication-CSUchSy2.js b/docs/assets/03-api-communication-DdMSg92Q.js
similarity index 88%
rename from docs/assets/04-api-communication-CSUchSy2.js
rename to docs/assets/03-api-communication-DdMSg92Q.js
index 0e828b4..97e58c2 100644
--- a/docs/assets/04-api-communication-CSUchSy2.js
+++ b/docs/assets/03-api-communication-DdMSg92Q.js
@@ -1 +1 @@
-import{P as w,W as p,S as h,_ as m,b as l,E as u,B as f,y as g,M as b,a as y}from"./locar.es-D2pHxP8J.js";const t=new w(80,window.innerWidth/window.innerHeight,.001,1e3),n=new p;n.setSize(window.innerWidth,window.innerHeight);const i=new h;document.body.appendChild(n.domElement);window.addEventListener("resize",e=>{n.setSize(window.innerWidth,window.innerHeight),t.aspect=window.innerWidth/window.innerHeight,t.updateProjectionMatrix()});const s=new m(i,t),j=new l(t),P=new u(n);let a=!0;const d={},W=new f(20,20,20),x=new g(n);s.on("gpsupdate",async(e,c)=>{(a||c>100)&&((await(await fetch(`https://hikar.org/webapp/map?bbox=${e.coords.longitude-.02},${e.coords.latitude-.02},${e.coords.longitude+.02},${e.coords.latitude+.02}&layers=poi&outProj=4326`)).json()).features.forEach(o=>{if(!d[o.properties.osm_id]){const r=new b(W,new y({color:16711680}));s.add(r,o.geometry.coordinates[0],o.geometry.coordinates[1],0,o.properties),d[o.properties.osm_id]=r}}),a=!1)});s.startGps();n.setAnimationLoop(H);function H(){P.update(),j.update();const e=x.raycast(t,i);e.length&&alert(`This is ${e[0].object.properties.name}`),n.render(i,t)}
+import{P as w,W as p,S as h,E as m,b as l,_ as u,B as f,y as g,M as b,a as y}from"./locar.es-C0jLPF0Z.js";const t=new w(80,window.innerWidth/window.innerHeight,.001,1e3),n=new p;n.setSize(window.innerWidth,window.innerHeight);const i=new h;document.body.appendChild(n.domElement);window.addEventListener("resize",e=>{n.setSize(window.innerWidth,window.innerHeight),t.aspect=window.innerWidth/window.innerHeight,t.updateProjectionMatrix()});const s=new m(i,t),j=new l(t),P=new u(n);let a=!0;const d={},W=new f(20,20,20),x=new g(n);s.on("gpsupdate",async(e,c)=>{(a||c>100)&&((await(await fetch(`https://hikar.org/webapp/map?bbox=${e.coords.longitude-.02},${e.coords.latitude-.02},${e.coords.longitude+.02},${e.coords.latitude+.02}&layers=poi&outProj=4326`)).json()).features.forEach(o=>{if(!d[o.properties.osm_id]){const r=new b(W,new y({color:16711680}));s.add(r,o.geometry.coordinates[0],o.geometry.coordinates[1],0,o.properties),d[o.properties.osm_id]=r}}),a=!1)});s.startGps();n.setAnimationLoop(H);function H(){P.update(),j.update();const e=x.raycast(t,i);e.length&&alert(`This is ${e[0].object.properties.name}`),n.render(i,t)}
diff --git a/docs/assets/03-ar-objects-D3M74PcG.js b/docs/assets/03-ar-objects-D3M74PcG.js
deleted file mode 100644
index d673278..0000000
--- a/docs/assets/03-ar-objects-D3M74PcG.js
+++ /dev/null
@@ -1 +0,0 @@
-import{P as u,W as p,S as M,_ as g,E,B as P,y as v,M as d,a as r,c as L}from"./locar.es-D2pHxP8J.js";const i=new u(80,window.innerWidth/window.innerHeight,.001,1e3),e=new p;e.setSize(window.innerWidth,window.innerHeight);const s=new M;document.body.appendChild(e.domElement);window.addEventListener("resize",n=>{e.setSize(window.innerWidth,window.innerHeight),i.aspect=window.innerWidth/window.innerHeight,i.updateProjectionMatrix()});const o=new g(s,i),W=new E(e);let c=!0;const l=L.degToRad(1),w=new P(20,20,20);let a=!1,m,t=0;const b=new v(e);e.domElement.addEventListener("mousedown",n=>{a=!0,m=n.clientX});e.domElement.addEventListener("mousemove",n=>{a&&(t+=n.clientX>m?l*10:-l*10,t>Math.PI?t-=2*Math.PI:t<-Math.PI&&(t+=2*Math.PI),i.rotation.set(0,t,0))});e.domElement.addEventListener("mouseup",n=>{a=!1});o.on("gpsupdate",async(n,H)=>{if(c){c=!1;const h=new d(w,new r({color:65535})),f=new d(w,new r({color:16711680}));o.add(h,-1.406392,50.908042,0,{name:"Guildhall"}),o.add(f,-1.40434,50.90733,0,{name:"O'Neills"})}});o.fakeGps(-1.404555,50.908015);e.setAnimationLoop(y);function y(){W.update();const n=b.raycast(i,s);n.length&&alert(`This is ${n[0].object.properties.name}`),e.render(s,i)}
diff --git a/docs/assets/locar.es-D2pHxP8J.js b/docs/assets/locar.es-C0jLPF0Z.js
similarity index 99%
rename from docs/assets/locar.es-D2pHxP8J.js
rename to docs/assets/locar.es-C0jLPF0Z.js
index 4f8e929..56ba459 100644
--- a/docs/assets/locar.es-D2pHxP8J.js
+++ b/docs/assets/locar.es-C0jLPF0Z.js
@@ -3838,4 +3838,4 @@ void main() {
}`;class zf{constructor(){this.texture=null,this.mesh=null,this.depthNear=0,this.depthFar=0}init(e,t,n){if(this.texture===null){const r=new ft,s=e.properties.get(r);s.__webglTexture=t.texture,(t.depthNear!=n.depthNear||t.depthFar!=n.depthFar)&&(this.depthNear=t.depthNear,this.depthFar=t.depthFar),this.texture=r}}getMesh(e){if(this.texture!==null&&this.mesh===null){const t=e.cameras[0].viewport,n=new cn({vertexShader:Of,fragmentShader:Bf,uniforms:{depthColor:{value:this.texture},depthWidth:{value:t.z},depthHeight:{value:t.w}}});this.mesh=new Ft(new gi(20,20),n)}return this.mesh}reset(){this.texture=null,this.mesh=null}getDepthTexture(){return this.texture}}class Hf extends wn{constructor(e,t){super();const n=this;let r=null,s=1,a=null,o="local-floor",l=1,c=null,u=null,p=null,f=null,m=null,x=null;const S=new zf,d=t.getContextAttributes();let h=null,A=null;const E=[],T=[],O=new Ye;let C=null;const b=new Lt;b.layers.enable(1),b.viewport=new et;const F=new Lt;F.layers.enable(2),F.viewport=new et;const ee=[b,F],_=new Nf;_.layers.enable(1),_.layers.enable(2);let M=null,V=null;this.cameraAutoUpdate=!0,this.enabled=!1,this.isPresenting=!1,this.getController=function(W){let $=E[W];return $===void 0&&($=new Fr,E[W]=$),$.getTargetRaySpace()},this.getControllerGrip=function(W){let $=E[W];return $===void 0&&($=new Fr,E[W]=$),$.getGripSpace()},this.getHand=function(W){let $=E[W];return $===void 0&&($=new Fr,E[W]=$),$.getHandSpace()};function z(W){const $=T.indexOf(W.inputSource);if($===-1)return;const pe=E[$];pe!==void 0&&(pe.update(W.inputSource,W.frame,c||a),pe.dispatchEvent({type:W.type,data:W.inputSource}))}function k(){r.removeEventListener("select",z),r.removeEventListener("selectstart",z),r.removeEventListener("selectend",z),r.removeEventListener("squeeze",z),r.removeEventListener("squeezestart",z),r.removeEventListener("squeezeend",z),r.removeEventListener("end",k),r.removeEventListener("inputsourceschange",Z);for(let W=0;W=0&&(T[ce]=null,E[ce].disconnect(pe))}for(let $=0;$=T.length){T.push(pe),ce=Se;break}else if(T[Se]===null){T[Se]=pe,ce=Se;break}if(ce===-1)break}const Re=E[ce];Re&&Re.connect(pe)}}const B=new N,J=new N;function G(W,$,pe){B.setFromMatrixPosition($.matrixWorld),J.setFromMatrixPosition(pe.matrixWorld);const ce=B.distanceTo(J),Re=$.projectionMatrix.elements,Se=pe.projectionMatrix.elements,Ue=Re[14]/(Re[10]-1),Xe=Re[14]/(Re[10]+1),Ie=(Re[9]+1)/Re[5],R=(Re[9]-1)/Re[5],gt=(Re[8]-1)/Re[0],Le=(Se[8]+1)/Se[0],Fe=Ue*gt,ye=Ue*Le,Ze=ce/(-gt+Le),be=Ze*-gt;if($.matrixWorld.decompose(W.position,W.quaternion,W.scale),W.translateX(be),W.translateZ(Ze),W.matrixWorld.compose(W.position,W.quaternion,W.scale),W.matrixWorldInverse.copy(W.matrixWorld).invert(),Re[10]===-1)W.projectionMatrix.copy($.projectionMatrix),W.projectionMatrixInverse.copy($.projectionMatrixInverse);else{const y=Ue+Ze,g=Xe+Ze,D=Fe-be,q=ye+(ce-be),K=Ie*Xe/g*y,X=R*Xe/g*y;W.projectionMatrix.makePerspective(D,q,K,X,y,g),W.projectionMatrixInverse.copy(W.projectionMatrix).invert()}}function oe(W,$){$===null?W.matrixWorld.copy(W.matrix):W.matrixWorld.multiplyMatrices($.matrixWorld,W.matrix),W.matrixWorldInverse.copy(W.matrixWorld).invert()}this.updateCamera=function(W){if(r===null)return;let $=W.near,pe=W.far;S.texture!==null&&(S.depthNear>0&&($=S.depthNear),S.depthFar>0&&(pe=S.depthFar)),_.near=F.near=b.near=$,_.far=F.far=b.far=pe,(M!==_.near||V!==_.far)&&(r.updateRenderState({depthNear:_.near,depthFar:_.far}),M=_.near,V=_.far);const ce=W.parent,Re=_.cameras;oe(_,ce);for(let Se=0;Se0&&(d.alphaTest.value=h.alphaTest);const 
A=e.get(h),E=A.envMap,T=A.envMapRotation;E&&(d.envMap.value=E,vn.copy(T),vn.x*=-1,vn.y*=-1,vn.z*=-1,E.isCubeTexture&&E.isRenderTargetTexture===!1&&(vn.y*=-1,vn.z*=-1),d.envMapRotation.value.setFromMatrix4(Gf.makeRotationFromEuler(vn)),d.flipEnvMap.value=E.isCubeTexture&&E.isRenderTargetTexture===!1?-1:1,d.reflectivity.value=h.reflectivity,d.ior.value=h.ior,d.refractionRatio.value=h.refractionRatio),h.lightMap&&(d.lightMap.value=h.lightMap,d.lightMapIntensity.value=h.lightMapIntensity,t(h.lightMap,d.lightMapTransform)),h.aoMap&&(d.aoMap.value=h.aoMap,d.aoMapIntensity.value=h.aoMapIntensity,t(h.aoMap,d.aoMapTransform))}function a(d,h){d.diffuse.value.copy(h.color),d.opacity.value=h.opacity,h.map&&(d.map.value=h.map,t(h.map,d.mapTransform))}function o(d,h){d.dashSize.value=h.dashSize,d.totalSize.value=h.dashSize+h.gapSize,d.scale.value=h.scale}function l(d,h,A,E){d.diffuse.value.copy(h.color),d.opacity.value=h.opacity,d.size.value=h.size*A,d.scale.value=E*.5,h.map&&(d.map.value=h.map,t(h.map,d.uvTransform)),h.alphaMap&&(d.alphaMap.value=h.alphaMap,t(h.alphaMap,d.alphaMapTransform)),h.alphaTest>0&&(d.alphaTest.value=h.alphaTest)}function c(d,h){d.diffuse.value.copy(h.color),d.opacity.value=h.opacity,d.rotation.value=h.rotation,h.map&&(d.map.value=h.map,t(h.map,d.mapTransform)),h.alphaMap&&(d.alphaMap.value=h.alphaMap,t(h.alphaMap,d.alphaMapTransform)),h.alphaTest>0&&(d.alphaTest.value=h.alphaTest)}function u(d,h){d.specular.value.copy(h.specular),d.shininess.value=Math.max(h.shininess,1e-4)}function p(d,h){h.gradientMap&&(d.gradientMap.value=h.gradientMap)}function f(d,h){d.metalness.value=h.metalness,h.metalnessMap&&(d.metalnessMap.value=h.metalnessMap,t(h.metalnessMap,d.metalnessMapTransform)),d.roughness.value=h.roughness,h.roughnessMap&&(d.roughnessMap.value=h.roughnessMap,t(h.roughnessMap,d.roughnessMapTransform)),h.envMap&&(d.envMapIntensity.value=h.envMapIntensity)}function 
m(d,h,A){d.ior.value=h.ior,h.sheen>0&&(d.sheenColor.value.copy(h.sheenColor).multiplyScalar(h.sheen),d.sheenRoughness.value=h.sheenRoughness,h.sheenColorMap&&(d.sheenColorMap.value=h.sheenColorMap,t(h.sheenColorMap,d.sheenColorMapTransform)),h.sheenRoughnessMap&&(d.sheenRoughnessMap.value=h.sheenRoughnessMap,t(h.sheenRoughnessMap,d.sheenRoughnessMapTransform))),h.clearcoat>0&&(d.clearcoat.value=h.clearcoat,d.clearcoatRoughness.value=h.clearcoatRoughness,h.clearcoatMap&&(d.clearcoatMap.value=h.clearcoatMap,t(h.clearcoatMap,d.clearcoatMapTransform)),h.clearcoatRoughnessMap&&(d.clearcoatRoughnessMap.value=h.clearcoatRoughnessMap,t(h.clearcoatRoughnessMap,d.clearcoatRoughnessMapTransform)),h.clearcoatNormalMap&&(d.clearcoatNormalMap.value=h.clearcoatNormalMap,t(h.clearcoatNormalMap,d.clearcoatNormalMapTransform),d.clearcoatNormalScale.value.copy(h.clearcoatNormalScale),h.side===_t&&d.clearcoatNormalScale.value.negate())),h.dispersion>0&&(d.dispersion.value=h.dispersion),h.iridescence>0&&(d.iridescence.value=h.iridescence,d.iridescenceIOR.value=h.iridescenceIOR,d.iridescenceThicknessMinimum.value=h.iridescenceThicknessRange[0],d.iridescenceThicknessMaximum.value=h.iridescenceThicknessRange[1],h.iridescenceMap&&(d.iridescenceMap.value=h.iridescenceMap,t(h.iridescenceMap,d.iridescenceMapTransform)),h.iridescenceThicknessMap&&(d.iridescenceThicknessMap.value=h.iridescenceThicknessMap,t(h.iridescenceThicknessMap,d.iridescenceThicknessMapTransform))),h.transmission>0&&(d.transmission.value=h.transmission,d.transmissionSamplerMap.value=A.texture,d.transmissionSamplerSize.value.set(A.width,A.height),h.transmissionMap&&(d.transmissionMap.value=h.transmissionMap,t(h.transmissionMap,d.transmissionMapTransform)),d.thickness.value=h.thickness,h.thicknessMap&&(d.thicknessMap.value=h.thicknessMap,t(h.thicknessMap,d.thicknessMapTransform)),d.attenuationDistance.value=h.attenuationDistance,d.attenuationColor.value.copy(h.attenuationColor)),h.anisotropy>0&&(d.anisotropyVector.value.set(h.anisotropy*Math.cos(h.anisotropyRotation),h.anisotropy*Math.sin(h.anisotropyRotation)),h.anisotropyMap&&(d.anisotropyMap.value=h.anisotropyMap,t(h.anisotropyMap,d.anisotropyMapTransform))),d.specularIntensity.value=h.specularIntensity,d.specularColor.value.copy(h.specularColor),h.specularColorMap&&(d.specularColorMap.value=h.specularColorMap,t(h.specularColorMap,d.specularColorMapTransform)),h.specularIntensityMap&&(d.specularIntensityMap.value=h.specularIntensityMap,t(h.specularIntensityMap,d.specularIntensityMapTransform))}function x(d,h){h.matcap&&(d.matcap.value=h.matcap)}function S(d,h){const A=e.get(h).light;d.referencePosition.value.setFromMatrixPosition(A.matrixWorld),d.nearDistance.value=A.shadow.camera.near,d.farDistance.value=A.shadow.camera.far}return{refreshFogUniforms:n,refreshMaterialUniforms:r}}function kf(i,e,t,n){let r={},s={},a=[];const o=i.getParameter(i.MAX_UNIFORM_BUFFER_BINDINGS);function l(A,E){const T=E.program;n.uniformBlockBinding(A,T)}function c(A,E){let T=r[A.id];T===void 0&&(x(A),T=u(A),r[A.id]=T,A.addEventListener("dispose",d));const O=E.program;n.updateUBOMapping(A,O);const C=e.render.frame;s[A.id]!==C&&(f(A),s[A.id]=C)}function u(A){const E=p();A.__bindingPointIndex=E;const T=i.createBuffer(),O=A.__size,C=A.usage;return i.bindBuffer(i.UNIFORM_BUFFER,T),i.bufferData(i.UNIFORM_BUFFER,O,C),i.bindBuffer(i.UNIFORM_BUFFER,null),i.bindBufferBase(i.UNIFORM_BUFFER,E,T),T}function p(){for(let A=0;A0&&(T+=O-C),A.__size=T,A.__cache={},this}function S(A){const E={boundary:0,storage:0};return typeof 
A=="number"||typeof A=="boolean"?(E.boundary=4,E.storage=4):A.isVector2?(E.boundary=8,E.storage=8):A.isVector3||A.isColor?(E.boundary=16,E.storage=12):A.isVector4?(E.boundary=16,E.storage=16):A.isMatrix3?(E.boundary=48,E.storage=48):A.isMatrix4?(E.boundary=64,E.storage=64):A.isTexture?console.warn("THREE.WebGLRenderer: Texture samplers can not be part of an uniforms group."):console.warn("THREE.WebGLRenderer: Unsupported uniform value type.",A),E}function d(A){const E=A.target;E.removeEventListener("dispose",d);const T=a.indexOf(E.__bindingPointIndex);a.splice(T,1),i.deleteBuffer(r[E.id]),delete r[E.id],delete s[E.id]}function h(){for(const A in r)i.deleteBuffer(r[A]);a=[],r={},s={}}return{bind:l,update:c,dispose:h}}class Jf{constructor(e={}){const{canvas:t=Pl(),context:n=null,depth:r=!0,stencil:s=!1,alpha:a=!1,antialias:o=!1,premultipliedAlpha:l=!0,preserveDrawingBuffer:c=!1,powerPreference:u="default",failIfMajorPerformanceCaveat:p=!1}=e;this.isWebGLRenderer=!0;let f;if(n!==null){if(typeof WebGLRenderingContext<"u"&&n instanceof WebGLRenderingContext)throw new Error("THREE.WebGLRenderer: WebGL 1 is not supported since r163.");f=n.getContextAttributes().alpha}else f=a;const m=new Uint32Array(4),x=new Int32Array(4);let S=null,d=null;const h=[],A=[];this.domElement=t,this.debug={checkShaderErrors:!0,onShaderError:null},this.autoClear=!0,this.autoClearColor=!0,this.autoClearDepth=!0,this.autoClearStencil=!0,this.sortObjects=!0,this.clippingPlanes=[],this.localClippingEnabled=!1,this._outputColorSpace=It,this.toneMapping=on,this.toneMappingExposure=1;const E=this;let T=!1,O=0,C=0,b=null,F=-1,ee=null;const _=new et,M=new et;let V=null;const z=new qe(0);let k=0,Z=t.width,B=t.height,J=1,G=null,oe=null;const le=new et(0,0,Z,B),_e=new et(0,0,Z,B);let Be=!1;const ke=new po;let W=!1,$=!1;const pe=new tt,ce=new tt,Re=new N,Se=new et,Ue={background:null,fog:null,environment:null,overrideMaterial:null,isScene:!0};let Xe=!1;function Ie(){return b===null?J:1}let R=n;function gt(v,P){return t.getContext(v,P)}try{const v={alpha:!0,depth:r,stencil:s,antialias:o,premultipliedAlpha:l,preserveDrawingBuffer:c,powerPreference:u,failIfMajorPerformanceCaveat:p};if("setAttribute"in t&&t.setAttribute("data-engine",`three.js r${Ts}`),t.addEventListener("webglcontextlost",Y,!1),t.addEventListener("webglcontextrestored",ie,!1),t.addEventListener("webglcontextcreationerror",ae,!1),R===null){const P="webgl2";if(R=gt(P,v),R===null)throw gt(P)?new Error("Error creating WebGL context with your selected attributes."):new Error("Error creating WebGL context.")}}catch(v){throw console.error("THREE.WebGLRenderer: "+v.message),v}let Le,Fe,ye,Ze,be,y,g,D,q,K,X,ge,ne,ue,Oe,j,he,Te,Ae,de,De,we,Ke,w;function se(){Le=new Kh(R),Le.init(),we=new If(R,Le),Fe=new Vh(R,Le,e,we),ye=new Lf(R),Fe.reverseDepthBuffer&&ye.buffers.depth.setReversed(!0),Ze=new jh(R),be=new _f,y=new Uf(R,Le,ye,be,Fe,we,Ze),g=new Wh(E),D=new Yh(E),q=new rc(R),Ke=new Hh(R,q),K=new Zh(R,q,Ze,Ke),X=new Qh(R,K,q,Ze),Ae=new Jh(R,Fe,y),j=new kh(be),ge=new mf(E,g,D,Le,Fe,Ke,j),ne=new Vf(E,be),ue=new vf,Oe=new Tf(Le),Te=new zh(E,g,D,ye,X,f,l),he=new Cf(E,X,Fe),w=new kf(R,Ze,Fe,ye),de=new Gh(R,Le,Ze),De=new $h(R,Le,Ze),Ze.programs=ge.programs,E.capabilities=Fe,E.extensions=Le,E.properties=be,E.renderLists=ue,E.shadowMap=he,E.state=ye,E.info=Ze}se();const H=new Hf(E,R);this.xr=H,this.getContext=function(){return R},this.getContextAttributes=function(){return R.getContextAttributes()},this.forceContextLoss=function(){const 
v=Le.get("WEBGL_lose_context");v&&v.loseContext()},this.forceContextRestore=function(){const v=Le.get("WEBGL_lose_context");v&&v.restoreContext()},this.getPixelRatio=function(){return J},this.setPixelRatio=function(v){v!==void 0&&(J=v,this.setSize(Z,B,!1))},this.getSize=function(v){return v.set(Z,B)},this.setSize=function(v,P,U=!0){if(H.isPresenting){console.warn("THREE.WebGLRenderer: Can't change size while VR device is presenting.");return}Z=v,B=P,t.width=Math.floor(v*J),t.height=Math.floor(P*J),U===!0&&(t.style.width=v+"px",t.style.height=P+"px"),this.setViewport(0,0,v,P)},this.getDrawingBufferSize=function(v){return v.set(Z*J,B*J).floor()},this.setDrawingBufferSize=function(v,P,U){Z=v,B=P,J=U,t.width=Math.floor(v*U),t.height=Math.floor(P*U),this.setViewport(0,0,v,P)},this.getCurrentViewport=function(v){return v.copy(_)},this.getViewport=function(v){return v.copy(le)},this.setViewport=function(v,P,U,I){v.isVector4?le.set(v.x,v.y,v.z,v.w):le.set(v,P,U,I),ye.viewport(_.copy(le).multiplyScalar(J).round())},this.getScissor=function(v){return v.copy(_e)},this.setScissor=function(v,P,U,I){v.isVector4?_e.set(v.x,v.y,v.z,v.w):_e.set(v,P,U,I),ye.scissor(M.copy(_e).multiplyScalar(J).round())},this.getScissorTest=function(){return Be},this.setScissorTest=function(v){ye.setScissorTest(Be=v)},this.setOpaqueSort=function(v){G=v},this.setTransparentSort=function(v){oe=v},this.getClearColor=function(v){return v.copy(Te.getClearColor())},this.setClearColor=function(){Te.setClearColor.apply(Te,arguments)},this.getClearAlpha=function(){return Te.getClearAlpha()},this.setClearAlpha=function(){Te.setClearAlpha.apply(Te,arguments)},this.clear=function(v=!0,P=!0,U=!0){let I=0;if(v){let L=!1;if(b!==null){const Q=b.texture.format;L=Q===Ps||Q===Cs||Q===ws}if(L){const Q=b.texture.type,re=Q===Zt||Q===bn||Q===di||Q===Qn||Q===bs||Q===Rs,fe=Te.getClearColor(),me=Te.getClearAlpha(),Me=fe.r,Ee=fe.g,ve=fe.b;re?(m[0]=Me,m[1]=Ee,m[2]=ve,m[3]=me,R.clearBufferuiv(R.COLOR,0,m)):(x[0]=Me,x[1]=Ee,x[2]=ve,x[3]=me,R.clearBufferiv(R.COLOR,0,x))}else I|=R.COLOR_BUFFER_BIT}P&&(I|=R.DEPTH_BUFFER_BIT,R.clearDepth(this.capabilities.reverseDepthBuffer?0:1)),U&&(I|=R.STENCIL_BUFFER_BIT,this.state.buffers.stencil.setMask(4294967295)),R.clear(I)},this.clearColor=function(){this.clear(!0,!1,!1)},this.clearDepth=function(){this.clear(!1,!0,!1)},this.clearStencil=function(){this.clear(!1,!1,!0)},this.dispose=function(){t.removeEventListener("webglcontextlost",Y,!1),t.removeEventListener("webglcontextrestored",ie,!1),t.removeEventListener("webglcontextcreationerror",ae,!1),ue.dispose(),Oe.dispose(),be.dispose(),g.dispose(),D.dispose(),X.dispose(),Ke.dispose(),w.dispose(),ge.dispose(),H.dispose(),H.removeEventListener("sessionstart",Os),H.removeEventListener("sessionend",Bs),dn.stop()};function Y(v){v.preventDefault(),console.log("THREE.WebGLRenderer: Context Lost."),T=!0}function ie(){console.log("THREE.WebGLRenderer: Context Restored."),T=!1;const v=Ze.autoReset,P=he.enabled,U=he.autoUpdate,I=he.needsUpdate,L=he.type;se(),Ze.autoReset=v,he.enabled=P,he.autoUpdate=U,he.needsUpdate=I,he.type=L}function ae(v){console.error("THREE.WebGLRenderer: A WebGL context could not be created. 
Reason: ",v.statusMessage)}function Ne(v){const P=v.target;P.removeEventListener("dispose",Ne),nt(P)}function nt(v){pt(v),be.remove(v)}function pt(v){const P=be.get(v).programs;P!==void 0&&(P.forEach(function(U){ge.releaseProgram(U)}),v.isShaderMaterial&&ge.releaseShaderCache(v))}this.renderBufferDirect=function(v,P,U,I,L,Q){P===null&&(P=Ue);const re=L.isMesh&&L.matrixWorld.determinant()<0,fe=Eo(v,P,U,I,L);ye.setMaterial(I,re);let me=U.index,Me=1;if(I.wireframe===!0){if(me=K.getWireframeAttribute(U),me===void 0)return;Me=2}const Ee=U.drawRange,ve=U.attributes.position;let We=Ee.start*Me,$e=(Ee.start+Ee.count)*Me;Q!==null&&(We=Math.max(We,Q.start*Me),$e=Math.min($e,(Q.start+Q.count)*Me)),me!==null?(We=Math.max(We,0),$e=Math.min($e,me.count)):ve!=null&&(We=Math.max(We,0),$e=Math.min($e,ve.count));const Je=$e-We;if(Je<0||Je===1/0)return;Ke.setup(L,I,fe,U,me);let vt,He=de;if(me!==null&&(vt=q.get(me),He=De,He.setIndex(vt)),L.isMesh)I.wireframe===!0?(ye.setLineWidth(I.wireframeLinewidth*Ie()),He.setMode(R.LINES)):He.setMode(R.TRIANGLES);else if(L.isLine){let xe=I.linewidth;xe===void 0&&(xe=1),ye.setLineWidth(xe*Ie()),L.isLineSegments?He.setMode(R.LINES):L.isLineLoop?He.setMode(R.LINE_LOOP):He.setMode(R.LINE_STRIP)}else L.isPoints?He.setMode(R.POINTS):L.isSprite&&He.setMode(R.TRIANGLES);if(L.isBatchedMesh)if(L._multiDrawInstances!==null)He.renderMultiDrawInstances(L._multiDrawStarts,L._multiDrawCounts,L._multiDrawCount,L._multiDrawInstances);else if(Le.get("WEBGL_multi_draw"))He.renderMultiDraw(L._multiDrawStarts,L._multiDrawCounts,L._multiDrawCount);else{const xe=L._multiDrawStarts,ot=L._multiDrawCounts,Ge=L._multiDrawCount,Rt=me?q.get(me).bytesPerElement:1,Pn=be.get(I).currentProgram.getUniforms();for(let xt=0;xt{function Q(){if(I.forEach(function(re){be.get(re).currentProgram.isReady()&&I.delete(re)}),I.size===0){L(v);return}setTimeout(Q,10)}Le.get("KHR_parallel_shader_compile")!==null?Q():setTimeout(Q,10)})};let mt=null;function zt(v){mt&&mt(v)}function Os(){dn.stop()}function Bs(){dn.start()}const dn=new mo;dn.setAnimationLoop(zt),typeof self<"u"&&dn.setContext(self),this.setAnimationLoop=function(v){mt=v,H.setAnimationLoop(v),v===null?dn.stop():dn.start()},H.addEventListener("sessionstart",Os),H.addEventListener("sessionend",Bs),this.render=function(v,P){if(P!==void 0&&P.isCamera!==!0){console.error("THREE.WebGLRenderer.render: camera is not an instance of THREE.Camera.");return}if(T===!0)return;if(v.matrixWorldAutoUpdate===!0&&v.updateMatrixWorld(),P.parent===null&&P.matrixWorldAutoUpdate===!0&&P.updateMatrixWorld(),H.enabled===!0&&H.isPresenting===!0&&(H.cameraAutoUpdate===!0&&H.updateCamera(P),P=H.getCamera()),v.isScene===!0&&v.onBeforeRender(E,v,P,b),d=Oe.get(v,A.length),d.init(P),A.push(d),ce.multiplyMatrices(P.projectionMatrix,P.matrixWorldInverse),ke.setFromProjectionMatrix(ce),$=this.localClippingEnabled,W=j.init(this.clippingPlanes,$),S=ue.get(v,h.length),S.init(),h.push(S),H.enabled===!0&&H.isPresenting===!0){const Q=E.xr.getDepthSensingMesh();Q!==null&&rr(Q,P,-1/0,E.sortObjects)}rr(v,P,0,E.sortObjects),S.finish(),E.sortObjects===!0&&S.sort(G,oe),Xe=H.enabled===!1||H.isPresenting===!1||H.hasDepthSensing()===!1,Xe&&Te.addToRenderList(S,v),this.info.render.frame++,W===!0&&j.beginShadows();const U=d.state.shadowsArray;he.render(U,v,P),W===!0&&j.endShadows(),this.info.autoReset===!0&&this.info.reset();const I=S.opaque,L=S.transmissive;if(d.setupLights(),P.isArrayCamera){const Q=P.cameras;if(L.length>0)for(let 
re=0,fe=Q.length;re0&&Hs(I,L,v,P),Xe&&Te.render(v),zs(S,v,P);b!==null&&(y.updateMultisampleRenderTarget(b),y.updateRenderTargetMipmap(b)),v.isScene===!0&&v.onAfterRender(E,v,P),Ke.resetDefaultState(),F=-1,ee=null,A.pop(),A.length>0?(d=A[A.length-1],W===!0&&j.setGlobalState(E.clippingPlanes,d.state.camera)):d=null,h.pop(),h.length>0?S=h[h.length-1]:S=null};function rr(v,P,U,I){if(v.visible===!1)return;if(v.layers.test(P.layers)){if(v.isGroup)U=v.renderOrder;else if(v.isLOD)v.autoUpdate===!0&&v.update(P);else if(v.isLight)d.pushLight(v),v.castShadow&&d.pushShadow(v);else if(v.isSprite){if(!v.frustumCulled||ke.intersectsSprite(v)){I&&Se.setFromMatrixPosition(v.matrixWorld).applyMatrix4(ce);const re=X.update(v),fe=v.material;fe.visible&&S.push(v,re,fe,U,Se.z,null)}}else if((v.isMesh||v.isLine||v.isPoints)&&(!v.frustumCulled||ke.intersectsObject(v))){const re=X.update(v),fe=v.material;if(I&&(v.boundingSphere!==void 0?(v.boundingSphere===null&&v.computeBoundingSphere(),Se.copy(v.boundingSphere.center)):(re.boundingSphere===null&&re.computeBoundingSphere(),Se.copy(re.boundingSphere.center)),Se.applyMatrix4(v.matrixWorld).applyMatrix4(ce)),Array.isArray(fe)){const me=re.groups;for(let Me=0,Ee=me.length;Me0&&vi(L,P,U),Q.length>0&&vi(Q,P,U),re.length>0&&vi(re,P,U),ye.buffers.depth.setTest(!0),ye.buffers.depth.setMask(!0),ye.buffers.color.setMask(!0),ye.setPolygonOffset(!1)}function Hs(v,P,U,I){if((U.isScene===!0?U.overrideMaterial:null)!==null)return;d.state.transmissionRenderTarget[I.id]===void 0&&(d.state.transmissionRenderTarget[I.id]=new Rn(1,1,{generateMipmaps:!0,type:Le.has("EXT_color_buffer_half_float")||Le.has("EXT_color_buffer_float")?pi:Zt,minFilter:Tn,samples:4,stencilBuffer:s,resolveDepthBuffer:!1,resolveStencilBuffer:!1,colorSpace:Ve.workingColorSpace}));const Q=d.state.transmissionRenderTarget[I.id],re=I.viewport||_;Q.setSize(re.z,re.w);const fe=E.getRenderTarget();E.setRenderTarget(Q),E.getClearColor(z),k=E.getClearAlpha(),k<1&&E.setClearColor(16777215,.5),E.clear(),Xe&&Te.render(U);const me=E.toneMapping;E.toneMapping=on;const Me=I.viewport;if(I.viewport!==void 0&&(I.viewport=void 0),d.setupLightsView(I),W===!0&&j.setGlobalState(E.clippingPlanes,I),vi(v,U,I),y.updateMultisampleRenderTarget(Q),y.updateRenderTargetMipmap(Q),Le.has("WEBGL_multisampled_render_to_texture")===!1){let Ee=!1;for(let ve=0,We=P.length;ve0),ve=!!U.morphAttributes.position,We=!!U.morphAttributes.normal,$e=!!U.morphAttributes.color;let Je=on;I.toneMapped&&(b===null||b.isXRRenderTarget===!0)&&(Je=E.toneMapping);const vt=U.morphAttributes.position||U.morphAttributes.normal||U.morphAttributes.color,He=vt!==void 0?vt.length:0,xe=be.get(I),ot=d.state.lights;if(W===!0&&($===!0||v!==ee)){const Tt=v===ee&&I.id===F;j.setState(I,v,Tt)}let 
Ge=!1;I.version===xe.__version?(xe.needsLights&&xe.lightsStateVersion!==ot.state.version||xe.outputColorSpace!==fe||L.isBatchedMesh&&xe.batching===!1||!L.isBatchedMesh&&xe.batching===!0||L.isBatchedMesh&&xe.batchingColor===!0&&L.colorTexture===null||L.isBatchedMesh&&xe.batchingColor===!1&&L.colorTexture!==null||L.isInstancedMesh&&xe.instancing===!1||!L.isInstancedMesh&&xe.instancing===!0||L.isSkinnedMesh&&xe.skinning===!1||!L.isSkinnedMesh&&xe.skinning===!0||L.isInstancedMesh&&xe.instancingColor===!0&&L.instanceColor===null||L.isInstancedMesh&&xe.instancingColor===!1&&L.instanceColor!==null||L.isInstancedMesh&&xe.instancingMorph===!0&&L.morphTexture===null||L.isInstancedMesh&&xe.instancingMorph===!1&&L.morphTexture!==null||xe.envMap!==me||I.fog===!0&&xe.fog!==Q||xe.numClippingPlanes!==void 0&&(xe.numClippingPlanes!==j.numPlanes||xe.numIntersection!==j.numIntersection)||xe.vertexAlphas!==Me||xe.vertexTangents!==Ee||xe.morphTargets!==ve||xe.morphNormals!==We||xe.morphColors!==$e||xe.toneMapping!==Je||xe.morphTargetsCount!==He)&&(Ge=!0):(Ge=!0,xe.__version=I.version);let Rt=xe.currentProgram;Ge===!0&&(Rt=xi(I,P,L));let Pn=!1,xt=!1,sr=!1;const Qe=Rt.getUniforms(),$t=xe.uniforms;if(ye.useProgram(Rt.program)&&(Pn=!0,xt=!0,sr=!0),I.id!==F&&(F=I.id,xt=!0),Pn||ee!==v){Fe.reverseDepthBuffer?(pe.copy(v.projectionMatrix),Dl(pe),Ul(pe),Qe.setValue(R,"projectionMatrix",pe)):Qe.setValue(R,"projectionMatrix",v.projectionMatrix),Qe.setValue(R,"viewMatrix",v.matrixWorldInverse);const Tt=Qe.map.cameraPosition;Tt!==void 0&&Tt.setValue(R,Re.setFromMatrixPosition(v.matrixWorld)),Fe.logarithmicDepthBuffer&&Qe.setValue(R,"logDepthBufFC",2/(Math.log(v.far+1)/Math.LN2)),(I.isMeshPhongMaterial||I.isMeshToonMaterial||I.isMeshLambertMaterial||I.isMeshBasicMaterial||I.isMeshStandardMaterial||I.isShaderMaterial)&&Qe.setValue(R,"isOrthographic",v.isOrthographicCamera===!0),ee!==v&&(ee=v,xt=!0,sr=!0)}if(L.isSkinnedMesh){Qe.setOptional(R,L,"bindMatrix"),Qe.setOptional(R,L,"bindMatrixInverse");const Tt=L.skeleton;Tt&&(Tt.boneTexture===null&&Tt.computeBoneTexture(),Qe.setValue(R,"boneTexture",Tt.boneTexture,y))}L.isBatchedMesh&&(Qe.setOptional(R,L,"batchingTexture"),Qe.setValue(R,"batchingTexture",L._matricesTexture,y),Qe.setOptional(R,L,"batchingIdTexture"),Qe.setValue(R,"batchingIdTexture",L._indirectTexture,y),Qe.setOptional(R,L,"batchingColorTexture"),L._colorsTexture!==null&&Qe.setValue(R,"batchingColorTexture",L._colorsTexture,y));const ar=U.morphAttributes;if((ar.position!==void 0||ar.normal!==void 0||ar.color!==void 0)&&Ae.update(L,U,Rt),(xt||xe.receiveShadow!==L.receiveShadow)&&(xe.receiveShadow=L.receiveShadow,Qe.setValue(R,"receiveShadow",L.receiveShadow)),I.isMeshGouraudMaterial&&I.envMap!==null&&($t.envMap.value=me,$t.flipEnvMap.value=me.isCubeTexture&&me.isRenderTargetTexture===!1?-1:1),I.isMeshStandardMaterial&&I.envMap===null&&P.environment!==null&&($t.envMapIntensity.value=P.environmentIntensity),xt&&(Qe.setValue(R,"toneMappingExposure",E.toneMappingExposure),xe.needsLights&&yo($t,sr),Q&&I.fog===!0&&ne.refreshFogUniforms($t,Q),ne.refreshMaterialUniforms($t,I,J,B,d.state.transmissionRenderTarget[v.id]),Ki.upload(R,Vs(xe),$t,y)),I.isShaderMaterial&&I.uniformsNeedUpdate===!0&&(Ki.upload(R,Vs(xe),$t,y),I.uniformsNeedUpdate=!1),I.isSpriteMaterial&&Qe.setValue(R,"center",L.center),Qe.setValue(R,"modelViewMatrix",L.modelViewMatrix),Qe.setValue(R,"normalMatrix",L.normalMatrix),Qe.setValue(R,"modelMatrix",L.matrixWorld),I.isShaderMaterial||I.isRawShaderMaterial){const Tt=I.uniformsGroups;for(let 
or=0,Ao=Tt.length;or0&&y.useMultisampledRTT(v)===!1?L=be.get(v).__webglMultisampledFramebuffer:Array.isArray(Ee)?L=Ee[U]:L=Ee,_.copy(v.viewport),M.copy(v.scissor),V=v.scissorTest}else _.copy(le).multiplyScalar(J).floor(),M.copy(_e).multiplyScalar(J).floor(),V=Be;if(ye.bindFramebuffer(R.FRAMEBUFFER,L)&&I&&ye.drawBuffers(v,L),ye.viewport(_),ye.scissor(M),ye.setScissorTest(V),Q){const me=be.get(v.texture);R.framebufferTexture2D(R.FRAMEBUFFER,R.COLOR_ATTACHMENT0,R.TEXTURE_CUBE_MAP_POSITIVE_X+P,me.__webglTexture,U)}else if(re){const me=be.get(v.texture),Me=P||0;R.framebufferTextureLayer(R.FRAMEBUFFER,R.COLOR_ATTACHMENT0,me.__webglTexture,U||0,Me)}F=-1},this.readRenderTargetPixels=function(v,P,U,I,L,Q,re){if(!(v&&v.isWebGLRenderTarget)){console.error("THREE.WebGLRenderer.readRenderTargetPixels: renderTarget is not THREE.WebGLRenderTarget.");return}let fe=be.get(v).__webglFramebuffer;if(v.isWebGLCubeRenderTarget&&re!==void 0&&(fe=fe[re]),fe){ye.bindFramebuffer(R.FRAMEBUFFER,fe);try{const me=v.texture,Me=me.format,Ee=me.type;if(!Fe.textureFormatReadable(Me)){console.error("THREE.WebGLRenderer.readRenderTargetPixels: renderTarget is not in RGBA or implementation defined format.");return}if(!Fe.textureTypeReadable(Ee)){console.error("THREE.WebGLRenderer.readRenderTargetPixels: renderTarget is not in UnsignedByteType or implementation defined type.");return}P>=0&&P<=v.width-I&&U>=0&&U<=v.height-L&&R.readPixels(P,U,I,L,we.convert(Me),we.convert(Ee),Q)}finally{const me=b!==null?be.get(b).__webglFramebuffer:null;ye.bindFramebuffer(R.FRAMEBUFFER,me)}}},this.readRenderTargetPixelsAsync=async function(v,P,U,I,L,Q,re){if(!(v&&v.isWebGLRenderTarget))throw new Error("THREE.WebGLRenderer.readRenderTargetPixels: renderTarget is not THREE.WebGLRenderTarget.");let fe=be.get(v).__webglFramebuffer;if(v.isWebGLCubeRenderTarget&&re!==void 0&&(fe=fe[re]),fe){const me=v.texture,Me=me.format,Ee=me.type;if(!Fe.textureFormatReadable(Me))throw new Error("THREE.WebGLRenderer.readRenderTargetPixelsAsync: renderTarget is not in RGBA or implementation defined format.");if(!Fe.textureTypeReadable(Ee))throw new Error("THREE.WebGLRenderer.readRenderTargetPixelsAsync: renderTarget is not in UnsignedByteType or implementation defined type.");if(P>=0&&P<=v.width-I&&U>=0&&U<=v.height-L){ye.bindFramebuffer(R.FRAMEBUFFER,fe);const ve=R.createBuffer();R.bindBuffer(R.PIXEL_PACK_BUFFER,ve),R.bufferData(R.PIXEL_PACK_BUFFER,Q.byteLength,R.STREAM_READ),R.readPixels(P,U,I,L,we.convert(Me),we.convert(Ee),0);const We=b!==null?be.get(b).__webglFramebuffer:null;ye.bindFramebuffer(R.FRAMEBUFFER,We);const $e=R.fenceSync(R.SYNC_GPU_COMMANDS_COMPLETE,0);return R.flush(),await Ll(R,$e,4),R.bindBuffer(R.PIXEL_PACK_BUFFER,ve),R.getBufferSubData(R.PIXEL_PACK_BUFFER,0,Q),R.deleteBuffer(ve),R.deleteSync($e),Q}else throw new Error("THREE.WebGLRenderer.readRenderTargetPixelsAsync: requested read bounds are out of range.")}},this.copyFramebufferToTexture=function(v,P=null,U=0){v.isTexture!==!0&&(Yi("WebGLRenderer: copyFramebufferToTexture function signature has changed."),P=arguments[0]||null,v=arguments[1]);const I=Math.pow(2,-U),L=Math.floor(v.image.width*I),Q=Math.floor(v.image.height*I),re=P!==null?P.x:0,fe=P!==null?P.y:0;y.setTexture2D(v,0),R.copyTexSubImage2D(R.TEXTURE_2D,U,0,0,re,fe,L,Q),ye.unbindTexture()},this.copyTextureToTexture=function(v,P,U=null,I=null,L=0){v.isTexture!==!0&&(Yi("WebGLRenderer: copyTextureToTexture function signature has changed."),I=arguments[0]||null,v=arguments[1],P=arguments[2],L=arguments[3]||0,U=null);let 
Q,re,fe,me,Me,Ee;U!==null?(Q=U.max.x-U.min.x,re=U.max.y-U.min.y,fe=U.min.x,me=U.min.y):(Q=v.image.width,re=v.image.height,fe=0,me=0),I!==null?(Me=I.x,Ee=I.y):(Me=0,Ee=0);const ve=we.convert(P.format),We=we.convert(P.type);y.setTexture2D(P,0),R.pixelStorei(R.UNPACK_FLIP_Y_WEBGL,P.flipY),R.pixelStorei(R.UNPACK_PREMULTIPLY_ALPHA_WEBGL,P.premultiplyAlpha),R.pixelStorei(R.UNPACK_ALIGNMENT,P.unpackAlignment);const $e=R.getParameter(R.UNPACK_ROW_LENGTH),Je=R.getParameter(R.UNPACK_IMAGE_HEIGHT),vt=R.getParameter(R.UNPACK_SKIP_PIXELS),He=R.getParameter(R.UNPACK_SKIP_ROWS),xe=R.getParameter(R.UNPACK_SKIP_IMAGES),ot=v.isCompressedTexture?v.mipmaps[L]:v.image;R.pixelStorei(R.UNPACK_ROW_LENGTH,ot.width),R.pixelStorei(R.UNPACK_IMAGE_HEIGHT,ot.height),R.pixelStorei(R.UNPACK_SKIP_PIXELS,fe),R.pixelStorei(R.UNPACK_SKIP_ROWS,me),v.isDataTexture?R.texSubImage2D(R.TEXTURE_2D,L,Me,Ee,Q,re,ve,We,ot.data):v.isCompressedTexture?R.compressedTexSubImage2D(R.TEXTURE_2D,L,Me,Ee,ot.width,ot.height,ve,ot.data):R.texSubImage2D(R.TEXTURE_2D,L,Me,Ee,Q,re,ve,We,ot),R.pixelStorei(R.UNPACK_ROW_LENGTH,$e),R.pixelStorei(R.UNPACK_IMAGE_HEIGHT,Je),R.pixelStorei(R.UNPACK_SKIP_PIXELS,vt),R.pixelStorei(R.UNPACK_SKIP_ROWS,He),R.pixelStorei(R.UNPACK_SKIP_IMAGES,xe),L===0&&P.generateMipmaps&&R.generateMipmap(R.TEXTURE_2D),ye.unbindTexture()},this.copyTextureToTexture3D=function(v,P,U=null,I=null,L=0){v.isTexture!==!0&&(Yi("WebGLRenderer: copyTextureToTexture3D function signature has changed."),U=arguments[0]||null,I=arguments[1]||null,v=arguments[2],P=arguments[3],L=arguments[4]||0);let Q,re,fe,me,Me,Ee,ve,We,$e;const Je=v.isCompressedTexture?v.mipmaps[L]:v.image;U!==null?(Q=U.max.x-U.min.x,re=U.max.y-U.min.y,fe=U.max.z-U.min.z,me=U.min.x,Me=U.min.y,Ee=U.min.z):(Q=Je.width,re=Je.height,fe=Je.depth,me=0,Me=0,Ee=0),I!==null?(ve=I.x,We=I.y,$e=I.z):(ve=0,We=0,$e=0);const vt=we.convert(P.format),He=we.convert(P.type);let xe;if(P.isData3DTexture)y.setTexture3D(P,0),xe=R.TEXTURE_3D;else if(P.isDataArrayTexture||P.isCompressedArrayTexture)y.setTexture2DArray(P,0),xe=R.TEXTURE_2D_ARRAY;else{console.warn("THREE.WebGLRenderer.copyTextureToTexture3D: only supports THREE.DataTexture3D and THREE.DataTexture2DArray.");return}R.pixelStorei(R.UNPACK_FLIP_Y_WEBGL,P.flipY),R.pixelStorei(R.UNPACK_PREMULTIPLY_ALPHA_WEBGL,P.premultiplyAlpha),R.pixelStorei(R.UNPACK_ALIGNMENT,P.unpackAlignment);const ot=R.getParameter(R.UNPACK_ROW_LENGTH),Ge=R.getParameter(R.UNPACK_IMAGE_HEIGHT),Rt=R.getParameter(R.UNPACK_SKIP_PIXELS),Pn=R.getParameter(R.UNPACK_SKIP_ROWS),xt=R.getParameter(R.UNPACK_SKIP_IMAGES);R.pixelStorei(R.UNPACK_ROW_LENGTH,Je.width),R.pixelStorei(R.UNPACK_IMAGE_HEIGHT,Je.height),R.pixelStorei(R.UNPACK_SKIP_PIXELS,me),R.pixelStorei(R.UNPACK_SKIP_ROWS,Me),R.pixelStorei(R.UNPACK_SKIP_IMAGES,Ee),v.isDataTexture||v.isData3DTexture?R.texSubImage3D(xe,L,ve,We,$e,Q,re,fe,vt,He,Je.data):P.isCompressedArrayTexture?R.compressedTexSubImage3D(xe,L,ve,We,$e,Q,re,fe,vt,Je.data):R.texSubImage3D(xe,L,ve,We,$e,Q,re,fe,vt,He,Je),R.pixelStorei(R.UNPACK_ROW_LENGTH,ot),R.pixelStorei(R.UNPACK_IMAGE_HEIGHT,Ge),R.pixelStorei(R.UNPACK_SKIP_PIXELS,Rt),R.pixelStorei(R.UNPACK_SKIP_ROWS,Pn),R.pixelStorei(R.UNPACK_SKIP_IMAGES,xt),L===0&&P.generateMipmaps&&R.generateMipmap(xe),ye.unbindTexture()},this.initRenderTarget=function(v){be.get(v).__webglFramebuffer===void 
0&&y.setupRenderTarget(v)},this.initTexture=function(v){v.isCubeTexture?y.setTextureCube(v,0):v.isData3DTexture?y.setTexture3D(v,0):v.isDataArrayTexture||v.isCompressedArrayTexture?y.setTexture2DArray(v,0):y.setTexture2D(v,0),ye.unbindTexture()},this.resetState=function(){O=0,C=0,b=null,ye.reset(),Ke.reset()},typeof __THREE_DEVTOOLS__<"u"&&__THREE_DEVTOOLS__.dispatchEvent(new CustomEvent("observe",{detail:this}))}get coordinateSystem(){return Kt}get outputColorSpace(){return this._outputColorSpace}set outputColorSpace(e){this._outputColorSpace=e;const t=this.getContext();t.drawingBufferColorSpace=e===Ls?"display-p3":"srgb",t.unpackColorSpace=Ve.workingColorSpace===tr?"display-p3":"srgb"}}class Wf extends yt{constructor(){super(),this.isScene=!0,this.type="Scene",this.background=null,this.environment=null,this.fog=null,this.backgroundBlurriness=0,this.backgroundIntensity=1,this.backgroundRotation=new Bt,this.environmentIntensity=1,this.environmentRotation=new Bt,this.overrideMaterial=null,typeof __THREE_DEVTOOLS__<"u"&&__THREE_DEVTOOLS__.dispatchEvent(new CustomEvent("observe",{detail:this}))}copy(e,t){return super.copy(e,t),e.background!==null&&(this.background=e.background.clone()),e.environment!==null&&(this.environment=e.environment.clone()),e.fog!==null&&(this.fog=e.fog.clone()),this.backgroundBlurriness=e.backgroundBlurriness,this.backgroundIntensity=e.backgroundIntensity,this.backgroundRotation.copy(e.backgroundRotation),this.environmentIntensity=e.environmentIntensity,this.environmentRotation.copy(e.environmentRotation),e.overrideMaterial!==null&&(this.overrideMaterial=e.overrideMaterial.clone()),this.matrixAutoUpdate=e.matrixAutoUpdate,this}toJSON(e){const t=super.toJSON(e);return this.fog!==null&&(t.object.fog=this.fog.toJSON()),this.backgroundBlurriness>0&&(t.object.backgroundBlurriness=this.backgroundBlurriness),this.backgroundIntensity!==1&&(t.object.backgroundIntensity=this.backgroundIntensity),t.object.backgroundRotation=this.backgroundRotation.toArray(),this.environmentIntensity!==1&&(t.object.environmentIntensity=this.environmentIntensity),t.object.environmentRotation=this.environmentRotation.toArray(),t}}class Xf extends ft{constructor(e,t,n,r,s,a,o,l,c){super(e,t,n,r,s,a,o,l,c),this.isVideoTexture=!0,this.minFilter=a!==void 0?a:Et,this.magFilter=s!==void 0?s:Et,this.generateMipmaps=!1;const u=this;function p(){u.needsUpdate=!0,e.requestVideoFrameCallback(p)}"requestVideoFrameCallback"in e&&e.requestVideoFrameCallback(p)}clone(){return new this.constructor(this.image).copy(this)}update(){const e=this.image;"requestVideoFrameCallback"in e===!1&&e.readyState>=e.HAVE_CURRENT_DATA&&(this.needsUpdate=!0)}}const Ha=new tt;class qf{constructor(e,t,n=0,r=1/0){this.ray=new ao(e,t),this.near=n,this.far=r,this.camera=null,this.layers=new Is,this.params={Mesh:{},Line:{threshold:1},LOD:{},Points:{threshold:1},Sprite:{}}}set(e,t){this.ray.set(e,t)}setFromCamera(e,t){t.isPerspectiveCamera?(this.ray.origin.setFromMatrixPosition(t.matrixWorld),this.ray.direction.set(e.x,e.y,.5).unproject(t).sub(this.ray.origin).normalize(),this.camera=t):t.isOrthographicCamera?(this.ray.origin.set(e.x,e.y,(t.near+t.far)/(t.near-t.far)).unproject(t),this.ray.direction.set(0,0,-1).transformDirection(t.matrixWorld),this.camera=t):console.error("THREE.Raycaster: Unsupported camera type: "+t.type)}setFromXRController(e){return Ha.identity().extractRotation(e.matrixWorld),this.ray.origin.setFromMatrixPosition(e.matrixWorld),this.ray.direction.set(0,0,-1).applyMatrix4(Ha),this}intersectObject(e,t=!0,n=[]){return 
ys(e,this,n,t),n.sort(Ga),n}intersectObjects(e,t=!0,n=[]){for(let r=0,s=e.length;r{this._gpsReceived(e)},e=>{this._eventHandlers.gpserror?this._eventHandlers.gpserror(e.code):alert(`GPS error: code ${e.code}`)},{enableHighAccuracy:!0}),!0):!1}stopGps(){return this._watchPositionId!==null?(navigator.geolocation.clearWatch(this._watchPositionId),this._watchPositionId=null,!0):!1}fakeGps(e,t,n=null,r=0){n!==null&&this.setElevation(n),this._gpsReceived({coords:{longitude:e,latitude:t,accuracy:r}})}lonLatToWorldCoords(e,t){const n=this._proj.project(e,t);if(this.initialPosition)n[0]-=this.initialPosition[0],n[1]-=this.initialPosition[1];else throw"No initial position determined";return[n[0],-n[1]]}add(e,t,n,r,s={}){e.properties=s,this.setWorldPosition(e,t,n,r),this._scene.add(e)}setWorldPosition(e,t,n,r){const s=this.lonLatToWorldCoords(t,n);r!==void 0&&(e.position.y=r),[e.position.x,e.position.z]=s}setElevation(e){this._camera.position.y=e}on(e,t){this._eventHandlers[e]=t}setWorldOrigin(e,t){this.initialPosition=this._proj.project(e,t)}_gpsReceived(e){let t=Number.MAX_VALUE;e.coords.accuracy<=this._gpsMinAccuracy&&(this._lastCoords===null?this._lastCoords={latitude:e.coords.latitude,longitude:e.coords.longitude}:t=this._haversineDist(this._lastCoords,e.coords),t>=this._gpsMinDistance&&(this._lastCoords.longitude=e.coords.longitude,this._lastCoords.latitude=e.coords.latitude,this.initialPosition||this.setWorldOrigin(e.coords.longitude,e.coords.latitude),this.setWorldPosition(this._camera,e.coords.longitude,e.coords.latitude),this._eventHandlers.gpsupdate&&this._eventHandlers.gpsupdate(e,t)))}_haversineDist(e,t){const n=sn.degToRad(t.longitude-e.longitude),r=sn.degToRad(t.latitude-e.latitude),s=Math.sin(r/2)*Math.sin(r/2)+Math.cos(sn.degToRad(e.latitude))*Math.cos(sn.degToRad(t.latitude))*(Math.sin(n/2)*Math.sin(n/2));return 2*Math.atan2(Math.sqrt(s),Math.sqrt(1-s))*6371e3}}class ep{constructor(e,t,n){this.renderer=e,this.renderer.autoClear=!1,this.sceneWebcam=new Wf;let r;t===void 0?(r=document.createElement("video"),r.setAttribute("autoplay",!0),r.setAttribute("playsinline",!0),r.style.display="none",document.body.appendChild(r)):r=document.querySelector(t),this.geom=new gi,this.texture=new Xf(r),this.material=new Ns({map:this.texture});const s=new Ft(this.geom,this.material);if(this.sceneWebcam.add(s),this.cameraWebcam=new _o(-.5,.5,.5,-.5,0,10),navigator.mediaDevices&&navigator.mediaDevices.getUserMedia){const a={video:{width:(n==null?void 0:n.width)||1280,height:(n==null?void 0:n.height)||720,facingMode:"environment"}};navigator.mediaDevices.getUserMedia(a).then(o=>{console.log("using the webcam successfully..."),r.srcObject=o,r.play()}).catch(o=>{setTimeout(()=>{this.createErrorPopup(`Webcam Error
Name: `+o.name+`
-Message: `+o.message)},1e3)})}else setTimeout(()=>{this.createErrorPopup("sorry - media devices API not supported")},1e3)}update(){this.renderer.clear(),this.renderer.render(this.sceneWebcam,this.cameraWebcam),this.renderer.clearDepth()}dispose(){this.material.dispose(),this.texture.dispose(),this.geom.dispose()}createErrorPopup(e){if(!document.getElementById("error-popup")){var t=document.createElement("div");t.innerHTML=e,t.setAttribute("id","error-popup"),document.body.appendChild(t)}}}const Kf=new N(0,0,1),Va=new Bt,Zf=new hn,$f=new hn(-Math.sqrt(.5),0,0,Math.sqrt(.5)),jf={type:"change"};class tp extends wn{constructor(e){super(),window.isSecureContext===!1&&console.error("THREE.DeviceOrientationControls: DeviceOrientationEvent is only available in secure contexts (https)");const t=this,n=1e-6,r=new hn;this.object=e,this.object.rotation.reorder("YXZ"),this.enabled=!0,this.deviceOrientation={},this.screenOrientation=0,this.alphaOffset=0,this.deviceOrientationEventName="ondeviceorientationabsolute"in window?"deviceorientationabsolute":"deviceorientation";const s=function(l){t.deviceOrientation=l},a=function(){t.screenOrientation=window.orientation||0},o=function(l,c,u,p,f){Va.set(u,c,-p,"YXZ"),l.setFromEuler(Va),l.multiply($f),l.multiply(Zf.setFromAxisAngle(Kf,-f))};this.connect=function(){a(),window.DeviceOrientationEvent!==void 0&&typeof window.DeviceOrientationEvent.requestPermission=="function"?window.DeviceOrientationEvent.requestPermission().then(function(l){l=="granted"&&(window.addEventListener("orientationchange",a),window.addEventListener(t.deviceOrientationEventName,s))}).catch(function(l){console.error("THREE.DeviceOrientationControls: Unable to use DeviceOrientation API:",l)}):(window.addEventListener("orientationchange",a),window.addEventListener(t.deviceOrientationEventName,s)),t.enabled=!0},this.disconnect=function(){window.removeEventListener("orientationchange",a),window.removeEventListener(t.deviceOrientationEventName,s),t.enabled=!1},this.update=function(){if(t.enabled===!1)return;const l=t.deviceOrientation;if(l){const c=l.alpha?sn.degToRad(l.alpha)+t.alphaOffset:0,u=l.beta?sn.degToRad(l.beta):0,p=l.gamma?sn.degToRad(l.gamma):0,f=t.screenOrientation?sn.degToRad(t.screenOrientation):0;o(t.object.quaternion,c,u,p,f),8*(1-r.dot(t.object.quaternion))>n&&(r.copy(t.object.quaternion),t.dispatchEvent(jf))}},this.dispose=function(){t.disconnect()},this.connect()}}class np{constructor(e){this.raycaster=new qf,this.normalisedMousePosition=new Ye(null,null),e.domElement.addEventListener("click",t=>{this.normalisedMousePosition.set(t.clientX/e.domElement.clientWidth*2-1,-(t.clientY/e.domElement.clientHeight*2)+1)})}raycast(e,t){if(this.normalisedMousePosition.x!==null&&this.normalisedMousePosition.y!==null){this.raycaster.setFromCamera(this.normalisedMousePosition,e);const n=this.raycaster.intersectObjects(t.children,!1);return this.normalisedMousePosition.set(null,null),n}return[]}}export{_i as B,ep as E,Ft as M,Lt as P,Wf as S,Jf as W,Qf as _,Ns as a,tp as b,sn as c,np as y};
+Message: `+o.message)},1e3)})}else setTimeout(()=>{this.createErrorPopup("sorry - media devices API not supported")},1e3)}update(){this.renderer.clear(),this.renderer.render(this.sceneWebcam,this.cameraWebcam),this.renderer.clearDepth()}dispose(){this.material.dispose(),this.texture.dispose(),this.geom.dispose()}createErrorPopup(e){if(!document.getElementById("error-popup")){var t=document.createElement("div");t.innerHTML=e,t.setAttribute("id","error-popup"),document.body.appendChild(t)}}}const Kf=new N(0,0,1),Va=new Bt,Zf=new hn,$f=new hn(-Math.sqrt(.5),0,0,Math.sqrt(.5)),jf={type:"change"};class tp extends wn{constructor(e){super(),window.isSecureContext===!1&&console.error("THREE.DeviceOrientationControls: DeviceOrientationEvent is only available in secure contexts (https)");const t=this,n=1e-6,r=new hn;this.object=e,this.object.rotation.reorder("YXZ"),this.enabled=!0,this.deviceOrientation={},this.screenOrientation=0,this.alphaOffset=0,this.deviceOrientationEventName="ondeviceorientationabsolute"in window?"deviceorientationabsolute":"deviceorientation";const s=function(l){t.deviceOrientation=l},a=function(){t.screenOrientation=window.orientation||0},o=function(l,c,u,p,f){Va.set(u,c,-p,"YXZ"),l.setFromEuler(Va),l.multiply($f),l.multiply(Zf.setFromAxisAngle(Kf,-f))};this.connect=function(){a(),window.DeviceOrientationEvent!==void 0&&typeof window.DeviceOrientationEvent.requestPermission=="function"?window.DeviceOrientationEvent.requestPermission().then(function(l){l=="granted"&&(window.addEventListener("orientationchange",a),window.addEventListener(t.deviceOrientationEventName,s))}).catch(function(l){console.error("THREE.DeviceOrientationControls: Unable to use DeviceOrientation API:",l)}):(window.addEventListener("orientationchange",a),window.addEventListener(t.deviceOrientationEventName,s)),t.enabled=!0},this.disconnect=function(){window.removeEventListener("orientationchange",a),window.removeEventListener(t.deviceOrientationEventName,s),t.enabled=!1},this.update=function(){if(t.enabled===!1)return;const l=t.deviceOrientation;if(l){const c=l.alpha?sn.degToRad(l.alpha)+t.alphaOffset:0,u=l.beta?sn.degToRad(l.beta):0,p=l.gamma?sn.degToRad(l.gamma):0,f=t.screenOrientation?sn.degToRad(t.screenOrientation):0;o(t.object.quaternion,c,u,p,f),8*(1-r.dot(t.object.quaternion))>n&&(r.copy(t.object.quaternion),t.dispatchEvent(jf))}},this.dispose=function(){t.disconnect()},this.connect()}}class np{constructor(e){this.raycaster=new qf,this.normalisedMousePosition=new Ye(null,null),e.domElement.addEventListener("click",t=>{this.normalisedMousePosition.set(t.clientX/e.domElement.clientWidth*2-1,-(t.clientY/e.domElement.clientHeight*2)+1)})}raycast(e,t){if(this.normalisedMousePosition.x!==null&&this.normalisedMousePosition.y!==null){this.raycaster.setFromCamera(this.normalisedMousePosition,e);const n=this.raycaster.intersectObjects(t.children,!1);return this.normalisedMousePosition.set(null,null),n}return[]}}export{_i as B,Qf as E,Ft as M,Lt as P,Wf as S,Jf as W,ep as _,Ns as a,tp as b,np as y};
diff --git a/docs/index.html b/docs/index.html
index aca7589..66f0987 100644
--- a/docs/index.html
+++ b/docs/index.html
@@ -15,10 +15,10 @@
LocAR.js examples
-<li><a href="01-helloworld/">Hello World</a>: Displays a red cube just to the north of a fake GPS location. Can be tested on a desktop or laptop.</li>
-<li><a href="02-gps-and-sensors/">GPS and Sensors</a>: Requires a mobile device with GPS and sensors. Demonstrates use of the GPS and the device sensors to show real AR. Gets your initial location and displays four coloured boxes to the north (red), south (yellow), west (blue) and east (green) of your initial location. Can be used as a test to check whether your device sensors are accurate; if the red box does not appear to the north, your device sensors may be mis-calibrated.</li>
-<li><a href="03-ar-objects/">AR Objects</a>: Shows how you can add real AR objects with a given latitude and longitude, together with properties. Also shows how you can detect clicks on objects. Uses a hard-coded "fake" location and hard-coded objects, and you can rotate the three.js camera using the mouse, so it will work on a desktop or laptop.</li>
-<li><a href="04-api-communication/">API Communication</a>: Shows how you can communicate with a live GeoJSON API (OpenStreetMap-based). The GeoJSON is parsed, and AR objects are created from each GeoJSON feature in the feed. Uses your current real GPS location and the device sensors, so requires a real mobile device. It also uses each object's OpenStreetMap ID to cache objects in memory as they are added, preventing the same object being added twice. Note that a new request to the server is performed if you move 100 metres; a better solution to minimise the number of server requests would be to implement a tiling system. This will hopefully appear soon! The live example only works in Europe and Turkey due to the coverage of the underlying API, but can easily be modified to work with any GeoJSON API covering other parts of the world.</li>
+<li><a href="01-helloworld/">Hello World</a>: Displays a red cube just to the north of a fake GPS location. Can be tested on a desktop or laptop.</li>
+<li><a href="02-gps-and-sensors/">GPS and Sensors</a>: Requires a mobile device with GPS and sensors. Demonstrates use of the GPS and the device sensors to show real AR. Gets your initial location and displays four coloured boxes to the north (red), south (yellow), west (blue) and east (green) of your initial location. Can be used as a test to check whether your device sensors are accurate; if the red box does not appear to the north, your device sensors may be mis-calibrated.</li>
+<li><a href="03-api-communication/">API Communication</a>: Shows how you can communicate with a live GeoJSON API (OpenStreetMap-based). The GeoJSON is parsed, and AR objects are created from each GeoJSON feature in the feed. Uses your current real GPS location and the device sensors, so requires a real mobile device. It also uses each object's OpenStreetMap ID to cache objects in memory as they are added, preventing the same object being added twice. Note that a new request to the server is performed if you move 100 metres; a better solution to minimise the number of server requests would be to implement a tiling system. This will hopefully appear soon! The live example only works in Europe and Turkey due to the coverage of the underlying API, but can easily be modified to work with any GeoJSON API covering other parts of the world.</li>
diff --git a/docs/tutorial/index.md b/docs/tutorial/index.md
new file mode 100644
index 0000000..4a23951
--- /dev/null
+++ b/docs/tutorial/index.md
@@ -0,0 +1,41 @@
+# LocAR.js - Develop a simple Points of Interest app
+
+[LocAR.js](https://github.com/AR-js-org/locar.js) is the new standalone location-based API for AR.js; it is still in early development.
+
+Here is a series of tutorials taking you through how to use LocAR.js, from the basics to a more advanced example: a simple but working Points of Interest app using a live web API.
+
+It is expected that you have some understanding of the absolute basics of [three.js](https://threejs.org). You might want to read the introductory "Creating a Scene" section of the [three.js manual](https://threejs.org/docs/index.html#manual/en). (Note that there is currently a formatting issue with the source code samples on Firefox, so you should use another browser such as Chrome; a fix for Firefox is imminent.)
+
+You should also have very basic knowledge of [Vite](https://vitejs.dev) and the concept of JavaScript build tools and bundling. Vite is a build and development tool which, as well as bundling your code for production, provides a development server for working on client-side web apps "live": when you make a change to your code or its dependencies, the app reloads and the changes appear instantly. See the Vite docs for more.
+
+**Do note that it is not recommended to use Firefox on a mobile device due to limitations of the device orientation API. Chrome on Android is recommended.**
+
+## Installing and developing
+
+Here is a sample `package.json` containing three.js and LocAR.js as dependencies, and Vite as a dev dependency.
+
+```json
+{
+ "dependencies": {
+ "three": "^0.169.0",
+ "locar": "^0.0.2"
+ },
+ "devDependencies": {
+ "vite": "^5.4.8"
+ },
+ "scripts": {
+ "dev" : "vite dev",
+ "build": "vite build"
+ }
+}
+```
+As is standard with Vite, you should place your `index.html` in your project's main directory and the JavaScript source, e.g. `main.js`, inside the `src` directory. You can then run in dev mode with `npm run dev`, which starts a dev server on port 5173 and applies live changes to your code instantly.
+
+You can also build a bundle with `npm run build`, which will create a production app (a JavaScript bundle plus a version of `index.html` linking to the bundle) in the `dist` directory.
+
+### Contents
+
+- [Part 1: Hello World](part1.md)
+- [Part 2: Using the GPS and Device Orientation](part2.md)
+- [Part 3: Connecting to a web API](part3.md)
+
diff --git a/docs/tutorial/part1.md b/docs/tutorial/part1.md
new file mode 100644
index 0000000..4967d64
--- /dev/null
+++ b/docs/tutorial/part1.md
@@ -0,0 +1,143 @@
+# Location-based AR.js with LocAR.js
+
+## Part 1 - Hello World!
+
+
+The first part of this tutorial will show you how to create a "hello world" application using LocAR.js. It is assumed you are aware of basic three.js concepts, such as the scene, renderer and camera as well as geometries, materials and meshes. This example will set your location to a "fake" GPS location and add a box a short distance away.
+
+Let's start with the HTML, which is very simple:
+
+```html
+<!DOCTYPE html>
+<html>
+<head>
+<title>LocAR.js - Hello World</title>
+</head>
+<body>
+<script type="module" src="src/main.js"></script>
+</body>
+</html>
+```
+
+This example assumes that you have installed LocAR.js via `npm` and are using Vite in dev mode to run the application, as described on the [index page for the tutorial](index.md). We link in our JavaScript source as an ES6 module from `src/main.js`, so save your code as `main.js` inside the `src` directory. Here is the `main.js` code:
+
+```javascript
+import * as THREE from 'three';
+import * as LocAR from 'locar';
+
+const scene = new THREE.Scene();
+const camera = new THREE.PerspectiveCamera(60, window.innerWidth/window.innerHeight, 0.001, 100);
+const renderer = new THREE.WebGLRenderer();
+renderer.setSize(window.innerWidth, window.innerHeight);
+document.body.appendChild(renderer.domElement);
+
+window.addEventListener("resize", e => {
+ renderer.setSize(window.innerWidth, window.innerHeight);
+ camera.aspect = window.innerWidth / window.innerHeight;
+ camera.updateProjectionMatrix();
+});
+const box = new THREE.BoxGeometry(2,2,2);
+const cube = new THREE.Mesh(box, new THREE.MeshBasicMaterial({ color: 0xff0000 }));
+
+const locar = new LocAR.LocationBased(scene, camera);
+const cam = new LocAR.WebcamRenderer(renderer);
+
+
+locar.fakeGps(-0.72, 51.05);
+locar.add(cube, -0.72, 51.0501);
+
+renderer.setAnimationLoop(animate);
+
+
+function animate() {
+ cam.update();
+ renderer.render(scene, camera);
+}
+
+```
+
+Much of this is using standard three.js setup code as described in the [manual](https://threejs.org/docs/index.html#manual/en/introduction/Creating-a-scene); if you do not understand basic three.js concepts such as the scene, camera, and renderer, as well as geometries and meshes, you should read the three.js manual first.
+
+As normal, we create a `THREE.Scene`, a `THREE.PerspectiveCamera` and a `THREE.WebGLRenderer`, appending its canvas to the document body. We also handle window resizing. We then create a box geometry and a mesh using that geometry.
+
+What comes next though is new, and specific to AR.js:
+
+```javascript
+const locar = new LocAR.LocationBased(scene, camera);
+const cam = new LocAR.WebcamRenderer(renderer);
+```
+
+We use two new objects, both part of the LocAR.js API. Firstly `LocAR.LocationBased` is the overall AR.js "manager" object and secondly `LocAR.WebcamRenderer` is responsible for rendering the live camera feed. We need to supply our scene and camera as arguments to `LocAR.LocationBased` and our renderer as an argument to `LocAR.WebcamRenderer`.
+
+The `LocAR.WebcamRenderer` will, internally, create a `video` element to capture the webcam. Alternatively, if you have a `video` element already set up in your HTML, you can pass its CSS selector into the `WebcamRenderer` as an optional argument. For example:
+
+```javascript
+const cam = new LocAR.WebcamRenderer(renderer, '#video1');
+```
+
+Next we add our box mesh to LocAR. This next line is interesting:
+
+```javascript
+locar.add(cube, -0.72, 51.0501);
+```
+
+Rather than setting the box's `position` as we would normally do in standard three.js, we add it to a specific **real-world location** defined by longitude and latitude. The `add()` method of `LocAR.LocationBased` allows us to do that.
+
+Having positioned our box in a specific real-world location, we now need to place **ourselves** (i.e. the camera) at a given real-world location. We can do this with `LocAR.LocationBased`'s `fakeGps()` method, which takes longitude and latitude as parameters:
+
+```javascript
+locar.fakeGps(-0.72, 51.05);
+```
+
+This places us just to the south of the red box. By default, we face north, so the red box will appear in front of us.
+
+The remaining code is the standard three.js code for defining a rendering function and setting it as the animation loop. However note this code within the rendering function:
+
+```javascript
+cam.update();
+```
+
+This API call will render the latest camera frame.
+
+### Try it!
+
+Try it on either a desktop machine or an Android device running Chrome. In either case you should see the feed from the webcam, and a red box just in front of you. Note that the mobile device will not yet respond to changes in orientation: we will add that next time. For this reason you *must ensure the box is to your north*, as the default view faces north.
+
+### Faking rotation on a desktop machine
+
+If you do not have a suitable mobile device, you can simulate rotation with the mouse. The code below will do this (add to your main block of code, just before the rendering function):
+
+```javascript
+const rotationStep = THREE.MathUtils.degToRad(2);
+
+let mousedown = false, lastX = 0;
+
+window.addEventListener("mousedown", e=> {
+ mousedown = true;
+});
+
+window.addEventListener("mouseup", e=> {
+ mousedown = false;
+});
+
+window.addEventListener("mousemove", e=> {
+ if(!mousedown) return;
+ if(e.clientX < lastX) {
+ camera.rotation.y -= rotationStep;
+ if(camera.rotation.y < 0) {
+ camera.rotation.y += 2 * Math.PI;
+ }
+ } else if (e.clientX > lastX) {
+ camera.rotation.y += rotationStep;
+ if(camera.rotation.y > 2 * Math.PI) {
+ camera.rotation.y -= 2 * Math.PI;
+ }
+ }
+ lastX = e.clientX;
+});
+```
+
+What does this do? While the mouse button is held down, it tracks the direction of mouse movement using the `clientX` property of the event object, which contains the mouse X position. By comparing each value with the previous one, we can tell whether the mouse moved left or right, and rotate the camera anticlockwise or clockwise accordingly.
+
+We rotate the camera by the amount specified in `rotationStep` and ensure that the rotation always stays within the range 0 to 2π radians (i.e. 360 degrees).
+
diff --git a/docs/tutorial/part2.md b/docs/tutorial/part2.md
new file mode 100644
index 0000000..f387af6
--- /dev/null
+++ b/docs/tutorial/part2.md
@@ -0,0 +1,201 @@
+# Location-based AR.js with LocAR.js
+
+## Part 2 - Using the GPS and Device Orientation
+
+Having looked at the basics of the LocAR.js API in the first tutorial, we will now look at how to use the real GPS location. Last time, if you remember, we used a "fake" location via `LocAR.LocationBased`'s `fakeGps()` call.
+
+We will also look at how we can use the device's orientation controls, so that the orientation sensors are tracked and objects will appear in their real-world position when the device is rotated. For example, an object directly north of the user will only appear when the device is facing north.
+
+### GPS tracking
+
+Here is a revised version of the previous example which obtains your real GPS location:
+
+```javascript
+import * as THREE from 'three';
+import * as LocAR from 'locar';
+
+const scene = new THREE.Scene();
+const camera = new THREE.PerspectiveCamera(60, window.innerWidth/window.innerHeight, 0.001, 100);
+const renderer = new THREE.WebGLRenderer();
+renderer.setSize(window.innerWidth, window.innerHeight);
+document.body.appendChild(renderer.domElement);
+
+window.addEventListener("resize", e => {
+ renderer.setSize(window.innerWidth, window.innerHeight);
+ camera.aspect = window.innerWidth / window.innerHeight;
+ camera.updateProjectionMatrix();
+});
+const box = new THREE.BoxGeometry(2,2,2);
+const cube = new THREE.Mesh(box, new THREE.MeshBasicMaterial({ color: 0xff0000 }));
+
+const locar = new LocAR.LocationBased(scene, camera);
+const cam = new LocAR.WebcamRenderer(renderer);
+
+
+locar.startGps();
+locar.add(cube, -0.72, 51.0501);
+
+renderer.setAnimationLoop(animate);
+
+
+function animate() {
+ cam.update();
+ renderer.render(scene, camera);
+}
+```
+Note that we only needed to make one change: we replace the `fakeGps()` call with:
+```javascript
+locar.startGps();
+```
+This will make the application start listening for GPS updates via the Geolocation API. *The nice thing is we do not need to do anything else. The `LocationBased` object automatically updates the camera x and z coordinates to reflect our current GPS location.* Specifically, the GPS latitude and longitude are converted to Spherical Mercator, the sign of `z` is reversed (to match the OpenGL coordinate system), and the resulting coordinates are used for the camera coordinates.
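+
+Purely as an illustration, here is the standard Spherical Mercator conversion in code form. This is a sketch of the maths only, not the actual LocAR.js source; the library performs the equivalent conversion internally when it moves the camera:
+
+```javascript
+import * as THREE from 'three';
+
+// Standard Spherical Mercator (EPSG:3857) conversion, for illustration only.
+const R = 6378137; // Earth radius used by Spherical Mercator, in metres
+
+function lonLatToWorld(lon, lat) {
+    const x = R * THREE.MathUtils.degToRad(lon);
+    const y = R * Math.log(Math.tan(Math.PI / 4 + THREE.MathUtils.degToRad(lat) / 2));
+    return [x, -y]; // the sign of y is flipped to give the OpenGL z coordinate
+}
+```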
+
+### Using the device orientation controls
+
+Having looked at obtaining our real GPS position, we will now look at how we can use the orientation controls to ensure our AR scene matches the real world as we rotate the device around. This is, in principle, quite easy: we just need to create a `LocAR.DeviceOrientationControls` object and update it in our rendering function. This object is based on the original `DeviceOrientationControls` from three.js.
+
+However, there is a slight problem: this will currently only work in Chrome on Android (it may also work in Chrome on iOS, but this needs testing). The difficulty is obtaining absolute orientation (i.e. our orientation relative to north) from the device orientation API. On Chrome/Android this can be done using the `deviceorientationabsolute` event (and `LocAR.DeviceOrientationControls` has been modified from the original three.js version to handle this event); on Safari it can be done with `webkitCompassHeading` (not yet implemented, due to the lack of an iDevice for testing); on Firefox, sadly, support appears to be completely missing for now. See [this table of compatibility for absolute device orientation](https://developer.mozilla.org/en-US/docs/Web/API/Window/ondeviceorientationabsolute).
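+
+As a simplified sketch of what `LocAR.DeviceOrientationControls` does internally (based on the library source), the event name is feature-detected like this:
+
+```javascript
+// Prefer the compass-referenced event where the browser provides it
+// (currently Chrome on Android); fall back to the relative event otherwise.
+const eventName = 'ondeviceorientationabsolute' in window ?
+    'deviceorientationabsolute' : 'deviceorientation';
+
+window.addEventListener(eventName, e => {
+    // With deviceorientationabsolute, e.alpha is measured relative to north
+    console.log(`alpha ${e.alpha}, beta ${e.beta}, gamma ${e.gamma}`);
+});
+```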
+
+So it's recommended you use Chrome on Android for the moment. The example below shows the use of orientation tracking:
+
+```javascript
+import * as THREE from 'three';
+import * as LocAR from 'locar';
+
+const scene = new THREE.Scene();
+const camera = new THREE.PerspectiveCamera(60, window.innerWidth/window.innerHeight, 0.001, 100);
+const renderer = new THREE.WebGLRenderer();
+renderer.setSize(window.innerWidth, window.innerHeight);
+document.body.appendChild(renderer.domElement);
+
+window.addEventListener("resize", e => {
+ renderer.setSize(window.innerWidth, window.innerHeight);
+ camera.aspect = window.innerWidth / window.innerHeight;
+ camera.updateProjectionMatrix();
+});
+const box = new THREE.BoxGeometry(2,2,2);
+
+const cube = new THREE.Mesh(box, new THREE.MeshBasicMaterial({ color: 0xff0000 }));
+
+const locar = new LocAR.LocationBased(scene, camera);
+const cam = new LocAR.WebcamRenderer(renderer);
+
+// Create the device orientation tracker
+const deviceOrientationControls = new LocAR.DeviceOrientationControls(camera);
+
+locar.startGps();
+locar.add(cube, -0.72, 51.0501);
+
+renderer.setAnimationLoop(animate);
+
+
+function animate() {
+ // Update the scene using the latest sensor readings
+ deviceOrientationControls.update();
+
+ cam.update();
+ renderer.render(scene, camera);
+}
+```
+
+Note how we create a device orientation tracker with:
+```javascript
+const deviceOrientationControls = new LocAR.DeviceOrientationControls(camera);
+```
+
+The device orientation tracker updates the camera, so we need to pass it in as an argument.
+
+Also note how we update the device orientation tracker in our rendering function, so that new readings from the sensors are accounted for:
+
+```javascript
+deviceOrientationControls.update();
+```
+
+### Try it!
+
+Try it out. As the real GPS location and device orientation are used, you will need a mobile device. You should find that the red box appears in its real-world position (ensure it's not too far from you, e.g. 0.001 degrees of latitude to the north) and, due to the use of orientation tracking, only appears in the field of view when you are facing its location.
+
+### Adding four boxes to north, south, east and west
+
+One issue with web AR is that on a small number of devices the sensors (accelerometer, magnetometer) may be miscalibrated. A good way of checking this is to write a simple app which displays four boxes to your north, south, east and west when a GPS location is first obtained; you can then check whether those boxes appear in their correct locations. (In future we aim to produce a calibration tool to correct any miscalibration on your device.)
+
+Here is an enhanced version of the previous example, which will do this:
+
+```javascript
+import * as THREE from 'three';
+import * as LocAR from 'locar';
+
+const camera = new THREE.PerspectiveCamera(80, window.innerWidth / window.innerHeight, 0.001, 1000);
+
+const renderer = new THREE.WebGLRenderer();
+renderer.setSize(window.innerWidth, window.innerHeight);
+document.body.appendChild(renderer.domElement);
+
+const scene = new THREE.Scene();
+
+const locar = new LocAR.LocationBased(scene, camera);
+
+window.addEventListener("resize", e => {
+ renderer.setSize(window.innerWidth, window.innerHeight);
+ camera.aspect = window.innerWidth / window.innerHeight;
+ camera.updateProjectionMatrix();
+});
+
+const cam = new LocAR.WebcamRenderer(renderer);
+
+let firstLocation = true;
+
+const deviceOrientationControls = new LocAR.DeviceOrientationControls(camera);
+
+locar.on("gpsupdate", (pos, distMoved) => {
+ if(firstLocation) {
+
+ const boxProps = [{
+ latDis: 0.001,
+ lonDis: 0,
+ colour: 0xff0000
+ }, {
+ latDis: -0.001,
+ lonDis: 0,
+ colour: 0xffff00
+ }, {
+ latDis: 0,
+ lonDis: -0.001,
+ colour: 0x00ffff
+ }, {
+ latDis: 0,
+ lonDis: 0.001,
+ colour: 0x00ff00
+ }];
+
+ const geom = new THREE.BoxGeometry(20,20,20);
+
+ for(const boxProp of boxProps) {
+ const mesh = new THREE.Mesh(
+ geom,
+ new THREE.MeshBasicMaterial({color: boxProp.colour})
+ );
+
+ locar.add(
+ mesh,
+ pos.coords.longitude + boxProp.lonDis,
+ pos.coords.latitude + boxProp.latDis
+ );
+ }
+
+ firstLocation = false;
+ }
+});
+
+locar.startGps();
+
+renderer.setAnimationLoop(animate);
+
+function animate() {
+ cam.update();
+ deviceOrientationControls.update();
+ renderer.render(scene, camera);
+}
+```
+Note how it works: when we get a location, we check whether this was the first GPS location obtained (to prevent the same boxes being added each time our GPS location changes). If it was, we add four boxes a short distance to the north (red), south (yellow), west (cyan) and east (green) of us.
+
+Try it out, and if your sensors are calibrated correctly, you will see a red box to your north, a yellow box to your south, a cyan (light blue) box to your west and a green box to your east. These are relative to your *initial* position so as you move, the boxes' positions relative to you will change.
diff --git a/docs/tutorial/part3.md b/docs/tutorial/part3.md
new file mode 100644
index 0000000..25dd246
--- /dev/null
+++ b/docs/tutorial/part3.md
@@ -0,0 +1,120 @@
+# Location-based AR.js with LocAR.js
+
+## Part 3 - Connecting to a web API
+
+Having looked at how to use the LocAR.js API, we will now consider an example which connects to a web API providing points of interest. This example does not actually introduce any new AR.js concepts, but shows you how you can work with a web API.
+
+```javascript
+import * as THREE from 'three';
+import * as LocAR from 'locar';
+
+const camera = new THREE.PerspectiveCamera(80, window.innerWidth/window.innerHeight, 0.001, 1000);
+const renderer = new THREE.WebGLRenderer();
+renderer.setSize(window.innerWidth, window.innerHeight);
+const scene = new THREE.Scene();
+
+
+document.body.appendChild(renderer.domElement);
+
+
+window.addEventListener("resize", e => {
+ renderer.setSize(window.innerWidth, window.innerHeight);
+ camera.aspect = window.innerWidth / window.innerHeight;
+ camera.updateProjectionMatrix();
+});
+
+const locar = new LocAR.LocationBased(scene, camera);
+
+const deviceControls = new LocAR.DeviceOrientationControls(camera);
+
+const cam = new LocAR.WebcamRenderer(renderer);
+
+
+let firstPosition = true;
+
+const indexedObjects = { };
+
+const cube = new THREE.BoxGeometry(20, 20, 20);
+
+const clickHandler = new LocAR.ClickHandler(renderer);
+
+locar.on("gpsupdate", async(pos, distMoved) => {
+
+ if(firstPosition || distMoved > 100) {
+
+ const response = await fetch(`https://hikar.org/webapp/map?bbox=${pos.coords.longitude-0.02},${pos.coords.latitude-0.02},${pos.coords.longitude+0.02},${pos.coords.latitude+0.02}&layers=poi&outProj=4326`);
+ const pois = await response.json();
+
+ pois.features.forEach ( poi => {
+ if(!indexedObjects[poi.properties.osm_id]) {
+ const mesh = new THREE.Mesh(
+ cube,
+ new THREE.MeshBasicMaterial({color: 0xff0000})
+ );
+
+ locar.add(
+ mesh,
+ poi.geometry.coordinates[0],
+ poi.geometry.coordinates[1]
+ );
+ indexedObjects[poi.properties.osm_id] = mesh;
+ }
+ });
+ firstPosition = false;
+ }
+
+});
+locar.startGps();
+
+renderer.setAnimationLoop(animate);
+
+function animate() {
+ cam.update();
+ deviceControls.update();
+ renderer.render(scene, camera);
+}
+
+```
+
+How is this working? The key thing is we **handle the `gpsupdate` event** emitted by the `LocationBased` object when a GPS update occurs. This is specifically emitted when the inbuilt Geolocation API receives a GPS update, and allows us to trigger certain code.
+
+Here, we trigger a download from a web API when we get the update. Note that the `gpsupdate` event handler receives the standard position object of the Geolocation API, so, for example, its `coords` property contains the longitude and latitude. We then download data in a box extending 0.02 degrees in each direction from our current location (i.e. 0.04 x 0.04 degrees) from the API at https://hikar.org. This provides [OpenStreetMap](https://openstreetmap.org) POI data, but only for Europe and Turkey due to server capacity constraints. The data is provided as [GeoJSON](https://geojson.org).
+
+So having received the data, we simply loop through it and create one `THREE.Mesh` for each POI, adding it at the appropriate location (accessible via the `coordinates` of the `geometry` of each GeoJSON object).
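+
+For reference, each feature in the response has a shape along these lines. The values here are invented for illustration; `properties.osm_id`, `properties.name` and `geometry.coordinates` are the fields the code relies on:
+
+```javascript
+// Illustrative GeoJSON feature (values invented): the code above uses
+// properties.osm_id for caching and geometry.coordinates for placement.
+const exampleFeature = {
+    type: "Feature",
+    geometry: {
+        type: "Point",
+        coordinates: [-1.4044, 50.9079] // [longitude, latitude]
+    },
+    properties: {
+        osm_id: 123456789,
+        name: "Example Pub"
+    }
+};
+```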
+
+Note the boolean variable `firstPosition`, which is set to `false` as soon as we have fetched the data. Together with the `distMoved > 100` check, this prevents data being continuously downloaded every time we get a position update: a new request is only made once we have moved 100 metres from the position of the previous one. In a real application you could implement code to download data by tile, so that new data is downloaded whenever you move into a new tile; a possible approach is sketched below.
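+
+A minimal sketch of one such tiling scheme (hypothetical; not part of LocAR.js) might look like this:
+
+```javascript
+// Index fetches by tile so that each tile of POI data is requested at most
+// once, however often the GPS position updates.
+const TILE_SIZE = 0.04; // tile width/height in degrees
+const fetchedTiles = new Set();
+
+function tileIndex(lon, lat) {
+    return `${Math.floor(lon / TILE_SIZE)},${Math.floor(lat / TILE_SIZE)}`;
+}
+
+// Inside the gpsupdate handler:
+// const tile = tileIndex(pos.coords.longitude, pos.coords.latitude);
+// if(!fetchedTiles.has(tile)) {
+//     fetchedTiles.add(tile);
+//     // ...fetch the bounding box for this tile and add its POIs...
+// }
+```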
+
+### Detecting clicks with raycasting
+
+We can add the facility to detect clicks on our AR objects by making use of the three.js *raycaster*. This works by sending a line (ray) from a particular point in a certain direction (here, from the camera into the scene) and detecting intersections with objects. LocAR.js provides the `ClickHandler` class, a wrapper round the inbuilt three.js raycaster, to simplify the code. We create a `ClickHandler` object, passing in the renderer as an argument:
+
+```javascript
+const clickHandler = new LocAR.ClickHandler(renderer);
+```
+
+Before we add our raycasting code, we need to modify the code which adds objects to the scene, so that the object properties (name, etc.) are specified:
+```javascript
+locar.add(
+ mesh,
+ poi.geometry.coordinates[0],
+ poi.geometry.coordinates[1],
+ 0,
+ poi.properties
+);
+```
+Note how we specify two additional arguments when adding the object: the elevation (0; a future tutorial will show you how to add elevation from a Digital Elevation Model) and the POI properties from the GeoJSON. These will be used to display the object name to the user when we do our raycasting below.
+
+We perform the raycasting with the `raycast()` method within our animate function. Here is an example:
+
+```javascript
+function animate() {
+ cam.update();
+ deviceControls.update();
+ const objects = clickHandler.raycast(camera, scene);
+ if(objects.length) {
+ alert(`This is ${objects[0].object.properties.name}`);
+ }
+ renderer.render(scene, camera);
+}
+```
+Note how `raycast()` takes the point to raycast from (the camera) and the object to raycast into (the scene). It returns an array of objects intersected by the ray, ordered by distance. We are likely to be interested in only the first (the closest to the camera), so we obtain that using index 0. Each intersection has an `object` property representing the AR object the ray hit; the properties we specified when we added the AR object can be obtained via its `properties` property. From these, we obtain the name of the object and display it in an alert box.
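+
+For the curious, here is a simplified sketch of what `ClickHandler` does internally, based on the library source, using the `renderer`, `camera` and `scene` from the example above. (The real class also clears the stored click position after each raycast, so each click is only handled once.)
+
+```javascript
+import * as THREE from 'three';
+
+const raycaster = new THREE.Raycaster();
+const mouse = new THREE.Vector2();
+
+// Record the click position in normalised device coordinates (-1 to +1).
+renderer.domElement.addEventListener("click", e => {
+    mouse.set(
+        (e.clientX / renderer.domElement.clientWidth) * 2 - 1,
+        -(e.clientY / renderer.domElement.clientHeight) * 2 + 1
+    );
+});
+
+// Later, in the animation loop: cast a ray from the camera through the click
+// position and collect the objects it intersects.
+// raycaster.setFromCamera(mouse, camera);
+// const intersects = raycaster.intersectObjects(scene.children, false);
+```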
diff --git a/examples/03-ar-objects/index.html b/examples/03-api-communication/index.html
similarity index 100%
rename from examples/03-ar-objects/index.html
rename to examples/03-api-communication/index.html
diff --git a/examples/04-api-communication/src/main.js b/examples/03-api-communication/src/main.js
similarity index 100%
rename from examples/04-api-communication/src/main.js
rename to examples/03-api-communication/src/main.js
diff --git a/examples/03-ar-objects/src/main.js b/examples/03-ar-objects/src/main.js
deleted file mode 100644
index a116721..0000000
--- a/examples/03-ar-objects/src/main.js
+++ /dev/null
@@ -1,92 +0,0 @@
-import * as THREE from 'three';
-import * as LocAR from 'locar';
-
-const camera = new THREE.PerspectiveCamera(80, window.innerWidth/window.innerHeight, 0.001, 1000);
-const renderer = new THREE.WebGLRenderer();
-renderer.setSize(window.innerWidth, window.innerHeight);
-const scene = new THREE.Scene();
-
-
-document.body.appendChild(renderer.domElement);
-
-
-window.addEventListener("resize", e => {
- renderer.setSize(window.innerWidth, window.innerHeight);
- camera.aspect = window.innerWidth / window.innerHeight;
- camera.updateProjectionMatrix();
-});
-
-const locar = new LocAR.LocationBased(scene, camera);
-
-const cam = new LocAR.WebcamRenderer(renderer);
-
-
-let firstPosition = true;
-const oneDegAsRad = THREE.MathUtils.degToRad(1.0);
-
-const indexedObjects = { };
-
-const cube = new THREE.BoxGeometry(20, 20, 20);
-
-let mouseDown = false, initX;
-let curRotation = 0.0;
-
-const clickHandler = new LocAR.ClickHandler(renderer);
-
-renderer.domElement.addEventListener("mousedown", e=> {
- mouseDown = true;
- initX = e.clientX;
-});
-
-renderer.domElement.addEventListener("mousemove", e=> {
- if(mouseDown) {
- curRotation += e.clientX > initX ? oneDegAsRad*10: -oneDegAsRad*10;
- if(curRotation > Math.PI) {
- curRotation -= 2*Math.PI;
- } else if(curRotation < -Math.PI) {
- curRotation += 2*Math.PI;
- }
- camera.rotation.set(0, curRotation, 0);
- }
-});
-
-renderer.domElement.addEventListener("mouseup", e=> {
- mouseDown = false;
-
-});
-
-locar.on("gpsupdate", async(pos, distMoved) => {
-
- // Even if you have static AR objects, as opposed to objects from an
- // API, you must wait until the first GPS update before adding the
- // objects. This is because the internal x, y and z coordinates of each
- // object are relative to the initial position, and if we do not yet
- // have a GPS position, the initial position will be unknown.
- if(firstPosition) {
- firstPosition = false;
- const guildhall = new THREE.Mesh(
- cube,
- new THREE.MeshBasicMaterial({color: 0x00ffff})
- );
- const oneills = new THREE.Mesh(
- cube,
- new THREE.MeshBasicMaterial({color: 0xff0000})
- );
-
- locar.add(guildhall, -1.406392, 50.908042, 0, { "name": "Guildhall"} );
- locar.add(oneills, -1.404340, 50.907330, 0, { "name": "O'Neills"} );
- }
-
-});
-locar.fakeGps(-1.404555, 50.908015);
-
-renderer.setAnimationLoop(animate);
-
-function animate() {
- cam.update();
- const objects = clickHandler.raycast(camera, scene);
- if(objects.length) {
- alert(`This is ${objects[0].object.properties.name}`);
- }
- renderer.render(scene, camera);
-}
diff --git a/examples/04-api-communication/index.html b/examples/04-api-communication/index.html
deleted file mode 100644
index 29cbd26..0000000
--- a/examples/04-api-communication/index.html
+++ /dev/null
@@ -1,9 +0,0 @@
-
-
-
-three.js
-
-
-
-
-
diff --git a/examples/index.html b/examples/index.html
index aca7589..3174273 100644
--- a/examples/index.html
+++ b/examples/index.html
@@ -15,10 +15,9 @@
LocAR.js examples
-
Hello World: Display a red cube just to the north of a fake GPS location. Can be tested on a desktop or laptop.
-
GPS and Sensors: Requires a mobile device wih GPS and sensors. Demonstrates use of the GPS and the device sensors to show real AR. Gets your initial location and displays four coloured boxes to the north (red), south (yellow), west (blue) and east (green) of your initial location. Can be used as a test to check whether your device sensors are accurate; if the red box does not appear to the North, your device sensors may be mis-calibrated.
-
AR Objects: shows how you can add real AR objects with a given latitude and longitude and properties. Also shows how you can detect clicks on objects. Uses a hard-coded "fake" location and hard-coded objects, and you can rotate the three.js camera using the mouse, so will work on a desktop or laptop.
-
API Communication: shows how you can communicate with a live GeoJSON API (OpenStreetMap-based). The GeoJSON is parsed, and AR objects created from each GeoJSON feature in the feed. Uses your current real GPS location and the device sensors, so requires a real mobile device.. It also uses each object's OpenStreetMap ID to cache objects in memory as they are added, preventing the same object being added twice. Note that a new request to the server is performed if you move 100 metres; a better solution to minimise the number of server requests would be to implement a tiling system. This will hopefully appear soon! The live example only works in Europe and Turkey due to the coverage of the underlying API, but can easily be modified to work with any GeoJSON API covering other parts of the world.
+
Hello World: Displays a red cube just to the north of a fake GPS location. Can be tested on a desktop or laptop.
+
GPS and Sensors: Requires a mobile device with GPS and sensors. Demonstrates use of the GPS and the device sensors to show real AR. Gets your initial location and displays four coloured boxes to the north (red), south (yellow), west (blue) and east (green) of your initial location. Can be used as a test to check whether your device sensors are accurate; if the red box does not appear to the north, your device sensors may be miscalibrated.
+
API Communication: shows how you can communicate with a live GeoJSON API (OpenStreetMap-based). The GeoJSON is parsed, and AR objects are created from each GeoJSON feature in the feed. Uses your current real GPS location and the device sensors, so requires a real mobile device. It also uses each object's OpenStreetMap ID to cache objects in memory as they are added, preventing the same object being added twice. Note that a new request to the server is performed if you move 100 metres; a better solution to minimise the number of server requests would be to implement a tiling system. This will hopefully appear soon! The live example only works in Europe and Turkey due to the coverage of the underlying API, but can easily be modified to work with any GeoJSON API covering other parts of the world.