diff --git a/404.html b/404.html index 5ffe9ae..a7f2dd0 100644 --- a/404.html +++ b/404.html @@ -5,13 +5,13 @@ Page Not Found | OpenTwins - +
Skip to main content

Page Not Found

We could not find what you were looking for.

Please contact the owner of the site that linked you to the original URL and let them know their link is broken.

- + \ No newline at end of file diff --git a/assets/js/118e913f.ee963465.js b/assets/js/118e913f.b956ff9a.js similarity index 85% rename from assets/js/118e913f.ee963465.js rename to assets/js/118e913f.b956ff9a.js index 819279d..2c13154 100644 --- a/assets/js/118e913f.ee963465.js +++ b/assets/js/118e913f.b956ff9a.js @@ -1 +1 @@ -"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[7979],{3905:(e,t,i)=>{i.d(t,{Zo:()=>p,kt:()=>f});var n=i(7294);function a(e,t,i){return t in e?Object.defineProperty(e,t,{value:i,enumerable:!0,configurable:!0,writable:!0}):e[t]=i,e}function o(e,t){var i=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),i.push.apply(i,n)}return i}function r(e){for(var t=1;t=0||(a[i]=e[i]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,i)&&(a[i]=e[i])}return a}var c=n.createContext({}),s=function(e){var t=n.useContext(c),i=t;return e&&(i="function"==typeof e?e(t):r(r({},t),e)),i},p=function(e){var t=s(e.components);return n.createElement(c.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var i=e.components,a=e.mdxType,o=e.originalType,c=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),d=s(i),m=a,f=d["".concat(c,".").concat(m)]||d[m]||u[m]||o;return i?n.createElement(f,r(r({ref:t},p),{},{components:i})):n.createElement(f,r({ref:t},p))}));function f(e,t){var i=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=i.length,r=new Array(o);r[0]=m;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[d]="string"==typeof e?e:a,r[1]=l;for(var s=2;s{i.r(t),i.d(t,{assets:()=>c,contentTitle:()=>r,default:()=>u,frontMatter:()=>o,metadata:()=>l,toc:()=>s});var n=i(7462),a=(i(7294),i(3905));const o={sidebar_position:2},r="Concepts",l={unversionedId:"overview/concepts",id:"overview/concepts",title:"Concepts",description:'In this section, we will explore in depth the concept of a digital twin as defined by the platform. We will detail the information it can contain, explain the idea of a "digital twin type", and discuss how the composition works.',source:"@site/docs/overview/concepts.md",sourceDirName:"overview",slug:"/overview/concepts",permalink:"/opentwins/docs/overview/concepts",draft:!1,editUrl:"https://github.com/facebook/docusaurus/tree/main/packages/create-docusaurus/templates/shared/docs/overview/concepts.md",tags:[],version:"current",sidebarPosition:2,frontMatter:{sidebar_position:2},sidebar:"tutorialSidebar",previous:{title:"Purpose",permalink:"/opentwins/docs/overview/purpose"},next:{title:"Architecture",permalink:"/opentwins/docs/overview/architecture"}},c={},s=[{value:"Digital twin definition",id:"digital-twin-definition",level:3},{value:"Digital twin content",id:"digital-twin-content",level:3},{value:"Digital twin type",id:"digital-twin-type",level:3},{value:"Digital twins composition",id:"digital-twins-composition",level:3}],p={toc:s},d="wrapper";function u(e){let{components:t,...i}=e;return(0,a.kt)(d,(0,n.Z)({},p,i,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"concepts"},"Concepts"),(0,a.kt)("p",null,'In this section, we will explore in depth the concept of a digital twin as defined by the platform. 
We will detail the information it can contain, explain the idea of a "digital twin type", and discuss how the composition works.'),(0,a.kt)("h3",{id:"digital-twin-definition"},"Digital twin definition"),(0,a.kt)("p",null,"In the platform, ",(0,a.kt)("strong",{parentName:"p"},"a digital twin is defined as a replica of a real entity"),", whether tangible or not. This replica can be considered as an enhancement to monitoring the entity because, although it is not strictly necessary to be classified as a digital twin, it is beneficial to connect the real data of the entity with those generated by means of mathematical simulations or artificial intelligence. In this way, ",(0,a.kt)("strong",{parentName:"p"},"the digital twin becomes a central point that integrates all available sources of information on the entity"),", facilitating a unified, fast and effective query that promotes decision-making and, therefore, the optimization of the real entity."),(0,a.kt)("h3",{id:"digital-twin-content"},"Digital twin content"),(0,a.kt)("p",null,"A digital twin is composed of static and dynamic data. "),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},(0,a.kt)("strong",{parentName:"p"},"Static data.")," Information relevant to the digital twin that is expected to remain constant, such as the model, the date of acquisition or the location of the machine we are replicating. ")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},(0,a.kt)("strong",{parentName:"p"},"Dynamic data.")," Data that changes over time and that we will record in time series, such as the position of a mobile robot or the values measured by a sensor."))),(0,a.kt)("p",null,"TENGO QUE EXPLICAR AQUI QUE ES UN THING, QUE STATIC ES ATTRIBUTE Y TO LO DE DITTO VAYA. ABAJO TAMBIEN TEMA POLITICAS Y DEMAS."),(0,a.kt)("p",null,"For example, consider a DHT22 temperature and humidity sensor. 
Its digital twin, represented in JSON format following the ",(0,a.kt)("a",{parentName:"p",href:"https://eclipse.dev/ditto/basic-thing.html"},"schema provided by Eclipse Ditto"),", would look like this:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-json"},'{\n "policyId": "example:DHT22",\n "attributes": {\n "location": "Spain"\n },\n "features": {\n "temperature": {\n "properties": {\n "value": null\n }\n },\n "humidity": {\n "properties": {\n "value": null\n }\n }\n }\n}\n')),(0,a.kt)("h3",{id:"digital-twin-type"},"Digital twin type"),(0,a.kt)("h3",{id:"digital-twins-composition"},"Digital twins composition"))}u.isMDXComponent=!0}}]); \ No newline at end of file +"use strict";(self.webpackChunkdocs=self.webpackChunkdocs||[]).push([[7979],{3905:(e,t,i)=>{i.d(t,{Zo:()=>p,kt:()=>f});var n=i(7294);function a(e,t,i){return t in e?Object.defineProperty(e,t,{value:i,enumerable:!0,configurable:!0,writable:!0}):e[t]=i,e}function o(e,t){var i=Object.keys(e);if(Object.getOwnPropertySymbols){var n=Object.getOwnPropertySymbols(e);t&&(n=n.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),i.push.apply(i,n)}return i}function r(e){for(var t=1;t=0||(a[i]=e[i]);return a}(e,t);if(Object.getOwnPropertySymbols){var o=Object.getOwnPropertySymbols(e);for(n=0;n=0||Object.prototype.propertyIsEnumerable.call(e,i)&&(a[i]=e[i])}return a}var c=n.createContext({}),s=function(e){var t=n.useContext(c),i=t;return e&&(i="function"==typeof e?e(t):r(r({},t),e)),i},p=function(e){var t=s(e.components);return n.createElement(c.Provider,{value:t},e.children)},d="mdxType",u={inlineCode:"code",wrapper:function(e){var t=e.children;return n.createElement(n.Fragment,{},t)}},m=n.forwardRef((function(e,t){var i=e.components,a=e.mdxType,o=e.originalType,c=e.parentName,p=l(e,["components","mdxType","originalType","parentName"]),d=s(i),m=a,f=d["".concat(c,".").concat(m)]||d[m]||u[m]||o;return i?n.createElement(f,r(r({ref:t},p),{},{components:i})):n.createElement(f,r({ref:t},p))}));function f(e,t){var i=arguments,a=t&&t.mdxType;if("string"==typeof e||a){var o=i.length,r=new Array(o);r[0]=m;var l={};for(var c in t)hasOwnProperty.call(t,c)&&(l[c]=t[c]);l.originalType=e,l[d]="string"==typeof e?e:a,r[1]=l;for(var s=2;s{i.r(t),i.d(t,{assets:()=>c,contentTitle:()=>r,default:()=>u,frontMatter:()=>o,metadata:()=>l,toc:()=>s});var n=i(7462),a=(i(7294),i(3905));const o={sidebar_position:2},r="Concepts",l={unversionedId:"overview/concepts",id:"overview/concepts",title:"Concepts",description:'In this section, we will explore in depth the concept of a digital twin as defined by the platform. 
We will detail the information it can contain, explain the idea of a "digital twin type", and discuss how the composition works.',source:"@site/docs/overview/concepts.md",sourceDirName:"overview",slug:"/overview/concepts",permalink:"/opentwins/docs/overview/concepts",draft:!1,editUrl:"https://github.com/facebook/docusaurus/tree/main/packages/create-docusaurus/templates/shared/docs/overview/concepts.md",tags:[],version:"current",sidebarPosition:2,frontMatter:{sidebar_position:2},sidebar:"tutorialSidebar",previous:{title:"Purpose",permalink:"/opentwins/docs/overview/purpose"},next:{title:"Architecture",permalink:"/opentwins/docs/overview/architecture"}},c={},s=[{value:"Digital twin definition",id:"digital-twin-definition",level:3},{value:"Digital twin content",id:"digital-twin-content",level:3},{value:"Digital twin type",id:"digital-twin-type",level:3},{value:"Digital twins composition",id:"digital-twins-composition",level:3}],p={toc:s},d="wrapper";function u(e){let{components:t,...i}=e;return(0,a.kt)(d,(0,n.Z)({},p,i,{components:t,mdxType:"MDXLayout"}),(0,a.kt)("h1",{id:"concepts"},"Concepts"),(0,a.kt)("p",null,'In this section, we will explore in depth the concept of a digital twin as defined by the platform. We will detail the information it can contain, explain the idea of a "digital twin type", and discuss how the composition works.'),(0,a.kt)("h3",{id:"digital-twin-definition"},"Digital twin definition"),(0,a.kt)("p",null,"In the platform, ",(0,a.kt)("strong",{parentName:"p"},"a digital twin is defined as a replica of a real entity"),", whether tangible or not. This replica can be considered as an enhancement to monitoring the entity because, although it is not strictly necessary to be classified as a digital twin, it is beneficial to connect the real data of the entity with those generated by means of mathematical simulations or artificial intelligence. In this way, ",(0,a.kt)("strong",{parentName:"p"},"the digital twin becomes a central point that integrates all available sources of information on the entity"),", facilitating a unified, fast and effective query that promotes decision-making and, therefore, the optimization of the real entity."),(0,a.kt)("h3",{id:"digital-twin-content"},"Digital twin content"),(0,a.kt)("p",null,"A digital twin is composed of static and dynamic data. "),(0,a.kt)("ul",null,(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},(0,a.kt)("strong",{parentName:"p"},"Static data.")," Information relevant to the digital twin that is expected to remain constant, such as the model, the date of acquisition or the location of the machine we are replicating. ")),(0,a.kt)("li",{parentName:"ul"},(0,a.kt)("p",{parentName:"li"},(0,a.kt)("strong",{parentName:"p"},"Dynamic data.")," Data that changes over time and that we will record in time series, such as the position of a mobile robot or the values measured by a sensor."))),(0,a.kt)("p",null,"For example, consider a DHT22 temperature and humidity sensor. 
Its digital twin, represented in JSON format following the ",(0,a.kt)("a",{parentName:"p",href:"https://eclipse.dev/ditto/basic-thing.html"},"schema provided by Eclipse Ditto"),", would look like this:"),(0,a.kt)("pre",null,(0,a.kt)("code",{parentName:"pre",className:"language-json"},'{\n "policyId": "example:DHT22",\n "attributes": {\n "location": "Spain"\n },\n "features": {\n "temperature": {\n "properties": {\n "value": null\n }\n },\n "humidity": {\n "properties": {\n "value": null\n }\n }\n }\n}\n')),(0,a.kt)("h3",{id:"digital-twin-type"},"Digital twin type"),(0,a.kt)("h3",{id:"digital-twins-composition"},"Digital twins composition"))}u.isMDXComponent=!0}}]); \ No newline at end of file diff --git a/assets/js/runtime~main.e48836bb.js b/assets/js/runtime~main.11ad087b.js similarity index 98% rename from assets/js/runtime~main.e48836bb.js rename to assets/js/runtime~main.11ad087b.js index c59b427..24e353a 100644 --- a/assets/js/runtime~main.e48836bb.js +++ b/assets/js/runtime~main.11ad087b.js @@ -1 +1 @@ -(()=>{"use strict";var e,a,c,t,r,f={},d={};function b(e){var a=d[e];if(void 0!==a)return a.exports;var c=d[e]={id:e,loaded:!1,exports:{}};return f[e].call(c.exports,c,c.exports,b),c.loaded=!0,c.exports}b.m=f,b.c=d,e=[],b.O=(a,c,t,r)=>{if(!c){var f=1/0;for(i=0;i=r)&&Object.keys(b.O).every((e=>b.O[e](c[o])))?c.splice(o--,1):(d=!1,r0&&e[i-1][2]>r;i--)e[i]=e[i-1];e[i]=[c,t,r]},b.n=e=>{var a=e&&e.__esModule?()=>e.default:()=>e;return b.d(a,{a:a}),a},c=Object.getPrototypeOf?e=>Object.getPrototypeOf(e):e=>e.__proto__,b.t=function(e,t){if(1&t&&(e=this(e)),8&t)return e;if("object"==typeof e&&e){if(4&t&&e.__esModule)return e;if(16&t&&"function"==typeof e.then)return e}var r=Object.create(null);b.r(r);var f={};a=a||[null,c({}),c([]),c(c)];for(var d=2&t&&e;"object"==typeof d&&!~a.indexOf(d);d=c(d))Object.getOwnPropertyNames(d).forEach((a=>f[a]=()=>e[a]));return f.default=()=>e,b.d(r,f),r},b.d=(e,a)=>{for(var c in 
a)b.o(a,c)&&!b.o(e,c)&&Object.defineProperty(e,c,{enumerable:!0,get:a[c]})},b.f={},b.e=e=>Promise.all(Object.keys(b.f).reduce(((a,c)=>(b.f[c](e,a),a)),[])),b.u=e=>"assets/js/"+({53:"935f2afb",114:"908ba98b",788:"b0bae498",799:"c0eb0ada",948:"8717b14a",1144:"40b0d055",1199:"ac75af2e",1605:"d4f9f5df",1761:"acac1da9",1914:"d9f32620",2267:"59362658",2362:"e273c56f",2535:"814f3328",2614:"32191809",2651:"8070e160",2690:"8dd02d2f",3085:"1f391b9e",3089:"a6aa9e1f",3237:"1df93b7f",3514:"73664a40",3608:"9e4087bc",4013:"01a85c17",4482:"0b39e3dd",4576:"0598cbc5",4699:"85531627",5041:"aa3c268d",5391:"9281dd35",5969:"638abf38",5998:"eb58dad4",6103:"ccc49370",6216:"68c71cca",6513:"d72ac48e",6743:"4e0291d9",6933:"4c455ca7",7063:"ec3c7536",7081:"cbe8eee7",7214:"b9febb2b",7414:"393be207",7431:"1c4bf583",7557:"928e06c2",7632:"0964aedb",7826:"8ee96214",7841:"3c4035d3",7918:"17896441",7949:"6cb63160",7979:"118e913f",8364:"96e1810e",8610:"6875c492",8636:"f4f34a3a",8906:"1ba72f0d",9003:"925b3f96",9514:"1be78505",9521:"3c0fcc1c",9534:"3fb959cb",9607:"dd403c78",9642:"7661071f",9817:"14eb3368"}[e]||e)+"."+{53:"df6e34ad",114:"a7d9cf5e",210:"6e5e9f1a",788:"c03d2d33",799:"b281447b",948:"74619e7a",1144:"60ffe9ac",1199:"61dfe39f",1605:"2b2de3b8",1761:"05f2b0af",1914:"b045544d",2267:"22a5d353",2362:"c362744b",2529:"cec79ce1",2535:"0c1d9999",2614:"461d329a",2651:"40304e4e",2690:"7fc106b4",3085:"8fc7c9b3",3089:"845cad8c",3237:"20e07858",3514:"d45d5659",3608:"472c889f",4013:"75a76f22",4482:"2e144efa",4576:"b9f9eae9",4699:"61a331f6",4972:"b60a5582",5041:"11260222",5391:"056d9a3c",5969:"9e27bd44",5998:"2b30f0e7",6103:"d9c41d1e",6216:"ff06e3c7",6513:"0f042fb3",6743:"d888eb5c",6933:"17a20621",7063:"adf7bfc2",7081:"852eb739",7214:"6ab01aa0",7414:"6ad33c11",7431:"58dbf46d",7557:"fee88172",7632:"f08e1154",7826:"b91be920",7841:"ce4d5187",7918:"4f945c03",7949:"31ab2fd6",7979:"ee963465",8364:"a859fe5f",8610:"f37b7b5c",8636:"77d55ebe",8906:"aa79666e",9003:"a62a82fe",9514:"685933da",9521:"e76176e0",9534:"7f1cc7ba",9607:"df81f125",9642:"77dfe874",9817:"716e9ec1"}[e]+".js",b.miniCssF=e=>{},b.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),b.o=(e,a)=>Object.prototype.hasOwnProperty.call(e,a),t={},r="docs:",b.l=(e,a,c,f)=>{if(t[e])t[e].push(a);else{var d,o;if(void 0!==c)for(var n=document.getElementsByTagName("script"),i=0;i{d.onerror=d.onload=null,clearTimeout(s);var r=t[e];if(delete t[e],d.parentNode&&d.parentNode.removeChild(d),r&&r.forEach((e=>e(c))),a)return a(c)},s=setTimeout(l.bind(null,void 0,{type:"timeout",target:d}),12e4);d.onerror=l.bind(null,d.onerror),d.onload=l.bind(null,d.onload),o&&document.head.appendChild(d)}},b.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},b.p="/opentwins/",b.gca=function(e){return 
e={17896441:"7918",32191809:"2614",59362658:"2267",85531627:"4699","935f2afb":"53","908ba98b":"114",b0bae498:"788",c0eb0ada:"799","8717b14a":"948","40b0d055":"1144",ac75af2e:"1199",d4f9f5df:"1605",acac1da9:"1761",d9f32620:"1914",e273c56f:"2362","814f3328":"2535","8070e160":"2651","8dd02d2f":"2690","1f391b9e":"3085",a6aa9e1f:"3089","1df93b7f":"3237","73664a40":"3514","9e4087bc":"3608","01a85c17":"4013","0b39e3dd":"4482","0598cbc5":"4576",aa3c268d:"5041","9281dd35":"5391","638abf38":"5969",eb58dad4:"5998",ccc49370:"6103","68c71cca":"6216",d72ac48e:"6513","4e0291d9":"6743","4c455ca7":"6933",ec3c7536:"7063",cbe8eee7:"7081",b9febb2b:"7214","393be207":"7414","1c4bf583":"7431","928e06c2":"7557","0964aedb":"7632","8ee96214":"7826","3c4035d3":"7841","6cb63160":"7949","118e913f":"7979","96e1810e":"8364","6875c492":"8610",f4f34a3a:"8636","1ba72f0d":"8906","925b3f96":"9003","1be78505":"9514","3c0fcc1c":"9521","3fb959cb":"9534",dd403c78:"9607","7661071f":"9642","14eb3368":"9817"}[e]||e,b.p+b.u(e)},(()=>{var e={1303:0,532:0};b.f.j=(a,c)=>{var t=b.o(e,a)?e[a]:void 0;if(0!==t)if(t)c.push(t[2]);else if(/^(1303|532)$/.test(a))e[a]=0;else{var r=new Promise(((c,r)=>t=e[a]=[c,r]));c.push(t[2]=r);var f=b.p+b.u(a),d=new Error;b.l(f,(c=>{if(b.o(e,a)&&(0!==(t=e[a])&&(e[a]=void 0),t)){var r=c&&("load"===c.type?"missing":c.type),f=c&&c.target&&c.target.src;d.message="Loading chunk "+a+" failed.\n("+r+": "+f+")",d.name="ChunkLoadError",d.type=r,d.request=f,t[1](d)}}),"chunk-"+a,a)}},b.O.j=a=>0===e[a];var a=(a,c)=>{var t,r,f=c[0],d=c[1],o=c[2],n=0;if(f.some((a=>0!==e[a]))){for(t in d)b.o(d,t)&&(b.m[t]=d[t]);if(o)var i=o(b)}for(a&&a(c);n{"use strict";var e,a,c,t,r,f={},d={};function b(e){var a=d[e];if(void 0!==a)return a.exports;var c=d[e]={id:e,loaded:!1,exports:{}};return f[e].call(c.exports,c,c.exports,b),c.loaded=!0,c.exports}b.m=f,b.c=d,e=[],b.O=(a,c,t,r)=>{if(!c){var f=1/0;for(i=0;i=r)&&Object.keys(b.O).every((e=>b.O[e](c[o])))?c.splice(o--,1):(d=!1,r0&&e[i-1][2]>r;i--)e[i]=e[i-1];e[i]=[c,t,r]},b.n=e=>{var a=e&&e.__esModule?()=>e.default:()=>e;return b.d(a,{a:a}),a},c=Object.getPrototypeOf?e=>Object.getPrototypeOf(e):e=>e.__proto__,b.t=function(e,t){if(1&t&&(e=this(e)),8&t)return e;if("object"==typeof e&&e){if(4&t&&e.__esModule)return e;if(16&t&&"function"==typeof e.then)return e}var r=Object.create(null);b.r(r);var f={};a=a||[null,c({}),c([]),c(c)];for(var d=2&t&&e;"object"==typeof d&&!~a.indexOf(d);d=c(d))Object.getOwnPropertyNames(d).forEach((a=>f[a]=()=>e[a]));return f.default=()=>e,b.d(r,f),r},b.d=(e,a)=>{for(var c in 
a)b.o(a,c)&&!b.o(e,c)&&Object.defineProperty(e,c,{enumerable:!0,get:a[c]})},b.f={},b.e=e=>Promise.all(Object.keys(b.f).reduce(((a,c)=>(b.f[c](e,a),a)),[])),b.u=e=>"assets/js/"+({53:"935f2afb",114:"908ba98b",788:"b0bae498",799:"c0eb0ada",948:"8717b14a",1144:"40b0d055",1199:"ac75af2e",1605:"d4f9f5df",1761:"acac1da9",1914:"d9f32620",2267:"59362658",2362:"e273c56f",2535:"814f3328",2614:"32191809",2651:"8070e160",2690:"8dd02d2f",3085:"1f391b9e",3089:"a6aa9e1f",3237:"1df93b7f",3514:"73664a40",3608:"9e4087bc",4013:"01a85c17",4482:"0b39e3dd",4576:"0598cbc5",4699:"85531627",5041:"aa3c268d",5391:"9281dd35",5969:"638abf38",5998:"eb58dad4",6103:"ccc49370",6216:"68c71cca",6513:"d72ac48e",6743:"4e0291d9",6933:"4c455ca7",7063:"ec3c7536",7081:"cbe8eee7",7214:"b9febb2b",7414:"393be207",7431:"1c4bf583",7557:"928e06c2",7632:"0964aedb",7826:"8ee96214",7841:"3c4035d3",7918:"17896441",7949:"6cb63160",7979:"118e913f",8364:"96e1810e",8610:"6875c492",8636:"f4f34a3a",8906:"1ba72f0d",9003:"925b3f96",9514:"1be78505",9521:"3c0fcc1c",9534:"3fb959cb",9607:"dd403c78",9642:"7661071f",9817:"14eb3368"}[e]||e)+"."+{53:"df6e34ad",114:"a7d9cf5e",210:"6e5e9f1a",788:"c03d2d33",799:"b281447b",948:"74619e7a",1144:"60ffe9ac",1199:"61dfe39f",1605:"2b2de3b8",1761:"05f2b0af",1914:"b045544d",2267:"22a5d353",2362:"c362744b",2529:"cec79ce1",2535:"0c1d9999",2614:"461d329a",2651:"40304e4e",2690:"7fc106b4",3085:"8fc7c9b3",3089:"845cad8c",3237:"20e07858",3514:"d45d5659",3608:"472c889f",4013:"75a76f22",4482:"2e144efa",4576:"b9f9eae9",4699:"61a331f6",4972:"b60a5582",5041:"11260222",5391:"056d9a3c",5969:"9e27bd44",5998:"2b30f0e7",6103:"d9c41d1e",6216:"ff06e3c7",6513:"0f042fb3",6743:"d888eb5c",6933:"17a20621",7063:"adf7bfc2",7081:"852eb739",7214:"6ab01aa0",7414:"6ad33c11",7431:"58dbf46d",7557:"fee88172",7632:"f08e1154",7826:"b91be920",7841:"ce4d5187",7918:"4f945c03",7949:"31ab2fd6",7979:"b956ff9a",8364:"a859fe5f",8610:"f37b7b5c",8636:"77d55ebe",8906:"aa79666e",9003:"a62a82fe",9514:"685933da",9521:"e76176e0",9534:"7f1cc7ba",9607:"df81f125",9642:"77dfe874",9817:"716e9ec1"}[e]+".js",b.miniCssF=e=>{},b.g=function(){if("object"==typeof globalThis)return globalThis;try{return this||new Function("return this")()}catch(e){if("object"==typeof window)return window}}(),b.o=(e,a)=>Object.prototype.hasOwnProperty.call(e,a),t={},r="docs:",b.l=(e,a,c,f)=>{if(t[e])t[e].push(a);else{var d,o;if(void 0!==c)for(var n=document.getElementsByTagName("script"),i=0;i{d.onerror=d.onload=null,clearTimeout(s);var r=t[e];if(delete t[e],d.parentNode&&d.parentNode.removeChild(d),r&&r.forEach((e=>e(c))),a)return a(c)},s=setTimeout(l.bind(null,void 0,{type:"timeout",target:d}),12e4);d.onerror=l.bind(null,d.onerror),d.onload=l.bind(null,d.onload),o&&document.head.appendChild(d)}},b.r=e=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},b.p="/opentwins/",b.gca=function(e){return 
e={17896441:"7918",32191809:"2614",59362658:"2267",85531627:"4699","935f2afb":"53","908ba98b":"114",b0bae498:"788",c0eb0ada:"799","8717b14a":"948","40b0d055":"1144",ac75af2e:"1199",d4f9f5df:"1605",acac1da9:"1761",d9f32620:"1914",e273c56f:"2362","814f3328":"2535","8070e160":"2651","8dd02d2f":"2690","1f391b9e":"3085",a6aa9e1f:"3089","1df93b7f":"3237","73664a40":"3514","9e4087bc":"3608","01a85c17":"4013","0b39e3dd":"4482","0598cbc5":"4576",aa3c268d:"5041","9281dd35":"5391","638abf38":"5969",eb58dad4:"5998",ccc49370:"6103","68c71cca":"6216",d72ac48e:"6513","4e0291d9":"6743","4c455ca7":"6933",ec3c7536:"7063",cbe8eee7:"7081",b9febb2b:"7214","393be207":"7414","1c4bf583":"7431","928e06c2":"7557","0964aedb":"7632","8ee96214":"7826","3c4035d3":"7841","6cb63160":"7949","118e913f":"7979","96e1810e":"8364","6875c492":"8610",f4f34a3a:"8636","1ba72f0d":"8906","925b3f96":"9003","1be78505":"9514","3c0fcc1c":"9521","3fb959cb":"9534",dd403c78:"9607","7661071f":"9642","14eb3368":"9817"}[e]||e,b.p+b.u(e)},(()=>{var e={1303:0,532:0};b.f.j=(a,c)=>{var t=b.o(e,a)?e[a]:void 0;if(0!==t)if(t)c.push(t[2]);else if(/^(1303|532)$/.test(a))e[a]=0;else{var r=new Promise(((c,r)=>t=e[a]=[c,r]));c.push(t[2]=r);var f=b.p+b.u(a),d=new Error;b.l(f,(c=>{if(b.o(e,a)&&(0!==(t=e[a])&&(e[a]=void 0),t)){var r=c&&("load"===c.type?"missing":c.type),f=c&&c.target&&c.target.src;d.message="Loading chunk "+a+" failed.\n("+r+": "+f+")",d.name="ChunkLoadError",d.type=r,d.request=f,t[1](d)}}),"chunk-"+a,a)}},b.O.j=a=>0===e[a];var a=(a,c)=>{var t,r,f=c[0],d=c[1],o=c[2],n=0;if(f.some((a=>0!==e[a]))){for(t in d)b.o(d,t)&&(b.m[t]=d[t]);if(o)var i=o(b)}for(a&&a(c);n Blog | OpenTwins - +

· One min read
Sébastien Lorber
Yangshun Tay

Docusaurus blogging features are powered by the blog plugin.

Simply add Markdown files (or folders) to the blog directory.

Regular blog authors can be added to authors.yml.

The blog post date can be extracted from filenames, such as:

  • 2019-05-30-welcome.md
  • 2019-05-30-welcome/index.md

A blog post folder can be convenient to co-locate blog post images:

Docusaurus Plushie

The blog supports tags as well!

And if you don't want a blog: just delete this directory, and use blog: false in your Docusaurus config.

· One min read
Gao Wei

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

- + \ No newline at end of file diff --git a/blog/archive.html b/blog/archive.html index 9e230ff..301ed24 100644 --- a/blog/archive.html +++ b/blog/archive.html @@ -5,13 +5,13 @@ Archive | OpenTwins - + - + \ No newline at end of file diff --git a/blog/first-blog-post.html b/blog/first-blog-post.html index e117e17..928a1fe 100644 --- a/blog/first-blog-post.html +++ b/blog/first-blog-post.html @@ -5,13 +5,13 @@ First Blog Post | OpenTwins - +

First Blog Post

· One min read
Gao Wei

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

- + \ No newline at end of file diff --git a/blog/long-blog-post.html b/blog/long-blog-post.html index c4506f6..6d1dce7 100644 --- a/blog/long-blog-post.html +++ b/blog/long-blog-post.html @@ -5,13 +5,13 @@ Long Blog Post | OpenTwins - +

Long Blog Post

· 3 min read
Endilie Yacop Sucipto

This is the summary of a very long blog post,

Use a <!-- truncate --> comment to limit blog post size in the list view.

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

- + \ No newline at end of file diff --git a/blog/mdx-blog-post.html b/blog/mdx-blog-post.html index b513e42..7084f23 100644 --- a/blog/mdx-blog-post.html +++ b/blog/mdx-blog-post.html @@ -5,13 +5,13 @@ MDX Blog Post | OpenTwins - +
- + \ No newline at end of file diff --git a/blog/tags.html b/blog/tags.html index a625d06..e1395f3 100644 --- a/blog/tags.html +++ b/blog/tags.html @@ -5,13 +5,13 @@ Tags | OpenTwins - + - + \ No newline at end of file diff --git a/blog/tags/docusaurus.html b/blog/tags/docusaurus.html index a5f9292..87058bd 100644 --- a/blog/tags/docusaurus.html +++ b/blog/tags/docusaurus.html @@ -5,13 +5,13 @@ 4 posts tagged with "docusaurus" | OpenTwins - +

4 posts tagged with "docusaurus"

View All Tags

· One min read
Sébastien Lorber
Yangshun Tay

Docusaurus blogging features are powered by the blog plugin.

Simply add Markdown files (or folders) to the blog directory.

Regular blog authors can be added to authors.yml.

The blog post date can be extracted from filenames, such as:

  • 2019-05-30-welcome.md
  • 2019-05-30-welcome/index.md

A blog post folder can be convenient to co-locate blog post images:

Docusaurus Plushie

The blog supports tags as well!

And if you don't want a blog: just delete this directory, and use blog: false in your Docusaurus config.

· One min read
Gao Wei

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

- + \ No newline at end of file diff --git a/blog/tags/facebook.html b/blog/tags/facebook.html index c7b9ecf..4e002bc 100644 --- a/blog/tags/facebook.html +++ b/blog/tags/facebook.html @@ -5,13 +5,13 @@ One post tagged with "facebook" | OpenTwins - +

One post tagged with "facebook"

View All Tags

· One min read
Sébastien Lorber
Yangshun Tay

Docusaurus blogging features are powered by the blog plugin.

Simply add Markdown files (or folders) to the blog directory.

Regular blog authors can be added to authors.yml.

The blog post date can be extracted from filenames, such as:

  • 2019-05-30-welcome.md
  • 2019-05-30-welcome/index.md

A blog post folder can be convenient to co-locate blog post images:

Docusaurus Plushie

The blog supports tags as well!

And if you don't want a blog: just delete this directory, and use blog: false in your Docusaurus config.

- + \ No newline at end of file diff --git a/blog/tags/hello.html b/blog/tags/hello.html index 6eec11d..13ff83f 100644 --- a/blog/tags/hello.html +++ b/blog/tags/hello.html @@ -5,13 +5,13 @@ 2 posts tagged with "hello" | OpenTwins - +

2 posts tagged with "hello"

View All Tags

· One min read
Sébastien Lorber
Yangshun Tay

Docusaurus blogging features are powered by the blog plugin.

Simply add Markdown files (or folders) to the blog directory.

Regular blog authors can be added to authors.yml.

The blog post date can be extracted from filenames, such as:

  • 2019-05-30-welcome.md
  • 2019-05-30-welcome/index.md

A blog post folder can be convenient to co-locate blog post images:

Docusaurus Plushie

The blog supports tags as well!

And if you don't want a blog: just delete this directory, and use blog: false in your Docusaurus config.

- + \ No newline at end of file diff --git a/blog/tags/hola.html b/blog/tags/hola.html index 6280917..138d8d4 100644 --- a/blog/tags/hola.html +++ b/blog/tags/hola.html @@ -5,13 +5,13 @@ One post tagged with "hola" | OpenTwins - +

One post tagged with "hola"

View All Tags

· One min read
Gao Wei

Lorem ipsum dolor sit amet, consectetur adipiscing elit. Pellentesque elementum dignissim ultricies. Fusce rhoncus ipsum tempor eros aliquam consequat. Lorem ipsum dolor sit amet

- + \ No newline at end of file diff --git a/blog/welcome.html b/blog/welcome.html index 1baa392..d94a8d4 100644 --- a/blog/welcome.html +++ b/blog/welcome.html @@ -5,13 +5,13 @@ Welcome | OpenTwins - +

Welcome

· One min read
Sébastien Lorber
Yangshun Tay

Docusaurus blogging features are powered by the blog plugin.

Simply add Markdown files (or folders) to the blog directory.

Regular blog authors can be added to authors.yml.

The blog post date can be extracted from filenames, such as:

  • 2019-05-30-welcome.md
  • 2019-05-30-welcome/index.md

A blog post folder can be convenient to co-locate blog post images:

Docusaurus Plushie

The blog supports tags as well!

And if you don't want a blog: just delete this directory, and use blog: false in your Docusaurus config.

- + \ No newline at end of file diff --git a/docs/category/examples.html b/docs/category/examples.html index 6e68036..b0bd060 100644 --- a/docs/category/examples.html +++ b/docs/category/examples.html @@ -7,14 +7,14 @@ It is recommended using Postman to make all requests but youy can use your own method."> - +

Examples

There are two ways of creating digital twins: the first is using the Eclipse Ditto API and the second is using our Grafana plugin (WIP). It is recommended to use Postman to make all requests, but you can use your own method.

- + \ No newline at end of file diff --git a/docs/category/guides.html b/docs/category/guides.html index 15e39cd..bc5a1fa 100644 --- a/docs/category/guides.html +++ b/docs/category/guides.html @@ -5,13 +5,13 @@ Guides | OpenTwins - + - + \ No newline at end of file diff --git a/docs/category/installation.html b/docs/category/installation.html index 04600bd..10d2894 100644 --- a/docs/category/installation.html +++ b/docs/category/installation.html @@ -5,13 +5,13 @@ Installation | OpenTwins - +
- + \ No newline at end of file diff --git a/docs/category/manual.html b/docs/category/manual.html index 7bf8ea1..a28cd22 100644 --- a/docs/category/manual.html +++ b/docs/category/manual.html @@ -5,13 +5,13 @@ Manual | OpenTwins - + - + \ No newline at end of file diff --git a/docs/category/overview.html b/docs/category/overview.html index 873d728..7aed438 100644 --- a/docs/category/overview.html +++ b/docs/category/overview.html @@ -5,13 +5,13 @@ Overview | OpenTwins - + - + \ No newline at end of file diff --git a/docs/examples/ball-example.html b/docs/examples/ball-example.html index 844c67a..f6d90c0 100644 --- a/docs/examples/ball-example.html +++ b/docs/examples/ball-example.html @@ -5,13 +5,13 @@ Bouncing ball example | OpenTwins - + - + \ No newline at end of file diff --git a/docs/examples/raspberry-example.html b/docs/examples/raspberry-example.html index 3fb0510..6055ed0 100644 --- a/docs/examples/raspberry-example.html +++ b/docs/examples/raspberry-example.html @@ -5,7 +5,7 @@ Raspberry example | OpenTwins - + @@ -14,7 +14,7 @@ A twin has two main components:

  • attributes. It contains the basic information of the twin, such as the name, location, etc.
  • features. It contains the variables of the twin. Imagine a twin of a sensor that measures humidity and temperature. You will have two features: humidity and temperature. Each feature must contain a field called properties that contains, as its name says, every property of the feature, for example, the value of the temperature and the time the value has been measured.

Once we know which data our twin will store, it is time to create it. To create a twin we need to make HTTP requests; we recommend using Postman. We need to make a PUT request to the Ditto URL with the following pattern and a specific payload.

PUT http://{DITTO_IP}:{PORT}/api/2/things/{nameOfThing}

The payload has the attributes and features of the twin mentioned above. As attributes we have the location, in this case "Spain".

As features we have temperature and humidity. In this case both features have the same properties, value and timestamp, but they do not have to match.

{
  "attributes": {
    "location": "Spain"
  },
  "features": {
    "temperature": {
      "properties": {
        "value": null,
        "timestamp": null
      }
    },
    "humidity": {
      "properties": {
        "value": null,
        "timestamp": null
      }
    }
  }
}

Once we have checked that all the data is correct, just click Send. You should receive a 200 status code indicating a successful execution.

To check if the twin has been created properly, just send a GET request to the same url.

GET http://{DITTO_IP}:{PORT}/api/2/things/{nameOfThing}

You should get back the schema of the new twin.
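
If you prefer the command line to Postman, the same requests can be made with curl. A minimal sketch, assuming the payload above is saved in a file called dht22.json and that the default ditto:ditto credentials of the Ditto nginx are still in place (adapt both to your setup):

# Create (or overwrite) the twin
curl -i -X PUT -u ditto:ditto -H 'Content-Type: application/json' --data @dht22.json http://{DITTO_IP}:{PORT}/api/2/things/{nameOfThing}

# Verify that the twin exists
curl -s -u ditto:ditto http://{DITTO_IP}:{PORT}/api/2/things/{nameOfThing}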

Second step. Receiving the data

A digital twin is a copy of a real object or process, but so far we only have a schema, so we need to feed it with data. To achieve this we can use either the Kafka or the MQTT broker that is installed with the platform.

Ditto needs to receive the data in a specific format called Ditto Protocol, so we need the data to be sent in that format. But don't worry if you receive the data in another format: Ditto lets us create a JavaScript mapping to convert the format when the data arrives at Ditto (we will always recommend sending the data in Ditto Protocol).

Assuming that we receive the data in Ditto Protocol, we can configure the connection with one of the two brokers, Kafka or MQTT. To create a connection you can proceed with the same steps as for creating the twins: make a POST request to the URL with a payload that contains the connection information.

POST http://{DITTO_IP}:{PORT}/api/2/connections
{
  "name": "{NAME OF THE CONNECTION}",
  "connectionType": "kafka",
  "connectionStatus": "open",
  "uri": "tcp://KAFKA_BROKER_IP",
  "sources": [
    {
      "addresses": [
        "{LIST OF TOPICS TO READ}"
      ],
      "consumerCount": 1,
      "qos": 1,
      "authorizationContext": [
        "nginx:ditto"
      ],
      "headerMapping": {
        "correlation-id": "{{header:correlation-id}}",
        "namespace": "{{ entity:namespace }}",
        "content-type": "{{header:content-type}}",
        "connection": "{{ connection:id }}",
        "id": "{{ entity:id }}",
        "reply-to": "{{header:reply-to}}"
      },
      "replyTarget": {
        "address": "{{header:reply-to}}",
        "headerMapping": {
          "content-type": "{{header:content-type}}",
          "correlation-id": "{{header:correlation-id}}"
        },
        "expectedResponseTypes": [
          "response",
          "error"
        ],
        "enabled": true
      }
    }
  ],
  "targets": [],
  "clientCount": 5,
  "failoverEnabled": true,
  "validateCertificates": true,
  "processorPoolSize": 1,
  "specificConfig": {
    "saslMechanism": "plain",
    "bootstrapServers": "KAFKA_BROKER_IP"
  },
  "tags": []
}

Once we have checked that all the data is correct, just click Send. You should receive a 200 status code indicating a successful execution.

To check if the connection has been created properly, just send a GET request to the same URL, adding the ID of the new connection.

GET http://{DITTO_IP}:{PORT}/api/2/connections/{connectionID}

You should get back the information of the connection.

With all this set up, the configuration should be complete and Ditto should be receiving the data from the broker. If you want to create an example script to send the data, just follow the next link.

- + \ No newline at end of file diff --git a/docs/examples/raspberry-example/sending-data.html b/docs/examples/raspberry-example/sending-data.html index c665e71..ac3e349 100644 --- a/docs/examples/raspberry-example/sending-data.html +++ b/docs/examples/raspberry-example/sending-data.html @@ -5,13 +5,13 @@ Sending data to Ditto | OpenTwins - +

Sending data to Ditto

In this case we will use a Raspberry Pi 3B with Raspbian buster OS connected to a DHT22 temperature and humidity sensor.

Setting up the Raspberry Pi

The pins of the Raspberry Pi that we use are shown in the following image.

We will use pins 2, 6, 23 and 24.

Obtaining sensor data

To get the data from the sensor it is necessary to install its library.

sudo pip3 install Adafruit_DHT

We can test the operation of the sensor by creating a .py file with the following code (in our case it is called dht_code.py and we have placed it on the desktop).

import Adafruit_DHT
import time

SENSOR_DHT = Adafruit_DHT.DHT22
PIN_DHT = 24

while True:
    humedad, temperatura = Adafruit_DHT.read(SENSOR_DHT, PIN_DHT)
    if humedad is not None and temperatura is not None:
        print("Temp={0:0.1f}C Hum={1:0.1f}%".format(temperatura, humedad))
    else:
        print("Reading failed, check connection")
    time.sleep(3)

And we run it as follows:

cd Desktop/
python3 dht_code.py

Installing Mosquitto on Raspberry

To send the data to Ditto we will use MQTT with the Mosquitto broker.

sudo wget http://repo.mosquitto.org/debian/mosquitto-repo.gpg.key
sudo apt-key add mosquitto-repo.gpg.key
cd /etc/apt/sources.list.d/
sudo wget http://repo.mosquitto.org/debian/mosquitto-buster.list
sudo -i
apt-get update
apt-get install mosquitto
apt-get install mosquitto-clients

With this we would already have Mosquitto installed on our Raspberry. To test it we can open two terminals, subscribe to a topic with one and publish to that topic with another.

mosquitto_sub -h localhost -t casa/comedor/temperatura
mosquitto_pub -h localhost -t casa/comedor/temperatura -m "Temperatura: 25ºC"

Configuring Mosquitto on Raspberry

If we wanted to try to send and receive messages by MQTT between the Raspberry and another device, we would have to configure the following.

  1. On the Raspberry, edit the Mosquitto configuration file.
sudo nano /etc/mosquitto/mosquitto.conf
  2. Write these three lines at the end of the file to enable connections from any IP through port 1883 and configure authentication.
listener 1883 0.0.0.0

password_file /etc/mosquitto/passwd
allow_anonymous true

So that mosquitto.conf would look like this:

# Place your local configuration in /etc/mosquitto/conf.d/
#
# A full description of the configuration file is at
# /usr/share/doc/mosquitto/examples/mosquitto.conf.gz

pid_file /run/mosquitto/mosquitto.pid

persistence true
persistence_location /var/lib/mosquitto/

log_dest file /var/log/mosquitto/mosquitto.log
log_type all
log_timestamp true

include_dir /etc/mosquitto/conf.d

listener 1883 0.0.0.0

password_file /etc/mosquitto/passwd
allow_anonymous true
  3. Save the file with Ctrl-O, Enter and Ctrl-X.
  4. Create a user with a password using the following command. Replace USERNAME with the username you want. When you run it, it will ask you to enter a password, which will not be visible while you type it.
sudo mosquitto_passwd -c /etc/mosquitto/passwd USERNAME
  5. Restart Mosquitto with the following command:
sudo systemctl restart mosquitto

Finally, we have Mosquitto configured to receive and send messages from other IPs. To do this you have to add -u "USERNAME" and -P "PASSWORD" (including the quotes) to the respective command.

For example (in this case usuario is both the username and the password):

mosquitto_sub -h 192.168.0.27 -u "usuario" -P "usuario" -t "/Raspberry/Sensores/DHT22"

Sending data to MQTT from Raspberry

To work with MQTT in python we will need to make use of Eclipse Paho.

sudo pip3 install paho-mqtt

Now, we will create a .py file that publishes the sensor data to the corresponding MQTT topic. For this we have adapted the code example shown in the following link to the DHT22 sensor with the Adafruit_DHT library and the requirements of MQTT.

How to use MQTT in Python (Paho)

In addition, the MQTT message has been built according to the Ditto Protocol, following both the documentation and a usage example.

Things - Create-Or-Modify protocol specification

  • Code to send sensor data to MQTT and Eclipse Ditto
    from paho.mqtt import client as mqtt_client
    import time
    import random
    import Adafruit_DHT
    import json

    # Constants to connect to MQTT (broker and port are placeholders)
    broker = "IP OF MQTT"
    port = PORT_OF_MQTT  # replace with the MQTT broker port (e.g. 1883)
    topic = "telemetry"
    client_id = f'python-mqtt-{random.randint(0, 1000)}'
    username = "raspberry_DHT22_1@ditto"
    password = "password"

    # Constants to read the sensor data
    SENSOR_DHT = Adafruit_DHT.DHT22
    PIN_DHT = 24

    # Constants to build the Eclipse Ditto message
    DITTO_NAMESPACE = "raspberry"
    DITTO_THING_ID = "DHT22_1"

    def connect_mqtt():
        def on_connect(client, userdata, flags, rc):
            if rc == 0:
                print("Connected to MQTT Broker!")
            else:
                print("Failed to connect, return code %d\n", rc)
        # Set Connecting Client ID
        client = mqtt_client.Client(client_id)
        client.username_pw_set(username, password)
        client.on_connect = on_connect
        client.connect(broker, port)
        return client

    def publish(client):
        while True:
            time.sleep(1)
            msg = getValues()
            if msg is not None:
                result = client.publish(topic, msg)
                status = result[0]
                if status == 0:
                    print(f"Send '{msg}' to topic '{topic}'")
                else:
                    print(f"Failed to send message to topic {topic}")

    def getValues():
        humedad, temperatura = Adafruit_DHT.read(SENSOR_DHT, PIN_DHT)
        if humedad is not None and temperatura is not None:
            temp = "{0:0.1f}".format(temperatura)
            hum = "{0:0.1f}".format(humedad)
            # Build the Ditto Protocol "modify" message for the /features path
            output = "{\"topic\": \""
            output += DITTO_NAMESPACE
            output += "/"
            output += DITTO_THING_ID
            output += "/things/twin/commands/modify\",\"headers\":{\"response-required\":false, \"content-type\":\"application/vnd.eclipse.ditto+json\"},"
            output += "\"path\": \"/features\", \"value\":{"
            output += sensorString("temperature", temp)
            output += ","
            output += sensorString("humidity", hum)
            output += "}}"
            return output
        else:
            print("Reading failed, check circuit")
            return None

    def sensorString(name, value):
        return "\"" + name + "\": { \"properties\": { \"value\": " + value + "}}"

    def run():
        client = connect_mqtt()
        client.loop_start()
        publish(client)

    if __name__ == '__main__':
        run()

This code has been saved in a .py file named dht22publisher.py on the desktop. To execute it we use:

cd Desktop/
python3 dht22publisher.py
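
To check the whole chain without the sensor, you can also publish a hand-crafted Ditto Protocol message from any machine with mosquitto_pub. A sketch, reusing the topic and credentials from the script above (the broker IP and the two measured values are illustrative):

mosquitto_pub -h <BROKER_IP> -p 1883 -t telemetry -u "raspberry_DHT22_1@ditto" -P "password" \
  -m '{"topic": "raspberry/DHT22_1/things/twin/commands/modify", "headers": {"response-required": false, "content-type": "application/vnd.eclipse.ditto+json"}, "path": "/features", "value": {"temperature": {"properties": {"value": 22.5}}, "humidity": {"properties": {"value": 48.0}}}}'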
- + \ No newline at end of file diff --git a/docs/examples/string-example.html b/docs/examples/string-example.html index 5071d5e..68461c7 100644 --- a/docs/examples/string-example.html +++ b/docs/examples/string-example.html @@ -5,13 +5,13 @@ String and number example | OpenTwins - + - + \ No newline at end of file diff --git a/docs/guides/add-3d-visualization.html b/docs/guides/add-3d-visualization.html index 082f10c..bd9bbef 100644 --- a/docs/guides/add-3d-visualization.html +++ b/docs/guides/add-3d-visualization.html @@ -5,13 +5,13 @@ add-3d-visualization | OpenTwins - + - + \ No newline at end of file diff --git a/docs/guides/connect-dt-with-real-devices.html b/docs/guides/connect-dt-with-real-devices.html index 1cdaaf6..d922d0e 100644 --- a/docs/guides/connect-dt-with-real-devices.html +++ b/docs/guides/connect-dt-with-real-devices.html @@ -5,13 +5,13 @@ connect-dt-with-real-devices | OpenTwins - + - + \ No newline at end of file diff --git a/docs/guides/create-dt-scheme.html b/docs/guides/create-dt-scheme.html index 4d1a3b0..23abe80 100644 --- a/docs/guides/create-dt-scheme.html +++ b/docs/guides/create-dt-scheme.html @@ -5,13 +5,13 @@ Create Digital Twin scheme | OpenTwins - + - + \ No newline at end of file diff --git a/docs/installation/manual-deploy/3d-part.html b/docs/installation/manual-deploy/3d-part.html index 96e4ccc..f0a9c29 100644 --- a/docs/installation/manual-deploy/3d-part.html +++ b/docs/installation/manual-deploy/3d-part.html @@ -5,13 +5,13 @@ 3D visualization using Unity | OpenTwins - + - + \ No newline at end of file diff --git a/docs/installation/manual-deploy/core.html b/docs/installation/manual-deploy/core.html index 9d5d044..7ea2b6a 100644 --- a/docs/installation/manual-deploy/core.html +++ b/docs/installation/manual-deploy/core.html @@ -5,7 +5,7 @@ Core functionality | OpenTwins - + @@ -13,7 +13,7 @@

Core functionality

This section explains how to deploy the platform manually. Basically, you will have to deploy or install the different components and then connect them. The procedure explained below is the one we followed to deploy them on Kubernetes, in most cases using Helm, but any other installation will work as long as all the components are correctly installed and there is some kind of network between them so they can communicate.

It is not necessary to deploy all the components if you are not going to use all the functionalities. Check the architecture section to find out which ones are essential and what functionality each of them covers.

Steps to deploy

We recommend installing all components in the same Kubernetes namespace to make it easier to identify and control them all. In our case the namespace name will be stored in a bash variable called NS.
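
For example (the name opentwins is just an illustrative choice; use any name consistently in the commands below):

export NS=opentwins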

IMPORTANT

Also note that the values files have the variables that we recommend for the installation of each Helm Chart, but they can be extended or modified according to your needs (to do so, please consult the Helm Chart documentation).

Eclipse Ditto and Eclipse Hono

To deploy both Eclipse Ditto and Eclipse Hono we will directly install the cloud2edge package, which is specially created to allow these two tools to connect correctly. Before executing the commands we will need to have the files pv-hono.yaml, pv-mongodb.yaml, pvc-mongodb.yaml and values-cloud2edge.yaml in the folder where we are in the terminal. Once ready, and complying with all the prerequisites of the package, we execute the following commands.

helm repo add eclipse-iot https://eclipse.org/packages/charts
helm repo update

kubectl create namespace $NS
kubectl apply -f pv-hono.yaml -n $NS
kubectl apply -f pv-mongodb.yaml -n $NS
kubectl apply -f pvc-mongodb.yaml -n $NS

helm install -n $NS --wait --timeout 15m dt eclipse-iot/cloud2edge --version=0.2.3 -f values-cloud2edge.yaml --dependency-update --debug

If all pods are running and ready we already have the first two components installed.
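
You can check this, for example, by listing the pods in the namespace and waiting until they are all Running and Ready:

kubectl get pods -n $NS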

Apache Kafka

To deploy Kafka, the yaml files from another project have been reused, but it could also be installed using Helm if you prefer.

For Kafka to work, it is necessary to install ZooKeeper beforehand. In addition, CMAK, a tool to manage Apache Kafka, will be used to make it easier to use. Then, for the deployment, the pod-zookeeper.yaml, svc-zookeeper.yaml, pod-kafka.yaml, svc-kafka.yaml, deploy-kafka-manager.yaml and svc-kafka-manager.yaml files will be needed. Once you have them, you only need to apply them to the chosen namespace.

kubectl apply -f pod-zookeeper.yaml -n $NS
kubectl apply -f svc-zookeeper.yaml -n $NS

kubectl apply -f pod-kafka.yaml -n $NS
kubectl apply -f svc-kafka.yaml -n $NS

kubectl apply -f deploy-kafka-manager.yaml -n $NS
kubectl apply -f svc-kafka-manager.yaml -n $NS

InfluxDB

For InfluxDB, Helm will again be used for deployment. The sc-influxdb2.yaml and pv-influxdb2.yaml files will need to be applied before installation. In addition, the recommended values are in the values-influxdb2.yaml file (check it before installing and change the password variable to your preference).

helm repo add influxdata https://helm.influxdata.com/

kubectl apply -f sc-influxdb2.yaml -n $NS
kubectl apply -f pv-influxdb2.yaml -n $NS

helm install -n $NS influxdb influxdata/influxdb2 -f values-influxdb2.yaml --version=2.0.10

Grafana

Deploying Grafana is very similar to deploying InfluxDB. We will have to apply the pv-grafana.yaml file and install the Helm Chart with the values from the values-grafana.yaml file (it is also recommended to modify the password variable).

helm repo add grafana https://grafana.github.io/helm-charts

kubectl apply -f pv-grafana.yaml -n $NS

helm install -n $NS grafana grafana/grafana -f values-grafana.yaml --version=6.26.3

Steps to connect

Eclipse Hono and Eclipse Ditto

In the following diagram you can see how Eclipse Hono and Eclipse Ditto are related in our platform.

Eclipse Ditto and Eclipse Hono relationship

You will need to create a connection between the two for each Eclipse Hono tenant you want to use. Tenants basically act as device containers, so you could simply create a single tenant connected to Eclipse Ditto and store all the devices you need there. This is what we will do in this case, but you could create as many tenants and connections as your needs require.

The first thing to do is to check the IPs and ports to use with kubectl get services -n $NS. At this point we are interested in the dt-service-device-registry-ext and dt-ditto-nginx services, which correspond to Eclipse Hono and Eclipse Ditto respectively (if you have followed these instructions and services are NodePort, you will have to use port 3XXXX).
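
One way to capture these values in variables, assuming NodePort services on a single-node cluster (adjust the port index if a service exposes more than one port), could be:

HONO_IP=$(kubectl get nodes -o jsonpath='{.items[0].status.addresses[?(@.type=="InternalIP")].address}')
DITTO_IP=$HONO_IP
HONO_PORT=$(kubectl get service dt-service-device-registry-ext -n $NS -o jsonpath='{.spec.ports[0].nodePort}')
DITTO_PORT=$(kubectl get service dt-ditto-nginx -n $NS -o jsonpath='{.spec.ports[0].nodePort}')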

We will then create a Hono tenant called, for example, ditto (you must override the variable HONO_TENANT if you have chosen another name).

HONO_TENANT=ditto
curl -i -X POST http://$HONO_IP:$HONO_PORT/v1/tenants/$HONO_TENANT

Now we will create the connection from Eclipse Ditto, which will act as a consumer of the AMQP endpoint of that tenant. To do this you will need to know the Eclipse Ditto devops password with the following command (the variable RELEASE is the name we gave to the Helm release when installing cloud2edge, if you have followed these instructions it should be dt).

RELEASE=dt
DITTO_DEVOPS_PWD=$(kubectl --namespace ${NS} get secret ${RELEASE}-ditto-gateway-secret -o jsonpath="{.data.devops-password}" | base64 --decode)

Now we create the connection from Eclipse Ditto with the following command.

curl -i -X POST -u devops:${DITTO_DEVOPS_PWD} -H 'Content-Type: application/json' --data '{
  "targetActorSelection": "/system/sharding/connection",
  "headers": {
    "aggregate": false
  },
  "piggybackCommand": {
    "type": "connectivity.commands:createConnection",
    "connection": {
      "id": "hono-connection-for-'"${HONO_TENANT}"'",
      "connectionType": "amqp-10",
      "connectionStatus": "open",
      "uri": "amqp://consumer%40HONO:verysecret@'"${RELEASE}"'-dispatch-router-ext:15672",
      "failoverEnabled": true,
      "sources": [
        {
          "addresses": [
            "telemetry/'"${HONO_TENANT}"'",
            "event/'"${HONO_TENANT}"'"
          ],
          "authorizationContext": [
            "pre-authenticated:hono-connection"
          ],
          "enforcement": {
            "input": "{{ header:device_id }}",
            "filters": [
              "{{ entity:id }}"
            ]
          },
          "headerMapping": {
            "hono-device-id": "{{ header:device_id }}",
            "content-type": "{{ header:content-type }}"
          },
          "replyTarget": {
            "enabled": true,
            "address": "{{ header:reply-to }}",
            "headerMapping": {
              "to": "command/'"${HONO_TENANT}"'/{{ header:hono-device-id }}",
              "subject": "{{ header:subject | fn:default(topic:action-subject) | fn:default(topic:criterion) }}-response",
              "correlation-id": "{{ header:correlation-id }}",
              "content-type": "{{ header:content-type | fn:default('"'"'application/vnd.eclipse.ditto+json'"'"') }}"
            },
            "expectedResponseTypes": [
              "response",
              "error"
            ]
          },
          "acknowledgementRequests": {
            "includes": [],
            "filter": "fn:filter(header:qos,'"'"'ne'"'"','"'"'0'"'"')"
          }
        },
        {
          "addresses": [
            "command_response/'"${HONO_TENANT}"'/replies"
          ],
          "authorizationContext": [
            "pre-authenticated:hono-connection"
          ],
          "headerMapping": {
            "content-type": "{{ header:content-type }}",
            "correlation-id": "{{ header:correlation-id }}",
            "status": "{{ header:status }}"
          },
          "replyTarget": {
            "enabled": false,
            "expectedResponseTypes": [
              "response",
              "error"
            ]
          }
        }
      ],
      "targets": [
        {
          "address": "command/'"${HONO_TENANT}"'",
          "authorizationContext": [
            "pre-authenticated:hono-connection"
          ],
          "topics": [
            "_/_/things/live/commands",
            "_/_/things/live/messages"
          ],
          "headerMapping": {
            "to": "command/'"${HONO_TENANT}"'/{{ thing:id }}",
            "subject": "{{ header:subject | fn:default(topic:action-subject) }}",
            "content-type": "{{ header:content-type | fn:default('"'"'application/vnd.eclipse.ditto+json'"'"') }}",
            "correlation-id": "{{ header:correlation-id }}",
            "reply-to": "{{ fn:default('"'"'command_response/'"${HONO_TENANT}"'/replies'"'"') | fn:filter(header:response-required,'"'"'ne'"'"','"'"'false'"'"') }}"
          }
        },
        {
          "address": "command/'"${HONO_TENANT}"'",
          "authorizationContext": [
            "pre-authenticated:hono-connection"
          ],
          "topics": [
            "_/_/things/twin/events",
            "_/_/things/live/events"
          ],
          "headerMapping": {
            "to": "command/'"${HONO_TENANT}"'/{{ thing:id }}",
            "subject": "{{ header:subject | fn:default(topic:action-subject) }}",
            "content-type": "{{ header:content-type | fn:default('"'"'application/vnd.eclipse.ditto+json'"'"') }}",
            "correlation-id": "{{ header:correlation-id }}"
          }
        }
      ]
    }
  }
}' http://$DITTO_IP:$DITTO_PORT/devops/piggyback/connectivity

This connection is configured so that if an Eclipse Hono device has the ThingId of an Eclipse Ditto twin as its identifier, its messages will be redirected to that twin directly (explained in more detail in the usage section).
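
For example, to register a Hono device whose identifier matches a Ditto thing ID, a sketch using Hono's device registry management API could look like this (the device ID org.example:dht22, the auth-id and the password are illustrative):

DEVICE_ID=org.example:dht22
curl -i -X POST http://$HONO_IP:$HONO_PORT/v1/devices/$HONO_TENANT/$DEVICE_ID
curl -i -X PUT -H 'Content-Type: application/json' --data '[
  {
    "type": "hashed-password",
    "auth-id": "dht22",
    "secrets": [{ "pwd-plain": "verysecret" }]
  }
]' http://$HONO_IP:$HONO_PORT/v1/credentials/$HONO_TENANT/$DEVICE_ID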

Eclipse Ditto and Apache Kafka

To connect Eclipse Ditto to Kafka we will need to create a topic in Kafka and a Ditto connection to it. All events that occur in any of the Eclipse Ditto twins will be sent to this topic. You could also filter these events by twin or namespace and create several connections to multiple topics, but this is not really necessary and adds some complexity.

To create the topic in Kafka, the manager deployed above will be used. Check the IP and port of Kafka's manager with kubectl get services -n $NS and access it in a browser. Once on the page, if you don't have a cluster created, create a new one and create a topic inside it. In our case this topic will be called digitaltwins.

To add a cluster go to Cluster > Add Cluster and fill in at least the cluster name and the Zookeeper host (if you used our files, this will be zookeeper-1:2181). The other settings can be left at their defaults. It should look like the image below.

Create cluster in Apache Kafka

After adding the cluster, open it and go to Topic > Create to create a new topic. Here you only need to assign a name.

Create topic in Apache Kafka
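If you prefer the command line to the manager's UI, the topic can usually also be created with Kafka's own admin script from inside a broker pod. This is only a sketch: the pod name and the script location depend on the Kafka image used in your deployment.

kubectl exec -n $NS <kafka-broker-pod> -- kafka-topics.sh \
  --bootstrap-server localhost:9092 \
  --create --topic digitaltwins --partitions 1 --replication-factor 1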

Once the topic is created, we have to create a Kafka target connection in Eclipse Ditto that publishes all twin events to it. You will need the Ditto devops password obtained in the previous section.

curl -i -X POST -u devops:${DITTO_DEVOPS_PWD} -H 'Content-Type: application/json' --data '{
"targetActorSelection": "/system/sharding/connection",
"headers": {
"aggregate": false
},
"piggybackCommand": {
"type": "connectivity.commands:createConnection",
"connection": {
"id": "kafka-connection",
"connectionType": "kafka",
"connectionStatus": "open",
"failoverEnabled": true,
"uri": "tcp://kafka-cluster:9094",
"specificConfig": {
"bootstrapServers": "kafka-cluster:9094",
"saslMechanism": "plain"
},
"sources": [],
"targets": [
{
"address": "digitaltwins",
"topics": [
"_/_/things/twin/events",
"_/_/things/live/messages"
],
"authorizationContext": [
"nginx:ditto"
]
}
]
}
}
}' http://$DITTO_IP:$DITTO_PORT/devops/piggyback/connectivity

If the connection is successfully established, Eclipse Ditto and Kafka are already connected.
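If you want to double-check it from the command line, Eclipse Ditto's devops API also accepts piggyback commands to inspect connections. A sketch using retrieveConnectionStatus (same credentials and endpoint as before) would be:

curl -i -X POST -u devops:${DITTO_DEVOPS_PWD} -H 'Content-Type: application/json' --data '{
  "targetActorSelection": "/system/sharding/connection",
  "headers": {
    "aggregate": false
  },
  "piggybackCommand": {
    "type": "connectivity.commands:retrieveConnectionStatus",
    "connectionId": "kafka-connection"
  }
}' http://$DITTO_IP:$DITTO_PORT/devops/piggyback/connectivity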

Apache Kafka and InfluxDB: Deploying Telegraf

Telegraf will be in charge of collecting the messages published to the Kafka topic we just created and writing the relevant information to InfluxDB, i.e. it connects both tools. Telegraf is built around input and output plugins; in our case we will use the Kafka consumer input plugin and the InfluxDB v2.x output plugin. The Telegraf configuration is defined in the values-telegraf.yaml file before deployment: it is written in YAML as part of the chart values and automatically transformed into TOML when Telegraf is deployed.
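As an orientation, the relevant part of values-telegraf.yaml could look roughly like the sketch below, which wires the Kafka consumer input to the InfluxDB v2 output. The addresses, organisation, bucket and data format are placeholders that must match your deployment (the token is obtained in the next step), and the real file may need additional parsing options for the Ditto event format.

config:
  inputs:
    - kafka_consumer:
        brokers: ["<KAFKA_IP>:<KAFKA_PORT>"]
        topics: ["digitaltwins"]
        data_format: "json"
  outputs:
    - influxdb_v2:
        urls: ["http://<INFLUX_IP>:<INFLUX_PORT>"]
        token: "<INFLUX_TOKEN>"
        organization: "influxdata"
        bucket: "default"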

First of all we need to get a token from InfluxDB that gives Telegraf at least write permissions. To do this, access the InfluxDB interface (at the IP and port of its service) and go to Data > API Tokens. Click on Generate API Token and select All Access API Token. Assign any name, save, and select the token you have just created to copy it to the clipboard. Store it in a variable called INFLUX_TOKEN.

export INFLUX_TOKEN=<INFLUX_TOKEN>

Create token in InfluxDB

You also need to store in variables the IPs and ports of both Kafka and InfluxDB, as well as the name of the Kafka topic. These variables will be INFLUX_IP, INFLUX_PORT, KAFKA_IP, KAFKA_PORT and KAFKA_TOPIC, as shown below. Once all variables are ready, Telegraf can be deployed with the values defined in the values-telegraf.yaml file.
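For reference, the variables could be set as follows. The IPs come from kubectl get services -n $NS, the ports shown are only typical values (Kafka is exposed on 9094 in the connection created earlier) and the topic matches the one created in Kafka:

export INFLUX_IP=<INFLUX_IP>
export INFLUX_PORT=8086
export KAFKA_IP=<KAFKA_IP>
export KAFKA_PORT=9094
export KAFKA_TOPIC=digitaltwins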

helm install -n $NS telegraf influxdata/telegraf -f values-telegraf.yaml --version=1.8.18

If the pod is ready and running, Telegraf should be working, but it is advisable to check its logs to make sure.

kubectl logs -f --namespace $NS $(kubectl get pods --namespace $NS -l app.kubernetes.io/name=telegraf -o jsonpath='{ .items[0].metadata.name }')

InfluxDB and Grafana

Connecting these two tools is very simple. The first thing to do is to get a full-access token for Grafana in InfluxDB, as explained in the previous section. Then go to Configuration > Data sources in the Grafana interface, click on Add data source and select InfluxDB from the list. In the settings it is very important to select Flux as the query language. Fill in the URL field with the one that corresponds to InfluxDB. You will also have to enable Basic Auth and fill in its fields (in our case we used the default InfluxDB admin user, but you can create a new user instead). In the InfluxDB details, indicate the organisation (influxdata by default), the bucket (default by default) and the token you have generated. When you save and test, Grafana should report that at least one bucket was found, indicating that the two tools are connected.

Create datasource in Grafana
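If you prefer to configure the data source declaratively instead of through the UI, Grafana also supports data source provisioning. The following is only a sketch for the Flux-based InfluxDB source, where the URL, organisation, bucket and token are placeholders for the values described above:

apiVersion: 1
datasources:
  - name: InfluxDB
    type: influxdb
    access: proxy
    url: http://<INFLUX_IP>:<INFLUX_PORT>
    jsonData:
      version: Flux
      organization: influxdata
      defaultBucket: default
    secureJsonData:
      token: <INFLUX_TOKEN>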

- + \ No newline at end of file diff --git a/docs/installation/manual-deploy/ml-part.html b/docs/installation/manual-deploy/ml-part.html index 1f2351f..b1b7208 100644 --- a/docs/installation/manual-deploy/ml-part.html +++ b/docs/installation/manual-deploy/ml-part.html @@ -5,13 +5,13 @@ Kafka-ML conection | OpenTwins - + - + \ No newline at end of file diff --git a/docs/installation/manual-deploy/simulations-part.html b/docs/installation/manual-deploy/simulations-part.html index 1fda8ef..d2f484a 100644 --- a/docs/installation/manual-deploy/simulations-part.html +++ b/docs/installation/manual-deploy/simulations-part.html @@ -5,13 +5,13 @@ Simulations | OpenTwins - + - + \ No newline at end of file diff --git a/docs/installation/requirements.html b/docs/installation/requirements.html index 256f22a..b064501 100644 --- a/docs/installation/requirements.html +++ b/docs/installation/requirements.html @@ -5,13 +5,13 @@ Requirements | OpenTwins - +
- + \ No newline at end of file diff --git a/docs/installation/using-helm.html b/docs/installation/using-helm.html index ec55d2f..da7136a 100644 --- a/docs/installation/using-helm.html +++ b/docs/installation/using-helm.html @@ -5,14 +5,14 @@ Helm | OpenTwins - +

Helm

Installation

First of all, you have to add the ERTIS Research group Helm repository to your Helm repository list:

helm repo add ertis https://ertis-research.github.io/Helm-charts/

Once done, the next step is to install the chart by executing the following line in your terminal (in our case we use opentwins as both the release name and the namespace, but you can choose the ones you prefer). To customize the installation, please refer to the chart's values file.

helm upgrade --install opentwins ertis/OpenTwins -n opentwins --wait --dependency-update

After waiting some time, the installation will be ready for use.
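You can check the progress of the deployment by listing the pods in the release namespace and waiting until they are all in the Running state:

kubectl get pods -n opentwins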

Lightweight installation

As described on the main page, OpenTwins has its own lightweight version that is designed to run on IoT devices such as a Raspberry Pi. To install this version, first add the ERTIS repository to your repository list as described above, and then install the platform using the command below:

helm install ot ertis/OpenTwins-Lightweight -n opentwins

In this case, the connections between the components still need to be created manually for the platform to work properly.

- + \ No newline at end of file diff --git a/docs/overview/architecture.html b/docs/overview/architecture.html index 8bd8d06..559b59a 100644 --- a/docs/overview/architecture.html +++ b/docs/overview/architecture.html @@ -5,13 +5,13 @@ Architecture | OpenTwins - +

Architecture

OpenTwins is built on an open source microservices architecture, designed to enhance scalability, flexibility and efficiency in the development, extension, deployment and maintenance of the platform. All the components that make up this architecture are encapsulated in Docker containers, ideally managed through Kubernetes, which ensures efficient portability and management.

note

Although it is possible to deploy and connect the different components without containerization, this approach is not recommended due to the difficulties involved in terms of installation and management. However, it is important to note that OpenTwins could be manually connected to non-containerized components, such as a local instance of Grafana.

The following image illustrates the current architecture of OpenTwins, in which the color of each box represents the functionality covered by that component. Most of these components are projects external to our organization; however, we also include certain services specifically designed to enrich the functionality of the platform. Both the code and the documentation of the components are available in their respective repositories.

Architecture

Essential functionality

The elements highlighted in blue form the heart of OpenTwins, as they provide the essential functionalities of a digital twin development platform: the definition of digital twins, the connection to IoT devices, the storage of information and the user-friendly visualisation of data. The tools used in this case include:

  • Eclipse Ditto. This is the core component of OpenTwins, an open-source framework for digital twins developed by the Eclipse Foundation. Eclipse Ditto provides an abstract "Thing" entity, which allows digital twins to be described through JSON schemas that include both static and dynamic data of the entity. The framework stores the current state of the "Thing" entity and facilitates its connection to input and output data sources through various IoT protocols. In a typical scenario, the Thing entity updates its information via a source connection, generating events that are sent to the configured target connections. In addition, the tool provides an API for querying the current state of the entity and managing its schema and connections.

  • Eclipse Hono. This component facilitates the reception of data through various IoT protocols and centralizes it into a single endpoint, either AMQP 1.0 or Kafka. This output connects directly to Eclipse Ditto, eliminating the need for users to manually connect to an external broker to extract data. This allows the platform to receive data through the most common IoT protocols, giving devices the flexibility to connect to the most appropriate protocol for their particular case.

    danger

    Despite its advantages, we have observed that Eclipse Hono does not scale correctly when the message frequency is high, so we do not recommend its use in these cases. For this reason, or if it is not necessary to offer different input protocols, you can choose to connect Eclipse Ditto to one or more specific messaging brokers, such as Mosquitto or RabbitMQ.

  • MongoDB. This tool is the internal database used by Eclipse Hono and Eclipse Ditto. Eclipse Ditto stores data about the current state of digital twins ("things"), policies, connections and recent events, while Eclipse Hono stores information about defined devices and groups.

  • InfluxDB v2. This database provides an optimized architecture for time series, which guarantees superior performance in storing and querying digital twin data. Its high scalability and simplicity of use allow it to efficiently handle large volumes of data, facilitating the integration and analysis of information in real time. In addition, it is one of the most popular options in the field of the Internet of Things (IoT), generating an active community that consolidates its position as a robust solution.

  • Telegraf. This server-based agent for collecting and sending metrics offers easy configuration and a wide range of plugins to integrate various data sources and destinations. It is the recommended choice for data ingestion into InfluxDB. Its role in the platform is to capture digital twin updates, presented as Eclipse Ditto events, processing the data as time series for storage in the database.

  • Apache Kafka or Eclipse Mosquitto. An intermediary messaging broker is required for Telegraf to collect event data from Eclipse Ditto, since neither of these technologies provides this role and they have no direct connection to each other. For this purpose, any messaging broker that Eclipse Ditto can publish to and Telegraf can read from is valid. The options available on the platform include Apache Kafka, known for its scalability and fault tolerance when processing large volumes of data, and Mosquitto, recognized for its efficient messaging and its flexibility in IoT environments.

  • Grafana. This solution acts as the platform's main front-end, providing a highly adaptable data visualization that allows users to create intuitive and easily understandable dashboards. Its ability to integrate with a wide variety of data sources and its active community of users and developers make it a powerful tool for monitoring and analyzing complex systems, such as digital twins. In addition, it allows users to expand its functionality by creating custom plugins, giving them the ability to integrate new visualizations, use-case specific panels and connectors to additional data sources.

Compositional support

The composition of digital twins represents one of the main contributions of this platform, distinguishing it from other similar solutions. In addition, OpenTwins provides the ability to define and compose "types" of digital twins, making development simpler. The services marked in green in the architecture are responsible for integrating these functionalities.

  • Extended API for Eclipse Ditto. The Thing entity provided by Eclipse Ditto must follow a specific JSON schema, although it offers great flexibility within it. Our goal is to simplify type definition and entity composition by taking advantage of the flexibility of this schema. This "extended API" acts as a layer on top of the Eclipse Ditto API, distinguishing between the management of twins and types, and applying all the necessary constraints and checks to ensure the composition of these entities according to the constraints imposed by each (types form graphs, while twins form trees).

  • OpenTwins app plugin for Grafana. To make the platform pleasant and usable for as many users as possible, it is important to have a simple interface that exposes the available functionality. Therefore, a Grafana app plugin is included that uses the extended API to query and manage twins, types and their composition in a user-friendly way. Moreover, this approach keeps the entire platform front-end within a single tool, making it easy to use and accessible.

Data prediction with machine learning

The architecture highlights in yellow the components that facilitate the integration of digital twins with Machine Learning models. This support is a crucial aspect of digital twin development, since it offers a perspective that complements, and can be compared with, the real data, enriching the understanding of the replicated object. To achieve this goal, the following tools are used:

  • Kafka-ML. This open source framework manages the lifecycle of ML/AI applications in production environments through continuous data streams. Unlike traditional frameworks that work with static data sets, Kafka-ML enables both training and inference with continuous data streams, allowing users to have fine-grained control over the ingested data. Currently, Kafka-ML is compatible with the most popular ML frameworks, TensorFlow and PyTorch, and enables the management and deployment of ML models, from definition to final deployment for inference. This component operates as a black box in OpenTwins, receiving input data for a deployed model through a Kafka topic and sending the predicted result to another topic, which is connected to Eclipse Ditto so that the prediction updates the corresponding digital twin.

  • Eclipse Hono to Kafka-ML. Kafka-ML can receive input data from any source that is able to publish to a Kafka topic. However, in OpenTwins we have decided to simplify this process when the data comes from Eclipse Hono. Therefore, we have developed an optional service that automates the data feed of ML models deployed in Kafka-ML. This service automatically sends the data needed to make a prediction every time new data is received from any of the devices required by the model. To use this tool, we provide an API that allows you to specify which devices should be taken into account, what data is required from these devices and how it should be formatted to work correctly as input for Kafka-ML.

  • Error detection for Eclipse Hono with Kafka-ML. One ML model that is useful when building a digital twin is one capable of generating the data a sensor should be producing when it loses its connection or experiences a failure. To automate this, we have developed an optional service with functionality similar to the one mentioned above, but with an important particularity: it only invokes the model when an interruption in the reception of data from the device is detected. The service takes into account the frequency at which the device emits data. As soon as an anomaly is identified, it formats and sends the last data received, at the expected frequency, until the connection is restored and real data arrives again. In this way, the normal operation of the device is simulated, ensuring continuity of information for the digital twin.

3D representation

The red part...

- + \ No newline at end of file diff --git a/docs/overview/concepts.html b/docs/overview/concepts.html index e168460..7d877b1 100644 --- a/docs/overview/concepts.html +++ b/docs/overview/concepts.html @@ -5,13 +5,13 @@ Concepts | OpenTwins - +
-

Concepts

In this section, we will explore in depth the concept of a digital twin as defined by the platform. We will detail the information it can contain, explain the idea of a "digital twin type", and discuss how the composition works.

Digital twin definition

In the platform, a digital twin is defined as a replica of a real entity, whether tangible or not. This replica can be seen as an enhanced way of monitoring the entity because, although this is not strictly required for it to qualify as a digital twin, it is beneficial to combine the real data of the entity with data generated by mathematical simulations or artificial intelligence. In this way, the digital twin becomes a central point that integrates all available sources of information about the entity, enabling unified, fast and effective queries that support decision-making and, therefore, the optimization of the real entity.

Digital twin content

A digital twin is composed of static and dynamic data.

  • Static data. Information relevant to the digital twin that is expected to remain constant, such as the model, the date of acquisition or the location of the machine we are replicating.

  • Dynamic data. Data that changes over time and that we will record in time series, such as the position of a mobile robot or the values measured by a sensor.

I HAVE TO EXPLAIN HERE WHAT A THING IS, THAT STATIC DATA MAPS TO ATTRIBUTES, AND ALL THE DITTO DETAILS. BELOW, ALSO THE TOPIC OF POLICIES AND SO ON.

For example, consider a DHT22 temperature and humidity sensor. Its digital twin, represented in JSON format following the schema provided by Eclipse Ditto, would look like this:

{
  "policyId": "example:DHT22",
  "attributes": {
    "location": "Spain"
  },
  "features": {
    "temperature": {
      "properties": {
        "value": null
      }
    },
    "humidity": {
      "properties": {
        "value": null
      }
    }
  }
}

Digital twin type

Digital twins composition

- +

Concepts

In this section, we will explore in depth the concept of a digital twin as defined by the platform. We will detail the information it can contain, explain the idea of a "digital twin type", and discuss how the composition works.

Digital twin definition

In the platform, a digital twin is defined as a replica of a real entity, whether tangible or not. This replica can be seen as an enhanced way of monitoring the entity because, although this is not strictly required for it to qualify as a digital twin, it is beneficial to combine the real data of the entity with data generated by mathematical simulations or artificial intelligence. In this way, the digital twin becomes a central point that integrates all available sources of information about the entity, enabling unified, fast and effective queries that support decision-making and, therefore, the optimization of the real entity.

Digital twin content

A digital twin is composed of static and dynamic data.

  • Static data. Information relevant to the digital twin that is expected to remain constant, such as the model, the date of acquisition or the location of the machine we are replicating.

  • Dynamic data. Data that changes over time and that we will record in time series, such as the position of a mobile robot or the values measured by a sensor.

For example, consider a DHT22 temperature and humidity sensor. Its digital twin, represented in JSON format following the schema provided by Eclipse Ditto, would look like this:

{
  "policyId": "example:DHT22",
  "attributes": {
    "location": "Spain"
  },
  "features": {
    "temperature": {
      "properties": {
        "value": null
      }
    },
    "humidity": {
      "properties": {
        "value": null
      }
    }
  }
}
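To give an idea of how the dynamic part evolves, the value of a feature can be updated at any time through the Eclipse Ditto HTTP API or through a connection. As an illustrative sketch only, assuming a thing with ID example:dht22 and placeholder host and credentials, a temperature update could be sent like this (in practice these updates usually arrive automatically through the connections configured during installation):

curl -X PUT 'http://<DITTO_IP>:<DITTO_PORT>/api/2/things/example:dht22/features/temperature/properties/value' \
  -u <user>:<password> \
  -H 'Content-Type: application/json' \
  --data '22.5'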

Digital twin type

Digital twins composition

+ \ No newline at end of file diff --git a/docs/overview/purpose.html b/docs/overview/purpose.html index 83014a7..fc735d4 100644 --- a/docs/overview/purpose.html +++ b/docs/overview/purpose.html @@ -5,13 +5,13 @@ Purpose | OpenTwins - +

Purpose

This platform has been designed to facilitate the development of digital twins and is characterised by the exclusive use of open source components. The aim is to achieve a platform that covers all the functionalities that a digital twin may require, from the most basic ones, such as simply checking its real-time state, to more advanced ones, such as the inclusion of predicted or simulated data or visualisation of 3D models of the twins.

Take care

This platform is currently under development, so its use in production environments is not recommended at this stage.

- + \ No newline at end of file diff --git a/docs/quickstart.html b/docs/quickstart.html index 6c8fbd4..41e167b 100644 --- a/docs/quickstart.html +++ b/docs/quickstart.html @@ -5,13 +5,13 @@ Quickstart | OpenTwins - +

Quickstart

Welcome to OpenTwins, a flexible platform adapted to your needs! Although OpenTwins offers extensive customization options, we understand the importance of simplicity for beginners. Therefore, let's embark on a short journey together, showing you the quickest route to deploy the platform and develop a simple, functional digital twin.

Prerequisites

Please be sure you have the following utilities installed on your host machine: Helm and kubectl.

If you don't have a Kubernetes cluster, you can set one up locally using minikube. For a smooth deployment experience, we suggest using the following minimum configuration values.

minikube start --cpus 4 --disk-size 40gb --memory 8192
kubectl config use-context minikube

Installation

The quickest way to deploy OpenTwins is using Helm.

The following command adds the ERTIS repository where the OpenTwins helm chart is located.

helm repo add ertis https://ertis-research.github.io/Helm-charts/

To deploy the platform with recommended functionality, use the command below:

helm upgrade --install opentwins ertis/OpenTwins -n opentwins --wait --dependency-update

To modify which components are deployed and connected during the installation, see the installation via Helm page.

Define your first digital twin

A digital twin is composed of static and dynamic data.

Static data. Information relevant to the digital twin that is expected to remain constant, such as the model, the date of acquisition or the location of the machine we are replicating.

Dynamic data. Data that changes over time and that we will record in time series, such as the position of a mobile robot or the values measured by a sensor.

For example, consider a DHT22 temperature and humidity sensor. Its digital twin, represented in JSON format following the schema provided by Eclipse Ditto, would look like this:
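{
  "policyId": "example:DHT22",
  "attributes": {
    "location": "Spain"
  },
  "features": {
    "temperature": {
      "properties": {
        "value": null
      }
    },
    "humidity": {
      "properties": {
        "value": null
      }
    }
  }
}

One way to register this twin is to go directly against the Eclipse Ditto HTTP API rather than through the OpenTwins interface. The sketch below assumes a thing ID of example:dht22 and placeholder host and credentials, and it assumes the referenced policy already exists (you can omit policyId to let Ditto create a default one):

curl -X PUT 'http://<DITTO_IP>:<DITTO_PORT>/api/2/things/example:dht22' \
  -u <user>:<password> \
  -H 'Content-Type: application/json' \
  --data '{
    "policyId": "example:DHT22",
    "attributes": { "location": "Spain" },
    "features": {
      "temperature": { "properties": { "value": null } },
      "humidity": { "properties": { "value": null } }
    }
  }'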

Visualize twin data

- + \ No newline at end of file diff --git a/index.html b/index.html index d684206..52e29e3 100644 --- a/index.html +++ b/index.html @@ -5,13 +5,13 @@ OpenTwins - +
Docusaurus themed image
opentwins

Innovative open-source platform that specializes in
developing next-gen compositional digital twins

ertis logo · itis logo · uma logo
- + \ No newline at end of file diff --git a/markdown-page.html b/markdown-page.html index 25c6d99..df26a99 100644 --- a/markdown-page.html +++ b/markdown-page.html @@ -5,13 +5,13 @@ Markdown page example | OpenTwins - +

Markdown page example

You don't need React to write simple standalone pages.

- + \ No newline at end of file